introspectron 2.1.4 → 2.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/esm/gql-types.js +1 -0
- package/esm/gql.js +95 -174
- package/esm/index.js +2 -1
- package/esm/introspect.js +20 -35
- package/esm/pg-types.js +1 -0
- package/esm/process.js +24 -143
- package/esm/utils.js +1 -2
- package/gql-types.d.ts +57 -0
- package/gql-types.js +2 -0
- package/gql.d.ts +28 -3
- package/gql.js +98 -175
- package/index.d.ts +2 -0
- package/index.js +2 -1
- package/introspect.d.ts +7 -13
- package/introspect.js +20 -35
- package/package.json +8 -3
- package/pg-types.d.ts +104 -0
- package/pg-types.js +2 -0
- package/process.d.ts +2 -1
- package/process.js +24 -143
- package/utils.d.ts +5 -2
- package/utils.js +1 -2
package/gql.js
CHANGED
@@ -1,7 +1,51 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.parseGraphQuery = void 0;
-
+exports.parseGraphQuery = exports.parseSingleQuery = exports.parseConnectionQuery = void 0;
+const parseConnectionQuery = (context, query, nesting) => {
+const { HASH } = context;
+const objectType = getObjectType(query.type);
+const Connection = HASH[objectType];
+const nodes = Connection.fields.find((f) => f.name === 'nodes');
+const model = getObjectType(nodes.type);
+if (nesting === 0) {
+return {
+qtype: 'getMany',
+model,
+selection: parseSelectionScalar(context, model)
+};
+}
+return {
+qtype: 'getMany',
+model,
+selection: parseSelectionObject(context, model, 1)
+};
+};
+exports.parseConnectionQuery = parseConnectionQuery;
+const parseSingleQuery = (context, query, nesting) => {
+const { HASH, getInputForQueries } = context;
+const model = getObjectType(query.type);
+if (nesting === 0) {
+return {
+qtype: 'getOne',
+model,
+properties: query.args.reduce((m2, v) => {
+m2[v.name] = getInputForQueries(v.type);
+return m2;
+}, {}),
+selection: parseSelectionScalar(context, model)
+};
+}
+return {
+model,
+qtype: 'getOne',
+properties: query.args.reduce((m2, v) => {
+m2[v.name] = getInputForQueries(v.type);
+return m2;
+}, {}),
+selection: parseSelectionObject(context, model, 1)
+};
+};
+exports.parseSingleQuery = parseSingleQuery;
 const parseGraphQuery = (introQuery) => {
 const types = introQuery.__schema.types;
 const HASH = types.reduce((m, v) => {
@@ -23,43 +67,29 @@ const parseGraphQuery = (introQuery) => {
 }
 return getInputForQueries(input.ofType, context);
 }
-if (input.kind === 'INPUT_OBJECT') {
-
-
-
-
-
-name: field.name,
-type: field.type
-};
-})
-.reduce((m3, v) => {
-m3[v.name] = v;
-return m3;
-}, {});
-}
+if (input.kind === 'INPUT_OBJECT' && input.name && HASH.hasOwnProperty(input.name)) {
+const schema = HASH[input.name];
+context.properties = schema.inputFields.map((field) => ({ name: field.name, type: field.type })).reduce((m3, v) => {
+m3[v.name] = v;
+return m3;
+}, {});
 }
-else if (input.kind === 'OBJECT') {
-
-
-
-
-
-name: field.name,
-type: field.type
-};
-})
-.reduce((m3, v) => {
-m3[v.name] = v;
-return m3;
-}, {});
-}
+else if (input.kind === 'OBJECT' && input.name && HASH.hasOwnProperty(input.name)) {
+const schema = HASH[input.name];
+context.properties = schema.fields.map((field) => ({ name: field.name, type: field.type })).reduce((m3, v) => {
+m3[v.name] = v;
+return m3;
+}, {});
 }
 else {
-context.type = input.name;
+context.type = input.name ?? null;
 }
 return context;
 };
+const context = {
+HASH,
+getInputForQueries
+};
 const getInputForMutations = (input, context = {}) => {
 if (input.kind === 'NON_NULL') {
 context.isNotNull = true;
@@ -73,166 +103,70 @@ const parseGraphQuery = (introQuery) => {
 }
 return getInputForMutations(input.ofType, context);
 }
-if (input.kind === 'INPUT_OBJECT') {
-
-
-
-
-
-})
-.reduce((m3, v) => {
-m3[v.name] = v;
-return m3;
-}, {});
-}
+if (input.kind === 'INPUT_OBJECT' && input.name && HASH.hasOwnProperty(input.name)) {
+const schema = HASH[input.name];
+context.properties = schema.inputFields.map((field) => getInputForMutations(field.type, { name: field.name })).reduce((m3, v) => {
+m3[v.name] = v;
+return m3;
+}, {});
 }
-else if (input.kind === 'OBJECT') {
-
-
-
-
-
-name: field.name,
-type: field.type
-};
-})
-.reduce((m3, v) => {
-m3[v.name] = v;
-return m3;
-}, {});
-}
+else if (input.kind === 'OBJECT' && input.name && HASH.hasOwnProperty(input.name)) {
+const schema = HASH[input.name];
+context.properties = schema.fields.map((field) => ({ name: field.name, type: field.type })).reduce((m3, v) => {
+m3[v.name] = v;
+return m3;
+}, {});
 }
 else {
-context.type = input.name;
+context.type = input.name ?? null;
 }
 return context;
 };
 const mutations = mutationsRoot.fields.reduce((m, mutation) => {
 let mutationType = 'other';
-if (/^Create/.test(mutation.type.name))
+if (/^Create/.test(mutation.type.name))
 mutationType = 'create';
-
-else if (/^Update/.test(mutation.type.name)) {
+else if (/^Update/.test(mutation.type.name))
 mutationType = 'patch';
-
-else if (/^Delete/.test(mutation.type.name)) {
+else if (/^Delete/.test(mutation.type.name))
 mutationType = 'delete';
-}
 const props = mutation.args.reduce((m2, arg) => {
 const type = arg.type?.ofType?.name;
 const isNotNull = arg.type?.kind === 'NON_NULL';
 if (type && HASH.hasOwnProperty(type)) {
 const schema = HASH[type];
 const fields = schema.inputFields.filter((a) => a.name !== 'clientMutationId');
-const properties = fields
-.map((a) => getInputForMutations(a.type, { name: a.name }))
-.reduce((m3, v) => {
+const properties = fields.map((a) => getInputForMutations(a.type, { name: a.name })).reduce((m3, v) => {
 m3[v.name] = v;
 return m3;
 }, {});
-m2[arg.name] = {
-isNotNull,
-type,
-properties
-};
-}
-else {
-console.warn('whats wrong with ' + arg);
+m2[arg.name] = { isNotNull, type, properties };
 }
 return m2;
 }, {});
-const getModelTypes = (type) => {
-return type.fields
-.filter((t) => t.type.kind === 'OBJECT')
-.filter((t) => t.type.name !== 'Query')
-.map((f) => ({ name: f.name, type: f.type }));
-};
+const getModelTypes = (type) => type.fields.filter((t) => t.type.kind === 'OBJECT' && t.type.name !== 'Query').map((f) => ({ name: f.name, type: f.type }));
 const models = getModelTypes(HASH[mutation.type.name]);
 if (models.length > 0) {
-// TODO this is probably brittle
 const model = models[0].type.name;
-m[mutation.name] = {
-qtype: 'mutation',
-mutationType,
-model,
-properties: props,
-output: mutation.type
-};
+m[mutation.name] = { qtype: 'mutation', mutationType, model, properties: props, output: mutation.type };
 }
 else {
-// no return args, probably void functions
-let t;
 let outputFields = [];
 if (mutation.type.kind === 'OBJECT') {
-t = HASH[mutation.type.name];
-outputFields = t.fields
-.map((f) => ({ name: f.name, type: f.type }))
-.filter((f) => f.name !== 'clientMutationId')
-.filter((f) => f.type.name !== 'Query');
+const t = HASH[mutation.type.name];
+outputFields = t.fields.map((f) => ({ name: f.name, type: f.type })).filter((f) => f.name !== 'clientMutationId' && f.type.name !== 'Query');
 }
-m[mutation.name] = {
-qtype: 'mutation',
-mutationType,
-properties: props,
-output: mutation.type,
-outputs: outputFields
-};
+m[mutation.name] = { qtype: 'mutation', mutationType, properties: props, output: mutation.type, outputs: outputFields };
 }
 return m;
 }, {});
-// expect(mts).toMatchSnapshot();
-const parseConnectionQuery = (query, nesting) => {
-const objectType = getObjectType(query.type);
-const Connection = HASH[objectType];
-const nodes = Connection.fields.find((f) => f.name === 'nodes');
-const edges = Connection.fields.find((f) => f.name === 'edges');
-const model = getObjectType(nodes.type);
-const context = { HASH, parseConnectionQuery, parseSingleQuery };
-if (nesting === 0) {
-return {
-qtype: 'getMany',
-model,
-selection: parseSelectionScalar(context, model)
-};
-}
-return {
-qtype: 'getMany',
-model,
-selection: parseSelectionObject(context, model, 1)
-};
-};
-const parseSingleQuery = (query, nesting) => {
-const model = getObjectType(query.type);
-const context = { HASH, parseConnectionQuery, parseSingleQuery };
-if (nesting === 0) {
-return {
-qtype: 'getOne',
-model,
-properties: query.args.reduce((m2, v) => {
-m2[v.name] = getInputForQueries(v.type);
-return m2;
-}, {}),
-selection: parseSelectionScalar(context, model)
-};
-}
-return {
-model,
-qtype: 'getOne',
-properties: query.args.reduce((m2, v) => {
-m2[v.name] = getInputForQueries(v.type);
-return m2;
-}, {}),
-selection: parseSelectionObject(context, model, 1)
-};
-};
 const queries = queriesRoot.fields.reduce((m, query) => {
-// m[query.name] = getInputForQueries(query.type);
 if (query.type.kind === 'OBJECT') {
 if (isConnectionQuery(query)) {
-m[query.name] = parseConnectionQuery(query, 1);
+m[query.name] = (0, exports.parseConnectionQuery)(context, query, 1);
 }
 else {
-m[query.name] = parseSingleQuery(query, 1);
+m[query.name] = (0, exports.parseSingleQuery)(context, query, 1);
 }
 }
 return m;
@@ -243,59 +177,48 @@ const parseGraphQuery = (introQuery) => {
 };
 };
 exports.parseGraphQuery = parseGraphQuery;
-// Parse selections for both scalar and object fields
 function parseSelectionObject(context, model, nesting) {
-const { HASH
+const { HASH } = context;
 throwIfInvalidContext(context);
 const selectionFields = HASH[model].fields.filter((f) => !isPureObjectType(f.type));
-
+return selectionFields.map((f) => {
 if (f.type.ofType?.kind === 'OBJECT') {
 if (isConnectionQuery(f)) {
-return { name: f.name, ...parseConnectionQuery(f, nesting - 1) };
+return { name: f.name, ...(0, exports.parseConnectionQuery)(context, f, nesting - 1) };
 }
 else {
-return { name: f.name, ...parseSingleQuery(f, nesting - 1) };
+return { name: f.name, ...(0, exports.parseSingleQuery)(context, f, nesting - 1) };
 }
 }
 return f.name;
 });
-return selection;
 }
-// Parse selections for scalar types only, ignore all field selections
-// that have more nesting selection level
 function parseSelectionScalar(context, model) {
 const { HASH } = context;
 throwIfInvalidContext(context);
 const selectionFields = HASH[model].fields.filter((f) => !isPureObjectType(f.type) && !isConnectionQuery(f));
-
-return selection;
+return selectionFields.map((f) => f.name);
 }
 function isConnectionQuery(query) {
 const objectType = getObjectType(query.type);
 const fields = query.args.map((a) => a.name);
-return (/Connection$/.test(objectType) &&
+return (/Connection$/.test(objectType || '') &&
 fields.includes('condition') &&
 fields.includes('filter'));
 }
-/**
-* Check is a type is pure object type
-* pure object type is different from custom types in the sense that
-* it does not inherit from any type, custom types inherit from a parent type
-* @param {Object} typeObj
-* @returns {boolean}
-*/
 function isPureObjectType(typeObj) {
 return typeObj.kind === 'OBJECT' && typeObj.name == null;
 }
 function getObjectType(type) {
 if (type.kind === 'OBJECT')
-return type.name;
+return type.name || undefined;
 if (type.ofType)
 return getObjectType(type.ofType);
+return undefined;
 }
 function throwIfInvalidContext(context) {
-const { HASH,
-if (!HASH || !
+const { HASH, getInputForQueries } = context;
+if (!HASH || !getInputForQueries) {
 throw new Error('parseSelection: context missing');
 }
 }
package/index.d.ts
CHANGED
package/index.js
CHANGED
@@ -14,9 +14,10 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
 for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-// @ts-nocheck
 __exportStar(require("./introspect"), exports);
+__exportStar(require("./pg-types"), exports);
 __exportStar(require("./process"), exports);
 __exportStar(require("./query"), exports);
 __exportStar(require("./gql"), exports);
+__exportStar(require("./gql-types"), exports);
 __exportStar(require("./introspectGql"), exports);
package/introspect.d.ts
CHANGED
@@ -1,16 +1,10 @@
-
-
+import type { ClientBase } from 'pg';
+import type { PgIntrospectionResultByKind } from './pg-types';
+interface IntrospectOptions {
+schemas: string[];
 includeExtensions?: boolean;
 pgEnableTags?: boolean;
 pgThrowOnMissingSchema?: boolean;
-}
-
-
-class: any[];
-attribute: any[];
-type: any[];
-constraint: any[];
-procedure: any[];
-extension: any[];
-index: any[];
-}>>;
+}
+export declare const introspect: (pgClient: ClientBase, { schemas, includeExtensions, pgEnableTags, pgThrowOnMissingSchema }: IntrospectOptions) => Promise<PgIntrospectionResultByKind>;
+export {};
package/introspect.js
CHANGED
@@ -1,20 +1,16 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.introspect = void 0;
-// @ts-nocheck
 const query_1 = require("./query");
 const utils_1 = require("./utils");
-const introspect = async (pgClient, { schemas, includeExtensions = false, pgEnableTags = true, pgThrowOnMissingSchema = true }
+const introspect = async (pgClient, { schemas, includeExtensions = false, pgEnableTags = true, pgThrowOnMissingSchema = true }) => {
 const versionResult = await pgClient.query('show server_version_num;');
 const serverVersionNum = parseInt(versionResult.rows[0].server_version_num, 10);
 const introspectionQuery = (0, query_1.makeIntrospectionQuery)(serverVersionNum, {
 pgLegacyFunctionsOnly: false,
 pgIgnoreRBAC: true
 });
-const { rows } = await pgClient.query(introspectionQuery, [
-schemas,
-includeExtensions
-]);
+const { rows } = await pgClient.query(introspectionQuery, [schemas, includeExtensions]);
 const result = {
 __pgVersion: serverVersionNum,
 namespace: [],
@@ -29,8 +25,7 @@ const introspect = async (pgClient, { schemas, includeExtensions = false, pgEnab
 for (const { object } of rows) {
 result[object.kind].push(object);
 }
-
-[
+const kinds = [
 'namespace',
 'class',
 'attribute',
@@ -39,9 +34,9 @@ const introspect = async (pgClient, { schemas, includeExtensions = false, pgEnab
 'procedure',
 'extension',
 'index'
-]
-
-
+];
+for (const kind of kinds) {
+for (const object of result[kind]) {
 object.comment = object.description;
 if (pgEnableTags && object.description) {
 const parsed = (0, utils_1.parseTags)(object.description);
@@ -51,39 +46,29 @@ const introspect = async (pgClient, { schemas, includeExtensions = false, pgEnab
 else {
 object.tags = {};
 }
-}
-}
-const extensionConfigurationClassIds = result.extension.flatMap((e) => e.configurationClassIds);
-result.class
+}
+}
+const extensionConfigurationClassIds = result.extension.flatMap((e) => e.configurationClassIds || []);
+for (const klass of result.class) {
 klass.isExtensionConfigurationTable =
-extensionConfigurationClassIds.
-}
-
-
-
-
-
-'constraint',
-'procedure',
-'extension',
-'index'
-].forEach((k) => {
-result[k].forEach(Object.freeze);
-});
+extensionConfigurationClassIds.includes(klass.id);
+}
+for (const kind of kinds) {
+for (const obj of result[kind]) {
+Object.freeze(obj);
+}
+}
 const knownSchemas = result.namespace.map((n) => n.name);
-const missingSchemas = schemas.filter((s) => knownSchemas.
-if (missingSchemas.length) {
+const missingSchemas = schemas.filter((s) => !knownSchemas.includes(s));
+if (missingSchemas.length > 0) {
 const errorMessage = `You requested to use schema '${schemas.join("', '")}'; however we couldn't find some of those! Missing schemas are: '${missingSchemas.join("', '")}'`;
 if (pgThrowOnMissingSchema) {
 throw new Error(errorMessage);
 }
 else {
-console.warn('⚠️ WARNING⚠️ ' + errorMessage);
+console.warn('⚠️ WARNING⚠️ ' + errorMessage);
 }
 }
-// return result;
 return Object.freeze(result);
 };
 exports.introspect = introspect;
-// export const processIntrospection = async (pgClient, introspectionResultsByKind) => {
-// }
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "introspectron",
-"version": "2.1.4",
+"version": "2.2.1",
 "description": "introspect your Postgres database and generate an SDK",
 "author": "Dan Lynch <pyramation@gmail.com>",
 "main": "index.js",
@@ -29,15 +29,20 @@
 "test": "jest",
 "test:watch": "jest --watch"
 },
+"devDependencies": {
+"graphile-test": "^2.1.10",
+"pgsql-test": "^2.1.14"
+},
 "dependencies": {
-"graphql-tag": "2.12.
+"graphql-tag": "2.12.6"
 },
 "keywords": [
 "graphql",
 "introspection",
+"pg",
 "schema",
 "graphile",
 "launchql"
 ],
-"gitHead": "
+"gitHead": "565af20915f5a737f31f0695423c09ec9abc607f"
 }
package/pg-types.d.ts
ADDED
@@ -0,0 +1,104 @@
+type WithTags<T> = T & {
+comment?: string;
+description?: string;
+tags: Record<string, string>;
+};
+export type PgNamespace = WithTags<{
+id: string;
+name: string;
+}>;
+export type PgClass = WithTags<{
+id: string;
+name: string;
+namespaceId: string;
+namespaceName: string;
+typeId?: string;
+isExtensionConfigurationTable?: boolean;
+namespace?: PgNamespace;
+type?: PgType;
+attributes?: PgAttribute[];
+canUseAsterisk?: boolean;
+constraints?: PgConstraint[];
+foreignConstraints?: PgConstraint[];
+primaryKeyConstraint?: PgConstraint;
+}>;
+export type PgAttribute = WithTags<{
+id: string;
+classId: string;
+name: string;
+num: number;
+typeId: string;
+class?: PgClass;
+type?: PgType;
+isIndexed?: boolean;
+isUnique?: boolean;
+columnLevelSelectGrant?: boolean;
+}>;
+export type PgType = WithTags<{
+id: string;
+name: string;
+namespaceId: string;
+type: string;
+classId?: string;
+domainBaseTypeId?: string;
+arrayItemTypeId?: string;
+namespace?: PgNamespace;
+class?: PgClass;
+domainBaseType?: PgType;
+arrayItemType?: PgType;
+arrayType?: PgType;
+}>;
+export type PgConstraint = WithTags<{
+id: string;
+name: string;
+classId: string;
+foreignClassId: string | null;
+type: 'p' | 'f' | 'u';
+keyAttributeNums: number[];
+foreignKeyAttributeNums: number[] | null;
+isFake?: boolean;
+isIndexed?: boolean;
+class?: PgClass;
+foreignClass?: PgClass;
+keyAttributes?: PgAttribute[];
+foreignKeyAttributes?: PgAttribute[];
+}>;
+export type PgProcedure = WithTags<{
+id: string;
+name: string;
+namespaceId: string;
+namespace?: PgNamespace;
+}>;
+export type PgExtension = WithTags<{
+id: string;
+name: string;
+namespaceId: string;
+configurationClassIds: string[];
+namespace?: PgNamespace;
+configurationClasses?: PgClass[];
+}>;
+export type PgIndex = WithTags<{
+id: string;
+name: string;
+classId: string;
+attributeNums: number[];
+isUnique: boolean;
+class: PgClass;
+}>;
+export interface PgIntrospectionResultByKind {
+__pgVersion: number;
+namespace: PgNamespace[];
+class: PgClass[];
+attribute: PgAttribute[];
+type: PgType[];
+constraint: PgConstraint[];
+procedure: PgProcedure[];
+extension: PgExtension[];
+index: PgIndex[];
+namespaceById?: Record<string, PgNamespace>;
+classById?: Record<string, PgClass>;
+typeById?: Record<string, PgType>;
+attributeByClassIdAndNum?: Record<string, Record<string, PgAttribute>>;
+extensionById?: Record<string, PgExtension>;
+}
+export {};
package/pg-types.js
ADDED
package/process.d.ts
CHANGED
@@ -1 +1,2 @@
-
+import type { PgIntrospectionResultByKind } from './pg-types';
+export declare const introspectionResultsFromRaw: (rawResults: PgIntrospectionResultByKind, pgAugmentIntrospectionResults: ((res: PgIntrospectionResultByKind) => void) | null) => PgIntrospectionResultByKind;