@soda-gql/codegen 0.11.10 → 0.11.12
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/generator--76HN3Ud.mjs +3 -0
- package/dist/{generator-BILAfpvh.cjs → generator-0VPnXv1q.cjs} +1 -1
- package/dist/{generator-1A_wwhEm.mjs → generator-BJlFKC6z.mjs} +53 -8
- package/dist/generator-BJlFKC6z.mjs.map +1 -0
- package/dist/{generator-lc18-vLD.cjs → generator-DvfP6gTY.cjs} +53 -8
- package/dist/generator-DvfP6gTY.cjs.map +1 -0
- package/dist/index.cjs +869 -7
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +286 -2
- package/dist/index.d.cts.map +1 -1
- package/dist/index.d.mts +286 -2
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +864 -8
- package/dist/index.mjs.map +1 -1
- package/package.json +3 -3
- package/dist/generator-1A_wwhEm.mjs.map +0 -1
- package/dist/generator-D6nHdJ1K.mjs +0 -3
- package/dist/generator-lc18-vLD.cjs.map +0 -1
package/dist/index.cjs
CHANGED
@@ -1,11 +1,675 @@
-const require_generator = require('./generator-
+const require_generator = require('./generator-DvfP6gTY.cjs');
+let neverthrow = require("neverthrow");
+let graphql = require("graphql");
 let node_fs = require("node:fs");
 let node_path = require("node:path");
-let neverthrow = require("neverthrow");
 let esbuild = require("esbuild");
-let graphql = require("graphql");
 let node_crypto = require("node:crypto");
 
+//#region packages/codegen/src/graphql-compat/emitter.ts
+/**
+ * Emit TypeScript code for an operation.
+ */
+const emitOperation = (operation, options) => {
+  const lines = [];
+  const schema = options.schemaDocument ? require_generator.createSchemaIndex(options.schemaDocument) : null;
+  lines.push(`import { gql } from "${options.graphqlSystemPath}";`);
+  if (operation.fragmentDependencies.length > 0 && options.fragmentImports) {
+    for (const fragName of operation.fragmentDependencies) {
+      const importPath = options.fragmentImports.get(fragName);
+      if (importPath) {
+        lines.push(`import { ${fragName}Fragment } from "${importPath}";`);
+      }
+    }
+  }
+  lines.push("");
+  const exportName = `${operation.name}Compat`;
+  const operationType = operation.kind;
+  lines.push(`export const ${exportName} = gql.${options.schemaName}(({ ${operationType}, $var }) =>`);
+  lines.push(` ${operationType}.compat({`);
+  lines.push(` name: ${JSON.stringify(operation.name)},`);
+  if (operation.variables.length > 0) {
+    lines.push(` variables: { ${emitVariables(operation.variables)} },`);
+  }
+  lines.push(` fields: ({ f, $ }) => ({`);
+  const fieldLinesResult = emitSelections(operation.selections, 3, operation.variables, schema);
+  if (fieldLinesResult.isErr()) {
+    return (0, neverthrow.err)(fieldLinesResult.error);
+  }
+  lines.push(fieldLinesResult.value);
+  lines.push(` }),`);
+  lines.push(` }),`);
+  lines.push(`);`);
+  return (0, neverthrow.ok)(lines.join("\n"));
+};
+/**
+ * Emit TypeScript code for a fragment.
+ */
+const emitFragment = (fragment, options) => {
+  const lines = [];
+  const schema = options.schemaDocument ? require_generator.createSchemaIndex(options.schemaDocument) : null;
+  lines.push(`import { gql } from "${options.graphqlSystemPath}";`);
+  if (fragment.fragmentDependencies.length > 0 && options.fragmentImports) {
+    for (const fragName of fragment.fragmentDependencies) {
+      const importPath = options.fragmentImports.get(fragName);
+      if (importPath) {
+        lines.push(`import { ${fragName}Fragment } from "${importPath}";`);
+      }
+    }
+  }
+  lines.push("");
+  const exportName = `${fragment.name}Fragment`;
+  lines.push(`export const ${exportName} = gql.${options.schemaName}(({ fragment }) =>`);
+  lines.push(` fragment.${fragment.onType}({`);
+  lines.push(` fields: ({ f }) => ({`);
+  const fieldLinesResult = emitSelections(fragment.selections, 3, [], schema);
+  if (fieldLinesResult.isErr()) {
+    return (0, neverthrow.err)(fieldLinesResult.error);
+  }
+  lines.push(fieldLinesResult.value);
+  lines.push(` }),`);
+  lines.push(` }),`);
+  lines.push(`);`);
+  return (0, neverthrow.ok)(lines.join("\n"));
+};
+/**
+ * Emit variable definitions.
+ */
+const emitVariables = (variables) => {
+  return variables.map((v) => `...$var(${JSON.stringify(v.name)}).${v.typeName}(${JSON.stringify(v.modifier)})`).join(", ");
+};
+/**
+ * Emit field selections (public API).
+ * Converts EnrichedVariable[] to Set<string> and delegates to internal implementation.
+ */
+const emitSelections = (selections, indent, variables, schema) => {
+  const variableNames = new Set(variables.map((v) => v.name));
+  return emitSelectionsInternal(selections, indent, variableNames, schema);
+};
+/**
+ * Internal implementation for emitting field selections.
+ * Takes variableNames as Set<string> for recursive calls.
+ */
+const emitSelectionsInternal = (selections, indent, variableNames, schema) => {
+  const lines = [];
+  const inlineFragments = [];
+  const otherSelections = [];
+  for (const sel of selections) {
+    if (sel.kind === "inlineFragment") {
+      inlineFragments.push(sel);
+    } else {
+      otherSelections.push(sel);
+    }
+  }
+  for (const sel of otherSelections) {
+    const result = emitSingleSelection(sel, indent, variableNames, schema);
+    if (result.isErr()) {
+      return (0, neverthrow.err)(result.error);
+    }
+    lines.push(result.value);
+  }
+  if (inlineFragments.length > 0) {
+    const unionResult = emitInlineFragmentsAsUnion(inlineFragments, indent, variableNames, schema);
+    if (unionResult.isErr()) {
+      return (0, neverthrow.err)(unionResult.error);
+    }
+    lines.push(unionResult.value);
+  }
+  return (0, neverthrow.ok)(lines.join("\n"));
+};
+/**
+ * Emit a single selection (field or fragment spread).
+ */
+const emitSingleSelection = (sel, indent, variableNames, schema) => {
+  const padding = " ".repeat(indent);
+  switch (sel.kind) {
+    case "field": return emitFieldSelection(sel, indent, variableNames, schema);
+    case "fragmentSpread": return (0, neverthrow.ok)(`${padding}...${sel.name}Fragment.spread(),`);
+    case "inlineFragment": return (0, neverthrow.ok)("");
+  }
+};
+/**
+ * Emit inline fragments grouped as a union selection.
+ * Format: { TypeA: ({ f }) => ({ ...fields }), TypeB: ({ f }) => ({ ...fields }) }
+ */
+const emitInlineFragmentsAsUnion = (inlineFragments, indent, variableNames, schema) => {
+  const padding = " ".repeat(indent);
+  for (const frag of inlineFragments) {
+    if (frag.onType === "") {
+      return (0, neverthrow.err)({
+        code: "GRAPHQL_INLINE_FRAGMENT_WITHOUT_TYPE",
+        message: "Inline fragments without type condition are not supported. Use `... on TypeName { }` syntax."
+      });
+    }
+  }
+  for (const frag of inlineFragments) {
+    if (schema && !schema.objects.has(frag.onType)) {
+      let isUnionMember = false;
+      for (const [, unionDef] of schema.unions) {
+        if (unionDef.members.has(frag.onType)) {
+          isUnionMember = true;
+          break;
+        }
+      }
+      if (!isUnionMember) {
+        return (0, neverthrow.err)({
+          code: "GRAPHQL_INLINE_FRAGMENT_ON_INTERFACE",
+          message: `Inline fragments on interface type "${frag.onType}" are not supported. Use union types instead.`,
+          onType: frag.onType
+        });
+      }
+    }
+  }
+  const entries = [];
+  for (const frag of inlineFragments) {
+    const innerPadding = " ".repeat(indent + 1);
+    const fieldsResult = emitSelectionsInternal(frag.selections, indent + 2, variableNames, schema);
+    if (fieldsResult.isErr()) {
+      return (0, neverthrow.err)(fieldsResult.error);
+    }
+    entries.push(`${innerPadding}${frag.onType}: ({ f }) => ({
+${fieldsResult.value}
+${innerPadding}}),`);
+  }
+  return (0, neverthrow.ok)(`${padding}...({
+${entries.join("\n")}
+${padding}}),`);
+};
+/**
+ * Emit a single field selection.
+ */
+const emitFieldSelection = (field, indent, variableNames, schema) => {
+  const padding = " ".repeat(indent);
+  const args = field.arguments;
+  const selections = field.selections;
+  const hasArgs = args && args.length > 0;
+  const hasSelections = selections && selections.length > 0;
+  let line = `${padding}...f.${field.name}(`;
+  if (hasArgs) {
+    const argsResult = emitArguments(args, variableNames);
+    if (argsResult.isErr()) {
+      return (0, neverthrow.err)(argsResult.error);
+    }
+    line += argsResult.value;
+  }
+  line += ")";
+  if (hasSelections) {
+    const hasInlineFragments = selections.some((s) => s.kind === "inlineFragment");
+    if (hasInlineFragments) {
+      const nestedResult = emitSelectionsInternal(selections, indent + 1, variableNames, schema);
+      if (nestedResult.isErr()) {
+        return (0, neverthrow.err)(nestedResult.error);
+      }
+      line += "({\n";
+      line += `${nestedResult.value}\n`;
+      line += `${padding}})`;
+    } else {
+      line += "(({ f }) => ({\n";
+      const nestedResult = emitSelectionsInternal(selections, indent + 1, variableNames, schema);
+      if (nestedResult.isErr()) {
+        return (0, neverthrow.err)(nestedResult.error);
+      }
+      line += `${nestedResult.value}\n`;
+      line += `${padding}}))`;
+    }
+  }
+  line += ",";
+  return (0, neverthrow.ok)(line);
+};
+/**
+ * Emit field arguments.
+ */
+const emitArguments = (args, variableNames) => {
+  if (args.length === 0) {
+    return (0, neverthrow.ok)("");
+  }
+  const argEntries = [];
+  for (const arg of args) {
+    const result = emitValue(arg.value, variableNames);
+    if (result.isErr()) {
+      return (0, neverthrow.err)(result.error);
+    }
+    argEntries.push(`${arg.name}: ${result.value}`);
+  }
+  return (0, neverthrow.ok)(`{ ${argEntries.join(", ")} }`);
+};
+/**
+ * Emit a value (literal or variable reference).
+ */
+const emitValue = (value, variableNames) => {
+  switch (value.kind) {
+    case "variable":
+      if (variableNames.has(value.name)) {
+        return (0, neverthrow.ok)(`$.${value.name}`);
+      }
+      return (0, neverthrow.err)({
+        code: "GRAPHQL_UNDECLARED_VARIABLE",
+        message: `Variable "$${value.name}" is not declared in the operation`,
+        variableName: value.name
+      });
+    case "int":
+    case "float": return (0, neverthrow.ok)(value.value);
+    case "string": return (0, neverthrow.ok)(JSON.stringify(value.value));
+    case "boolean": return (0, neverthrow.ok)(value.value ? "true" : "false");
+    case "null": return (0, neverthrow.ok)("null");
+    case "enum": return (0, neverthrow.ok)(JSON.stringify(value.value));
+    case "list": {
+      const values = [];
+      for (const v of value.values) {
+        const result = emitValue(v, variableNames);
+        if (result.isErr()) {
+          return (0, neverthrow.err)(result.error);
+        }
+        values.push(result.value);
+      }
+      return (0, neverthrow.ok)(`[${values.join(", ")}]`);
+    }
+    case "object": {
+      if (value.fields.length === 0) {
+        return (0, neverthrow.ok)("{}");
+      }
+      const entries = [];
+      for (const f of value.fields) {
+        const result = emitValue(f.value, variableNames);
+        if (result.isErr()) {
+          return (0, neverthrow.err)(result.error);
+        }
+        entries.push(`${f.name}: ${result.value}`);
+      }
+      return (0, neverthrow.ok)(`{ ${entries.join(", ")} }`);
+    }
+  }
+};
+
+//#endregion
+//#region packages/codegen/src/graphql-compat/parser.ts
+/**
+ * Parser for .graphql operation files.
+ * Extracts operations and fragments from GraphQL documents.
+ * @module
+ */
+/**
+ * Parse a single .graphql file and extract operations and fragments.
+ */
+const parseGraphqlFile = (filePath) => {
+  const resolvedPath = (0, node_path.resolve)(filePath);
+  if (!(0, node_fs.existsSync)(resolvedPath)) {
+    return (0, neverthrow.err)({
+      code: "GRAPHQL_FILE_NOT_FOUND",
+      message: `GraphQL file not found at ${resolvedPath}`,
+      filePath: resolvedPath
+    });
+  }
+  try {
+    const source = (0, node_fs.readFileSync)(resolvedPath, "utf8");
+    const document = (0, graphql.parse)(source);
+    return (0, neverthrow.ok)(extractFromDocument(document, resolvedPath));
+  } catch (error) {
+    const message = error instanceof Error ? error.message : String(error);
+    return (0, neverthrow.err)({
+      code: "GRAPHQL_PARSE_ERROR",
+      message: `GraphQL parse error: ${message}`,
+      filePath: resolvedPath
+    });
+  }
+};
+/**
+ * Parse GraphQL source string directly.
+ */
+const parseGraphqlSource = (source, sourceFile) => {
+  try {
+    const document = (0, graphql.parse)(source);
+    return (0, neverthrow.ok)(extractFromDocument(document, sourceFile));
+  } catch (error) {
+    const message = error instanceof Error ? error.message : String(error);
+    return (0, neverthrow.err)({
+      code: "GRAPHQL_PARSE_ERROR",
+      message: `GraphQL parse error: ${message}`,
+      filePath: sourceFile
+    });
+  }
+};
+/**
+ * Extract operations and fragments from a parsed GraphQL document.
+ */
+const extractFromDocument = (document, sourceFile) => {
+  const operations = [];
+  const fragments = [];
+  for (const definition of document.definitions) {
+    if (definition.kind === graphql.Kind.OPERATION_DEFINITION) {
+      const operation = extractOperation(definition, sourceFile);
+      if (operation) {
+        operations.push(operation);
+      }
+    } else if (definition.kind === graphql.Kind.FRAGMENT_DEFINITION) {
+      fragments.push(extractFragment(definition, sourceFile));
+    }
+  }
+  return {
+    operations,
+    fragments
+  };
+};
+/**
+ * Extract a single operation from an OperationDefinitionNode.
+ */
+const extractOperation = (node, sourceFile) => {
+  if (!node.name) {
+    return null;
+  }
+  const variables = (node.variableDefinitions ?? []).map(extractVariable);
+  const selections = extractSelections(node.selectionSet.selections);
+  return {
+    kind: node.operation,
+    name: node.name.value,
+    variables,
+    selections,
+    sourceFile
+  };
+};
+/**
+ * Extract a fragment from a FragmentDefinitionNode.
+ */
+const extractFragment = (node, sourceFile) => {
+  const selections = extractSelections(node.selectionSet.selections);
+  return {
+    name: node.name.value,
+    onType: node.typeCondition.name.value,
+    selections,
+    sourceFile
+  };
+};
+/**
+ * Extract a variable definition.
+ */
+const extractVariable = (node) => {
+  const { typeName, modifier } = parseTypeNode(node.type);
+  const defaultValue = node.defaultValue ? extractValue(node.defaultValue) : undefined;
+  return {
+    name: node.variable.name.value,
+    typeName,
+    modifier,
+    typeKind: "scalar",
+    defaultValue
+  };
+};
+/**
+ * Parse a GraphQL TypeNode into type name and modifier.
+ *
+ * Format: inner nullability + list modifiers
+ * - Inner: `!` (non-null) or `?` (nullable)
+ * - List: `[]!` (non-null list) or `[]?` (nullable list)
+ */
+const parseTypeNode = (node) => {
+  const levels = [];
+  const collect = (n, nonNull) => {
+    if (n.kind === graphql.Kind.NON_NULL_TYPE) {
+      return collect(n.type, true);
+    }
+    if (n.kind === graphql.Kind.LIST_TYPE) {
+      levels.push({
+        kind: "list",
+        nonNull
+      });
+      return collect(n.type, false);
+    }
+    levels.push({
+      kind: "named",
+      nonNull
+    });
+    return n.name.value;
+  };
+  const typeName = collect(node, false);
+  let modifier = "?";
+  for (const level of levels.slice().reverse()) {
+    if (level.kind === "named") {
+      modifier = level.nonNull ? "!" : "?";
+      continue;
+    }
+    const listSuffix = level.nonNull ? "[]!" : "[]?";
+    modifier = `${modifier}${listSuffix}`;
+  }
+  return {
+    typeName,
+    modifier
+  };
+};
+/**
+ * Extract selections from a SelectionSet.
+ */
+const extractSelections = (selections) => {
+  return selections.map(extractSelection);
+};
+/**
+ * Extract a single selection.
+ */
+const extractSelection = (node) => {
+  switch (node.kind) {
+    case graphql.Kind.FIELD: return extractFieldSelection(node);
+    case graphql.Kind.FRAGMENT_SPREAD: return extractFragmentSpread(node);
+    case graphql.Kind.INLINE_FRAGMENT: return extractInlineFragment(node);
+  }
+};
+/**
+ * Extract a field selection.
+ */
+const extractFieldSelection = (node) => {
+  const args = node.arguments?.length ? node.arguments.map(extractArgument) : undefined;
+  const selections = node.selectionSet ? extractSelections(node.selectionSet.selections) : undefined;
+  return {
+    kind: "field",
+    name: node.name.value,
+    alias: node.alias?.value,
+    arguments: args,
+    selections
+  };
+};
+/**
+ * Extract a fragment spread.
+ */
+const extractFragmentSpread = (node) => {
+  return {
+    kind: "fragmentSpread",
+    name: node.name.value
+  };
+};
+/**
+ * Extract an inline fragment.
+ */
+const extractInlineFragment = (node) => {
+  return {
+    kind: "inlineFragment",
+    onType: node.typeCondition?.name.value ?? "",
+    selections: extractSelections(node.selectionSet.selections)
+  };
+};
+/**
+ * Extract an argument.
+ */
+const extractArgument = (node) => {
+  return {
+    name: node.name.value,
+    value: extractValue(node.value)
+  };
+};
+/**
+ * Assert unreachable code path (for exhaustiveness checks).
+ */
+const assertUnreachable = (value) => {
+  throw new Error(`Unexpected value: ${JSON.stringify(value)}`);
+};
+/**
+ * Extract a value (literal or variable reference).
+ */
+const extractValue = (node) => {
+  switch (node.kind) {
+    case graphql.Kind.VARIABLE: return {
+      kind: "variable",
+      name: node.name.value
+    };
+    case graphql.Kind.INT: return {
+      kind: "int",
+      value: node.value
+    };
+    case graphql.Kind.FLOAT: return {
+      kind: "float",
+      value: node.value
+    };
+    case graphql.Kind.STRING: return {
+      kind: "string",
+      value: node.value
+    };
+    case graphql.Kind.BOOLEAN: return {
+      kind: "boolean",
+      value: node.value
+    };
+    case graphql.Kind.NULL: return { kind: "null" };
+    case graphql.Kind.ENUM: return {
+      kind: "enum",
+      value: node.value
+    };
+    case graphql.Kind.LIST: return {
+      kind: "list",
+      values: node.values.map(extractValue)
+    };
+    case graphql.Kind.OBJECT: return {
+      kind: "object",
+      fields: node.fields.map((field) => ({
+        name: field.name.value,
+        value: extractValue(field.value)
+      }))
+    };
+    default: return assertUnreachable(node);
+  }
+};
+
+//#endregion
+//#region packages/codegen/src/graphql-compat/transformer.ts
+/**
+ * Built-in GraphQL scalar types.
+ */
+const builtinScalarTypes = new Set([
+  "ID",
+  "String",
+  "Int",
+  "Float",
+  "Boolean"
+]);
+/**
+ * Check if a type name is a scalar type.
+ */
+const isScalarName = (schema, name) => builtinScalarTypes.has(name) || schema.scalars.has(name);
+/**
+ * Check if a type name is an enum type.
+ */
+const isEnumName = (schema, name) => schema.enums.has(name);
+/**
+ * Transform parsed operations/fragments by enriching them with schema information.
+ *
+ * This resolves variable type kinds (scalar, enum, input) and collects
+ * fragment dependencies.
+ */
+const transformParsedGraphql = (parsed, options) => {
+  const schema = require_generator.createSchemaIndex(options.schemaDocument);
+  const operations = [];
+  for (const op of parsed.operations) {
+    const result = transformOperation(op, schema);
+    if (result.isErr()) {
+      return (0, neverthrow.err)(result.error);
+    }
+    operations.push(result.value);
+  }
+  const fragments = [];
+  for (const frag of parsed.fragments) {
+    const result = transformFragment(frag, schema);
+    if (result.isErr()) {
+      return (0, neverthrow.err)(result.error);
+    }
+    fragments.push(result.value);
+  }
+  return (0, neverthrow.ok)({
+    operations,
+    fragments
+  });
+};
+/**
+ * Transform a single operation.
+ */
+const transformOperation = (op, schema) => {
+  const variables = [];
+  for (const v of op.variables) {
+    const typeKind = resolveTypeKind(schema, v.typeName);
+    if (typeKind === null) {
+      return (0, neverthrow.err)({
+        code: "GRAPHQL_UNKNOWN_TYPE",
+        message: `Unknown type "${v.typeName}" in variable "${v.name}"`,
+        typeName: v.typeName
+      });
+    }
+    variables.push({
+      ...v,
+      typeKind
+    });
+  }
+  const fragmentDependencies = collectFragmentDependencies(op.selections);
+  return (0, neverthrow.ok)({
+    ...op,
+    variables,
+    fragmentDependencies
+  });
+};
+/**
+ * Transform a single fragment.
+ */
+const transformFragment = (frag, _schema) => {
+  const fragmentDependencies = collectFragmentDependencies(frag.selections);
+  return (0, neverthrow.ok)({
+    ...frag,
+    fragmentDependencies
+  });
+};
+/**
+ * Resolve the type kind for a type name.
+ */
+const resolveTypeKind = (schema, typeName) => {
+  if (isScalarName(schema, typeName)) {
+    return "scalar";
+  }
+  if (isEnumName(schema, typeName)) {
+    return "enum";
+  }
+  if (schema.inputs.has(typeName)) {
+    return "input";
+  }
+  return null;
+};
+/**
+ * Collect fragment names used in selections (recursively).
+ */
+const collectFragmentDependencies = (selections) => {
+  const fragments = new Set();
+  const collect = (sels) => {
+    for (const sel of sels) {
+      switch (sel.kind) {
+        case "fragmentSpread":
+          fragments.add(sel.name);
+          break;
+        case "field":
+          if (sel.selections) {
+            collect(sel.selections);
+          }
+          break;
+        case "inlineFragment":
+          collect(sel.selections);
+          break;
+      }
+    }
+  };
+  collect(selections);
+  return [...fragments];
+};
+
+//#endregion
 //#region packages/codegen/src/inject-template.ts
 const templateContents = `\
 import { defineScalar } from "@soda-gql/core";
@@ -73,6 +737,170 @@ const esbuildBundler = {
   }
 };
 
+//#endregion
+//#region packages/codegen/src/defs-generator.ts
+/**
+ * Split an array into chunks of the specified size.
+ */
+const chunkArray = (array, size) => {
+  if (size <= 0) {
+    return [Array.from(array)];
+  }
+  const result = [];
+  for (let i = 0; i < array.length; i += size) {
+    result.push(array.slice(i, i + size));
+  }
+  return result;
+};
+/**
+ * Determine if chunking is needed based on the number of definitions.
+ */
+const needsChunking = (vars, chunkSize) => {
+  return vars.length > chunkSize;
+};
+/**
+ * Generate a single definition file content.
+ */
+const generateDefinitionFile = (options) => {
+  const { category, vars, needsDefineEnum } = options;
+  if (vars.length === 0) {
+    return `/**
+* ${category} definitions (empty)
+* @generated by @soda-gql/codegen
+*/
+`;
+  }
+  const imports = [];
+  if (needsDefineEnum && category === "enums") {
+    imports.push("import { defineEnum } from \"@soda-gql/core\";");
+  }
+  const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
+  const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
+  return `/**
+* ${category} definitions
+* @generated by @soda-gql/codegen
+*/
+${importsBlock}${exportStatements}
+`;
+};
+/**
+ * Generate a chunk file content.
+ */
+const generateChunkFile = (options) => {
+  const { category, vars, chunkIndex, needsDefineEnum } = options;
+  if (vars.length === 0) {
+    return `/**
+* ${category} chunk ${chunkIndex} (empty)
+* @generated by @soda-gql/codegen
+*/
+`;
+  }
+  const imports = [];
+  if (needsDefineEnum && category === "enums") {
+    imports.push("import { defineEnum } from \"@soda-gql/core\";");
+  }
+  const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
+  const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
+  return `/**
+* ${category} chunk ${chunkIndex}
+* @generated by @soda-gql/codegen
+*/
+${importsBlock}${exportStatements}
+`;
+};
+/**
+ * Generate the index file that re-exports all chunks.
+ */
+const generateChunkIndex = (options) => {
+  const { category, chunkCount } = options;
+  const reExports = Array.from({ length: chunkCount }, (_, i) => `export * from "./chunk-${i}";`).join("\n");
+  return `/**
+* ${category} index (re-exports all chunks)
+* @generated by @soda-gql/codegen
+*/
+${reExports}
+`;
+};
+/**
+ * Generate chunked definition files.
+ */
+const generateChunkedDefinitionFiles = (category, schemaName, vars, chunkSize) => {
+  const chunks = chunkArray(vars, chunkSize);
+  const needsDefineEnum = category === "enums";
+  const chunkContents = chunks.map((chunkVars, chunkIndex) => ({
+    chunkIndex,
+    content: generateChunkFile({
+      category,
+      schemaName,
+      vars: chunkVars,
+      chunkIndex,
+      needsDefineEnum
+    }),
+    varNames: chunkVars.map((v) => v.name)
+  }));
+  const allVarNames = vars.map((v) => v.name);
+  const indexContent = generateChunkIndex({
+    category,
+    chunkCount: chunks.length,
+    varNames: allVarNames
+  });
+  return {
+    indexContent,
+    chunks: chunkContents
+  };
+};
+/**
+ * Generate the complete _defs directory structure.
+ */
+const generateDefsStructure = (schemaName, categoryVars, chunkSize) => {
+  const files = [];
+  const importPaths = {
+    enums: "./_defs/enums",
+    inputs: "./_defs/inputs",
+    objects: "./_defs/objects",
+    unions: "./_defs/unions"
+  };
+  const categories = [
+    "enums",
+    "inputs",
+    "objects",
+    "unions"
+  ];
+  for (const category of categories) {
+    const vars = categoryVars[category];
+    const needsDefineEnum = category === "enums";
+    if (needsChunking(vars, chunkSize)) {
+      const chunked = generateChunkedDefinitionFiles(category, schemaName, vars, chunkSize);
+      importPaths[category] = `./_defs/${category}`;
+      files.push({
+        relativePath: `_defs/${category}/index.ts`,
+        content: chunked.indexContent
+      });
+      for (const chunk of chunked.chunks) {
+        files.push({
+          relativePath: `_defs/${category}/chunk-${chunk.chunkIndex}.ts`,
+          content: chunk.content
+        });
+      }
+    } else {
+      const content = generateDefinitionFile({
+        category,
+        schemaName,
+        vars,
+        needsDefineEnum
+      });
+      files.push({
+        relativePath: `_defs/${category}.ts`,
+        content
+      });
+    }
+  }
+  return {
+    files,
+    importPaths
+  };
+};
+
 //#endregion
 //#region packages/codegen/src/file.ts
 const writeModule = (outPath, contents) => {
@@ -226,10 +1054,12 @@ const runCodegen = async (options) => {
       inputDepthOverridesConfig.set(schemaName, schemaConfig.inputDepthOverrides);
     }
   }
-  const
+  const chunkSize = options.chunkSize ?? 100;
+  const { code: internalCode, injectsCode, categoryVars } = require_generator.generateMultiSchemaModule(schemas, {
     injection: injectionConfig,
     defaultInputDepth: defaultInputDepthConfig.size > 0 ? defaultInputDepthConfig : undefined,
-    inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined
+    inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined,
+    chunkSize
   });
   const indexCode = `/**
 * Generated by @soda-gql/codegen
@@ -239,7 +1069,7 @@ const runCodegen = async (options) => {
 export * from "./_internal";
 `;
   for (const [name, document] of schemas.entries()) {
-    const schemaIndex = (await Promise.resolve().then(() => require("./generator-
+    const schemaIndex = (await Promise.resolve().then(() => require("./generator-0VPnXv1q.cjs"))).createSchemaIndex(document);
     const objects = Array.from(schemaIndex.objects.keys()).filter((n) => !n.startsWith("__")).length;
     const enums = Array.from(schemaIndex.enums.keys()).filter((n) => !n.startsWith("__")).length;
     const inputs = Array.from(schemaIndex.inputs.keys()).filter((n) => !n.startsWith("__")).length;
@@ -259,6 +1089,31 @@ export * from "./_internal";
       return (0, neverthrow.err)(injectsWriteResult.error);
     }
   }
+  const defsPaths = [];
+  if (categoryVars) {
+    const outDir = (0, node_path.dirname)(outPath);
+    const combinedVars = {
+      enums: [],
+      inputs: [],
+      objects: [],
+      unions: []
+    };
+    for (const vars of Object.values(categoryVars)) {
+      combinedVars.enums.push(...vars.enums);
+      combinedVars.inputs.push(...vars.inputs);
+      combinedVars.objects.push(...vars.objects);
+      combinedVars.unions.push(...vars.unions);
+    }
+    const defsStructure = generateDefsStructure("combined", combinedVars, chunkSize);
+    for (const file of defsStructure.files) {
+      const filePath = (0, node_path.join)(outDir, file.relativePath);
+      const writeResult = await writeModule(filePath, file.content).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
+      if (writeResult.isErr()) {
+        return (0, neverthrow.err)(writeResult.error);
+      }
+      defsPaths.push(filePath);
+    }
+  }
   const internalPath = (0, node_path.join)((0, node_path.dirname)(outPath), "_internal.ts");
   const internalWriteResult = await writeModule(internalPath, internalCode).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
   if (internalWriteResult.isErr()) {
@@ -281,13 +1136,20 @@ export * from "./_internal";
     outPath,
     internalPath,
     injectsPath,
-    cjsPath: bundleResult.value.cjsPath
+    cjsPath: bundleResult.value.cjsPath,
+    ...defsPaths.length > 0 ? { defsPaths } : {}
   });
 };
 
 //#endregion
+exports.emitFragment = emitFragment;
+exports.emitOperation = emitOperation;
 exports.hashSchema = hashSchema;
 exports.loadSchema = loadSchema;
+exports.parseGraphqlFile = parseGraphqlFile;
+exports.parseGraphqlSource = parseGraphqlSource;
+exports.parseTypeNode = parseTypeNode;
 exports.runCodegen = runCodegen;
+exports.transformParsedGraphql = transformParsedGraphql;
 exports.writeInjectTemplate = writeInjectTemplate;
 //# sourceMappingURL=index.cjs.map
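
Taken together, the new exports in index.cjs form a parse → transform → emit pipeline for .graphql compat files (parseGraphqlFile / parseGraphqlSource → transformParsedGraphql → emitOperation / emitFragment), with each step returning a neverthrow Result. The following is a minimal sketch of how these exports might be chained, based only on the call signatures visible in this diff; the schema file path, schemaName value, graphqlSystemPath value, and operation file name below are placeholder assumptions, not documented defaults.

// Hypothetical usage of the functions exported as of 0.11.12; not documented API.
const { parseGraphqlFile, transformParsedGraphql, emitOperation } = require("@soda-gql/codegen");
const { parse } = require("graphql");
const { readFileSync } = require("node:fs");

// Assumed: a schema SDL file at this path.
const schemaDocument = parse(readFileSync("./schema.graphql", "utf8"));

const emitted = parseGraphqlFile("./operations/getUser.graphql")
  // Enrich variables with type kinds (scalar/enum/input) against the schema.
  .andThen((parsed) => transformParsedGraphql(parsed, { schemaDocument }))
  // Emit TypeScript source for the first operation found in the file.
  .andThen((enriched) =>
    emitOperation(enriched.operations[0], {
      schemaDocument,
      schemaName: "default", // assumed schema name
      graphqlSystemPath: "../graphql-system" // assumed import path for the generated gql entry
    })
  );

emitted.match(
  (code) => console.log(code),
  (error) => console.error(`${error.code}: ${error.message}`)
);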