@soda-gql/tools 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -0
- package/codegen.d.ts +2 -0
- package/codegen.js +1 -0
- package/dist/bin.cjs +15509 -0
- package/dist/bin.cjs.map +1 -0
- package/dist/bin.d.cts +839 -0
- package/dist/bin.d.cts.map +1 -0
- package/dist/chunk-BrXtsOCC.cjs +41 -0
- package/dist/codegen.cjs +4704 -0
- package/dist/codegen.cjs.map +1 -0
- package/dist/codegen.d.cts +416 -0
- package/dist/codegen.d.cts.map +1 -0
- package/dist/codegen.d.mts +416 -0
- package/dist/codegen.d.mts.map +1 -0
- package/dist/codegen.mjs +4712 -0
- package/dist/codegen.mjs.map +1 -0
- package/dist/formatter-Glj5a663.cjs +510 -0
- package/dist/formatter-Glj5a663.cjs.map +1 -0
- package/dist/formatter.cjs +509 -0
- package/dist/formatter.cjs.map +1 -0
- package/dist/formatter.d.cts +33 -0
- package/dist/formatter.d.cts.map +1 -0
- package/dist/formatter.d.mts +33 -0
- package/dist/formatter.d.mts.map +1 -0
- package/dist/formatter.mjs +507 -0
- package/dist/formatter.mjs.map +1 -0
- package/dist/index.cjs +13 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +11 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +11 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +12 -0
- package/dist/index.mjs.map +1 -0
- package/dist/typegen.cjs +864 -0
- package/dist/typegen.cjs.map +1 -0
- package/dist/typegen.d.cts +205 -0
- package/dist/typegen.d.cts.map +1 -0
- package/dist/typegen.d.mts +205 -0
- package/dist/typegen.d.mts.map +1 -0
- package/dist/typegen.mjs +859 -0
- package/dist/typegen.mjs.map +1 -0
- package/formatter.d.ts +2 -0
- package/formatter.js +1 -0
- package/index.d.ts +2 -0
- package/index.js +1 -0
- package/package.json +102 -0
- package/typegen.d.ts +2 -0
- package/typegen.js +1 -0
package/dist/typegen.mjs
ADDED
|
@@ -0,0 +1,859 @@
|
|
|
1
|
+
import { err, ok } from "neverthrow";
|
|
2
|
+
import { buildFieldsFromSelectionSet, calculateFieldsType, createSchemaIndexFromSchema, extractFragmentVariables, generateInputObjectType, generateInputType, generateInputTypeFromVarDefs, parseInputSpecifier, preprocessFragmentArgs } from "@soda-gql/core";
|
|
3
|
+
import { Kind, parse } from "graphql";
|
|
4
|
+
import { existsSync, readFileSync } from "node:fs";
|
|
5
|
+
import { dirname, extname, join, normalize, relative, resolve } from "node:path";
|
|
6
|
+
import { walkAndExtract } from "@soda-gql/common/template-extraction";
|
|
7
|
+
import { parseSync } from "@swc/core";
|
|
8
|
+
import { writeFile } from "node:fs/promises";
|
|
9
|
+
import { builderErrors, createBuilderService, createGraphqlSystemIdentifyHelper, extractFieldSelections, loadSchemasFromBundle } from "@soda-gql/builder";
|
|
10
|
+
import fg from "fast-glob";
|
|
11
|
+
|
|
12
|
+
//#region packages/tools/src/typegen/emitter.ts
|
|
13
|
+
/**
|
|
14
|
+
* Prebuilt types emitter.
|
|
15
|
+
*
|
|
16
|
+
* Generates TypeScript type definitions for PrebuiltTypes registry
|
|
17
|
+
* from field selection data and schema.
|
|
18
|
+
*
|
|
19
|
+
* ## Error Handling Strategy
|
|
20
|
+
*
|
|
21
|
+
* The emitter uses a partial failure approach for type calculation errors:
|
|
22
|
+
*
|
|
23
|
+
* **Recoverable errors** (result in warnings, element skipped):
|
|
24
|
+
* - Type calculation failures (e.g., `calculateFieldsType` throws)
|
|
25
|
+
* - Input type generation failures (e.g., `generateInputType` throws)
|
|
26
|
+
* - These are caught per-element, logged as warnings, and the element is omitted
|
|
27
|
+
*
|
|
28
|
+
* **Fatal errors** (result in error result):
|
|
29
|
+
* - `SCHEMA_NOT_FOUND`: Selection references non-existent schema
|
|
30
|
+
* - `WRITE_FAILED`: Cannot write output file to disk
|
|
31
|
+
*
|
|
32
|
+
* This allows builds to succeed with partial type coverage when some elements
|
|
33
|
+
* have issues, while providing visibility into problems via warnings.
|
|
34
|
+
*
|
|
35
|
+
* @module
|
|
36
|
+
*/
|
|
37
|
+
/**
|
|
38
|
+
* Group field selections by schema.
|
|
39
|
+
* Uses the schemaLabel from each selection to group them correctly.
|
|
40
|
+
*
|
|
41
|
+
* Fragments without a 'key' property are skipped (not included in PrebuiltTypes)
|
|
42
|
+
* and a warning is added. This allows projects to use fragments without keys
|
|
43
|
+
* while still generating prebuilt types for those that have keys.
|
|
44
|
+
*
|
|
45
|
+
* @returns Result containing grouped selections and warnings, or error if schema not found
|
|
46
|
+
*/
|
|
47
|
+
/**
 * Group field selections by their schema label, computing input/output type
 * strings for each fragment and operation along the way.
 *
 * Partial-failure contract (matches the module header):
 * - Fragments without a `key` are skipped with a warning (counted).
 * - Per-element type-calculation failures are caught, logged as a warning,
 *   and the element is omitted.
 * - An unknown schema label is fatal and aborts with SCHEMA_NOT_FOUND.
 *
 * @param fieldSelections - Map of canonical IDs to selection data
 * @param schemas - Loaded schema objects keyed by schema name
 * @returns ok({ grouped, warnings, skippedFragmentCount }) or err(schemaNotFound)
 */
const groupBySchema = (fieldSelections, schemas) => {
  const grouped = new Map();
  const warnings = [];
  let skippedFragmentCount = 0;
  // Pre-seed an empty group per schema so every known schema appears in the
  // output even when it has no selections.
  for (const schemaName of Object.keys(schemas)) {
    grouped.set(schemaName, {
      fragments: [],
      operations: [],
      inputObjects: new Set()
    });
  }
  for (const [canonicalId, selection] of fieldSelections) {
    const schemaName = selection.schemaLabel;
    const schema = schemas[schemaName];
    const group = grouped.get(schemaName);
    // Unknown schema label is a hard error: the selection cannot be typed.
    if (!schema || !group) {
      return err(builderErrors.schemaNotFound(schemaName, canonicalId));
    }
    // Formatters name the per-schema scalar/input helper types that
    // generateTypesCode emits later; the names must stay in sync with it.
    const outputFormatters = { scalarOutput: (name) => `ScalarOutput_${schemaName}<"${name}">` };
    const inputFormatters = {
      scalarInput: (name) => `ScalarInput_${schemaName}<"${name}">`,
      inputObject: (name) => `Input_${schemaName}_${name}`
    };
    if (selection.type === "fragment") {
      // Fragments are keyed into the registry by `key`; without one the
      // element cannot be registered, so skip it with guidance.
      if (!selection.key) {
        skippedFragmentCount++;
        warnings.push(`[prebuilt] Fragment "${canonicalId}" skipped: missing 'key' property. ` + `Use tagged template syntax fragment("Name", "Type")\`{ ... }\` to auto-assign a key, ` + `or set 'key' explicitly in the callback builder.`);
        continue;
      }
      try {
        // Record every input object the fragment's variables reference so
        // their type aliases get emitted for this schema.
        const usedInputObjects = collectUsedInputObjectsFromVarDefs(schema, selection.variableDefinitions);
        for (const inputName of usedInputObjects) {
          group.inputObjects.add(inputName);
        }
        const outputType = calculateFieldsType(schema, selection.fields, outputFormatters, selection.typename);
        // Fragment variableDefinitions is a plain object of VarSpecifiers;
        // no variables means the input type is `void`.
        const hasVariables = Object.keys(selection.variableDefinitions).length > 0;
        const inputType = hasVariables ? generateInputTypeFromVarDefs(schema, selection.variableDefinitions, { formatters: inputFormatters }) : "void";
        group.fragments.push({
          key: selection.key,
          typename: selection.typename,
          inputType,
          outputType
        });
      } catch (error) {
        // Recoverable: drop this fragment but keep building the rest.
        warnings.push(`[prebuilt] Failed to calculate type for fragment "${selection.key}": ${error instanceof Error ? error.message : String(error)}`);
      }
    } else if (selection.type === "operation") {
      try {
        // Operation variableDefinitions is an array of GraphQL AST nodes,
        // hence the different collector from the fragment branch.
        const usedInputObjects = collectUsedInputObjects(schema, selection.variableDefinitions);
        for (const inputName of usedInputObjects) {
          group.inputObjects.add(inputName);
        }
        const rootTypeName = schema.operations[selection.operationType];
        const outputType = calculateFieldsType(schema, selection.fields, outputFormatters, rootTypeName ?? undefined);
        const inputType = generateInputType(schema, selection.variableDefinitions, inputFormatters);
        group.operations.push({
          key: selection.operationName,
          inputType,
          outputType
        });
      } catch (error) {
        // Recoverable: drop this operation but keep building the rest.
        warnings.push(`[prebuilt] Failed to calculate type for operation "${selection.operationName}": ${error instanceof Error ? error.message : String(error)}`);
      }
    }
  }
  return ok({
    grouped,
    warnings,
    skippedFragmentCount
  });
};
|
|
118
|
+
/**
|
|
119
|
+
* Extract input object names from a GraphQL TypeNode.
|
|
120
|
+
*/
|
|
121
|
+
/**
 * Walk a GraphQL TypeNode and record the names of any input-object types it
 * refers to, unwrapping non-null and list wrappers along the way.
 *
 * @param schema - Schema object with `scalar`, `enum`, and `input` registries
 * @param typeNode - GraphQL AST type node to inspect
 * @param inputObjects - Set that collected input-object names are added to
 */
const extractInputObjectsFromType = (schema, typeNode, inputObjects) => {
  // Wrapper types carry the interesting type in `.type`; recurse into it.
  if (typeNode.kind === Kind.NON_NULL_TYPE || typeNode.kind === Kind.LIST_TYPE) {
    extractInputObjectsFromType(schema, typeNode.type, inputObjects);
    return;
  }
  if (typeNode.kind === Kind.NAMED_TYPE) {
    const typeName = typeNode.name.value;
    // Only genuine input objects count; scalars and enums are excluded.
    const isInputObject = !schema.scalar[typeName] && !schema.enum[typeName] && Boolean(schema.input[typeName]);
    if (isInputObject) {
      inputObjects.add(typeName);
    }
  }
};
|
|
138
|
+
/**
|
|
139
|
+
* Recursively collect nested input objects from schema definitions.
|
|
140
|
+
* Takes a set of initial input names and expands to include all nested inputs.
|
|
141
|
+
*/
|
|
142
|
+
/**
 * Expand a set of input-object names to include every input object reachable
 * transitively through their field definitions.
 *
 * @param schema - Schema object whose `input` registry is traversed
 * @param initialInputNames - Seed set of input-object names
 * @returns A new Set containing the seeds plus all nested input objects
 */
const collectNestedInputObjects = (schema, initialInputNames) => {
  const result = new Set(initialInputNames);
  // Depth-first walk; `visited` guards against cycles among input objects.
  const visit = (name, visited) => {
    if (visited.has(name)) {
      return;
    }
    visited.add(name);
    const definition = schema.input[name];
    if (!definition) {
      return;
    }
    for (const fieldSpec of Object.values(definition.fields)) {
      const parsedSpec = parseInputSpecifier(fieldSpec);
      if (parsedSpec.kind !== "input" || result.has(parsedSpec.name)) {
        continue;
      }
      result.add(parsedSpec.name);
      visit(parsedSpec.name, visited);
    }
  };
  for (const seedName of [...initialInputNames]) {
    visit(seedName, new Set());
  }
  return result;
};
|
|
166
|
+
/**
|
|
167
|
+
* Collect all input object types used in variable definitions.
|
|
168
|
+
* Recursively collects nested input objects from the schema.
|
|
169
|
+
*/
|
|
170
|
+
/**
 * Collect every input-object type referenced by operation variable
 * definitions (GraphQL AST nodes), including nested input objects.
 *
 * @param schema - Schema object
 * @param variableDefinitions - Array of GraphQL VariableDefinition AST nodes
 * @returns Set of all directly and transitively used input-object names
 */
const collectUsedInputObjects = (schema, variableDefinitions) => {
  const direct = new Set();
  variableDefinitions.forEach((varDef) => {
    extractInputObjectsFromType(schema, varDef.type, direct);
  });
  return collectNestedInputObjects(schema, direct);
};
|
|
177
|
+
/**
|
|
178
|
+
* Collect all input object types used in InputTypeSpecifiers.
|
|
179
|
+
* Recursively collects nested input objects from the schema.
|
|
180
|
+
*/
|
|
181
|
+
/**
 * Collect every input-object type referenced by a record of
 * InputTypeSpecifier strings, including nested input objects.
 *
 * @param schema - Schema object
 * @param specifiers - Record of specifier strings to parse
 * @returns Set of all directly and transitively used input-object names
 */
const _collectUsedInputObjectsFromSpecifiers = (schema, specifiers) => {
  const direct = new Set();
  for (const rawSpec of Object.values(specifiers)) {
    const parsedSpec = parseInputSpecifier(rawSpec);
    // Only record specifiers that name an input object the schema knows.
    const isKnownInput = parsedSpec.kind === "input" && Boolean(schema.input[parsedSpec.name]);
    if (isKnownInput) {
      direct.add(parsedSpec.name);
    }
  }
  return collectNestedInputObjects(schema, direct);
};
|
|
191
|
+
/**
|
|
192
|
+
* Collect all input object types used in VariableDefinitions (VarSpecifier objects).
|
|
193
|
+
* Used for fragment variable definitions.
|
|
194
|
+
*/
|
|
195
|
+
/**
 * Collect every input-object type referenced by fragment variable
 * definitions (VarSpecifier objects), including nested input objects.
 *
 * @param schema - Schema object
 * @param varDefs - Record of VarSpecifier objects ({ kind, name, ... })
 * @returns Set of all directly and transitively used input-object names
 */
const collectUsedInputObjectsFromVarDefs = (schema, varDefs) => {
  const direct = new Set();
  for (const varSpec of Object.values(varDefs)) {
    // Only specifiers naming a known input object are seeds for expansion.
    if (varSpec.kind !== "input") {
      continue;
    }
    if (schema.input[varSpec.name]) {
      direct.add(varSpec.name);
    }
  }
  return collectNestedInputObjects(schema, direct);
};
|
|
204
|
+
/**
|
|
205
|
+
* Generate type definitions for input objects.
|
|
206
|
+
*/
|
|
207
|
+
/**
 * Generate one `export type Input_<schema>_<name> = ...;` line per input
 * object, sorted by name for deterministic output.
 *
 * @param schema - Schema object (may carry `__defaultInputDepth` /
 *   `__inputDepthOverrides` tuning knobs)
 * @param schemaName - Schema label used in the generated type names
 * @param inputNames - Iterable of input-object names to emit
 * @returns Array of TypeScript type-alias source lines
 */
const generateInputObjectTypeDefinitions = (schema, schemaName, inputNames) => {
  // Depth limits bound recursion when expanding nested input objects.
  const defaultDepth = schema.__defaultInputDepth ?? 3;
  const depthOverrides = schema.__inputDepthOverrides ?? {};
  const formatters = {
    scalarInput: (name) => `ScalarInput_${schemaName}<"${name}">`,
    inputObject: (name) => `Input_${schemaName}_${name}`
  };
  // Sorted for stable, diff-friendly generated files.
  return Array.from(inputNames).sort().map((inputName) => {
    const typeString = generateInputObjectType(schema, inputName, {
      defaultDepth,
      depthOverrides,
      formatters
    });
    return `export type Input_${schemaName}_${inputName} = ${typeString};`;
  });
};
|
|
226
|
+
/**
|
|
227
|
+
* Generate the TypeScript code for prebuilt types.
|
|
228
|
+
*/
|
|
229
|
+
/**
 * Render the full TypeScript source of the prebuilt-types file.
 *
 * Output layout: file header, imports, per-schema scalar helper types, then
 * per-schema input-object aliases plus a `PrebuiltTypes_<schema>` registry
 * with sorted, de-duplicated fragment and operation entries, each followed by
 * an AssertExtends check against PrebuiltTypeRegistry.
 *
 * @param grouped - Map of schema name to { fragments, operations, inputObjects }
 * @param schemas - Loaded schema objects keyed by schema name
 * @param injectsModulePath - Import specifier for the scalars module
 * @returns Generated TypeScript source as a single string
 */
const generateTypesCode = (grouped, schemas, injectsModulePath) => {
  const schemaNames = Object.keys(schemas);
  const lines = [
    "/**",
    " * Prebuilt type registry.",
    " *",
    " * This file is auto-generated by @soda-gql/tools/typegen.",
    " * Do not edit manually.",
    " *",
    " * @module",
    " * @generated",
    " */",
    "",
    "import type { AssertExtends, PrebuiltTypeRegistry } from \"@soda-gql/core\";"
  ];
  // One scalar_<name> import per schema, all from the injects module.
  const scalarImports = schemaNames.map((name) => `scalar_${name}`).join(", ");
  lines.push(`import type { ${scalarImports} } from "${injectsModulePath}";`);
  lines.push("");
  // Helper types that project each scalar's declared input/output TS types.
  // Their names must match the formatters used in groupBySchema and
  // generateInputObjectTypeDefinitions.
  for (const schemaName of schemaNames) {
    lines.push(`type ScalarInput_${schemaName}<T extends keyof typeof scalar_${schemaName}> = ` + `typeof scalar_${schemaName}[T]["$type"]["input"];`);
    lines.push(`type ScalarOutput_${schemaName}<T extends keyof typeof scalar_${schemaName}> = ` + `typeof scalar_${schemaName}[T]["$type"]["output"];`);
  }
  lines.push("");
  for (const [schemaName, { fragments, operations, inputObjects }] of grouped) {
    const schema = schemas[schemaName];
    if (inputObjects.size > 0 && schema) {
      lines.push("// Input object types");
      const inputTypeLines = generateInputObjectTypeDefinitions(schema, schemaName, inputObjects);
      lines.push(...inputTypeLines);
      lines.push("");
    }
    // De-duplicate by key (last writer wins), then sort for stable output.
    const deduplicatedFragments = new Map();
    for (const f of fragments) {
      deduplicatedFragments.set(f.key, f);
    }
    const fragmentEntries = Array.from(deduplicatedFragments.values()).sort((a, b) => a.key.localeCompare(b.key)).map((f) => ` readonly "${f.key}": { readonly typename: "${f.typename}"; readonly input: ${f.inputType}; readonly output: ${f.outputType} };`);
    const deduplicatedOperations = new Map();
    for (const o of operations) {
      deduplicatedOperations.set(o.key, o);
    }
    const operationEntries = Array.from(deduplicatedOperations.values()).sort((a, b) => a.key.localeCompare(b.key)).map((o) => ` readonly "${o.key}": { readonly input: ${o.inputType}; readonly output: ${o.outputType} };`);
    lines.push(`export type PrebuiltTypes_${schemaName} = {`);
    lines.push(" readonly fragments: {");
    if (fragmentEntries.length > 0) {
      lines.push(...fragmentEntries);
    }
    lines.push(" };");
    lines.push(" readonly operations: {");
    if (operationEntries.length > 0) {
      lines.push(...operationEntries);
    }
    lines.push(" };");
    lines.push("};");
    // Compile-time guard: the generated registry must satisfy the
    // PrebuiltTypeRegistry shape or the generated file fails to type-check.
    lines.push(`type _AssertPrebuiltTypes_${schemaName} = AssertExtends<PrebuiltTypes_${schemaName}, PrebuiltTypeRegistry>;`);
    lines.push("");
  }
  return lines.join("\n");
};
|
|
287
|
+
/**
|
|
288
|
+
* Emit prebuilt types to the types.prebuilt.ts file.
|
|
289
|
+
*
|
|
290
|
+
* This function uses a partial failure strategy: if type calculation fails for
|
|
291
|
+
* individual elements (e.g., due to invalid field selections or missing schema
|
|
292
|
+
* types), those elements are skipped and warnings are collected rather than
|
|
293
|
+
* failing the entire emission. This allows builds to succeed even when some
|
|
294
|
+
* elements have issues, while still reporting problems via warnings.
|
|
295
|
+
*
|
|
296
|
+
* @param options - Emitter options including schemas, field selections, and output directory
|
|
297
|
+
* @returns Result containing output path and warnings, or error if a hard failure occurs
|
|
298
|
+
*
|
|
299
|
+
* @example
|
|
300
|
+
* ```typescript
|
|
301
|
+
* const result = await emitPrebuiltTypes({
|
|
302
|
+
* schemas: { mySchema: schema },
|
|
303
|
+
* fieldSelections,
|
|
304
|
+
* outdir: "./generated",
|
|
305
|
+
* injects: { mySchema: { scalars: "./scalars.ts" } },
|
|
306
|
+
* });
|
|
307
|
+
*
|
|
308
|
+
* if (result.isOk()) {
|
|
309
|
+
* console.log(`Generated: ${result.value.path}`);
|
|
310
|
+
* if (result.value.warnings.length > 0) {
|
|
311
|
+
* console.warn("Warnings:", result.value.warnings);
|
|
312
|
+
* }
|
|
313
|
+
* }
|
|
314
|
+
* ```
|
|
315
|
+
*/
|
|
316
|
+
/**
 * Emit the prebuilt-types file (`types.prebuilt.ts`) into `outdir`.
 *
 * Per-element type failures are reported as warnings by groupBySchema;
 * only an unknown schema or a filesystem write failure is fatal.
 *
 * @param options - { schemas, fieldSelections, outdir, injectsModulePath }
 * @returns ok({ path, warnings, skippedFragmentCount }) or err(writeFailed / schemaNotFound)
 */
const emitPrebuiltTypes = async (options) => {
  const { schemas, fieldSelections, outdir, injectsModulePath } = options;
  const groupResult = groupBySchema(fieldSelections, schemas);
  if (groupResult.isErr()) {
    return err(groupResult.error);
  }
  const { grouped, warnings, skippedFragmentCount } = groupResult.value;
  const typesPath = join(outdir, "types.prebuilt.ts");
  const code = generateTypesCode(grouped, schemas, injectsModulePath);
  try {
    await writeFile(typesPath, code, "utf-8");
  } catch (error) {
    // Disk failure is fatal: surface it as a WRITE_FAILED builder error.
    return err(builderErrors.writeFailed(typesPath, `Failed to write prebuilt types: ${error instanceof Error ? error.message : String(error)}`, error));
  }
  return ok({
    path: typesPath,
    warnings,
    skippedFragmentCount
  });
};
|
|
336
|
+
|
|
337
|
+
//#endregion
|
|
338
|
+
//#region packages/tools/src/typegen/errors.ts
|
|
339
|
+
/**
|
|
340
|
+
* Error constructor helpers for concise error creation.
|
|
341
|
+
*/
|
|
342
|
+
/**
 * Factory helpers producing the typegen error variants as plain objects
 * discriminated by `code`.
 */
const typegenErrors = {
  /** The generated graphql-system module is missing; codegen must run first. */
  codegenRequired(outdir) {
    return {
      code: "TYPEGEN_CODEGEN_REQUIRED",
      message: `Generated graphql-system module not found at '${outdir}'. Run 'soda-gql codegen' first.`,
      outdir
    };
  },
  /** One or more schemas could not be loaded from the bundle. */
  schemaLoadFailed(schemaNames, cause) {
    return {
      code: "TYPEGEN_SCHEMA_LOAD_FAILED",
      message: `Failed to load schemas: ${schemaNames.join(", ")}`,
      schemaNames,
      cause
    };
  },
  /** The build step failed; `message` carries the caller-supplied detail. */
  buildFailed(message, cause) {
    return {
      code: "TYPEGEN_BUILD_FAILED",
      message,
      cause
    };
  }
};
|
|
360
|
+
/**
|
|
361
|
+
* Format TypegenError for console output (human-readable).
|
|
362
|
+
*/
|
|
363
|
+
/**
 * Render a TypegenError as a human-readable multi-line string for the console.
 *
 * @param error - A typegenErrors variant (discriminated by `code`)
 * @returns Newline-joined message with code-specific detail lines and,
 *   when present, the underlying cause
 */
const formatTypegenError = (error) => {
  const parts = [`Error [${error.code}]: ${error.message}`];
  if (error.code === "TYPEGEN_CODEGEN_REQUIRED") {
    parts.push(` Output directory: ${error.outdir}`);
    parts.push(" Hint: Run 'soda-gql codegen' to generate the graphql-system module first.");
  } else if (error.code === "TYPEGEN_SCHEMA_LOAD_FAILED") {
    parts.push(` Schemas: ${error.schemaNames.join(", ")}`);
  }
  // Append the cause for any variant that carries one.
  if ("cause" in error && error.cause) {
    parts.push(` Caused by: ${error.cause}`);
  }
  return parts.join("\n");
};
|
|
380
|
+
|
|
381
|
+
//#endregion
|
|
382
|
+
//#region packages/tools/src/typegen/template-extractor.ts
|
|
383
|
+
/**
|
|
384
|
+
* Parse TypeScript source with SWC, returning null on failure.
|
|
385
|
+
*/
|
|
386
|
+
/**
 * Parse TypeScript source with SWC; return null instead of throwing when the
 * source is unparseable (the caller decides whether that deserves a warning).
 *
 * @param source - TypeScript/TSX source text
 * @param tsx - Whether to enable TSX syntax
 * @returns SWC program, or null on any parse failure
 */
const safeParseSync = (source, tsx) => {
  const parseOptions = {
    syntax: "typescript",
    tsx,
    decorators: false,
    dynamicImport: true
  };
  try {
    return parseSync(source, parseOptions);
  } catch {
    return null;
  }
};
|
|
398
|
+
/**
|
|
399
|
+
* Collect gql identifiers from import declarations.
|
|
400
|
+
* Finds imports like `import { gql } from "./graphql-system"`.
|
|
401
|
+
*/
|
|
402
|
+
/**
 * Collect local identifiers bound to `gql` imports from a parsed SWC module.
 * Finds imports like `import { gql } from "./graphql-system"` whose source
 * specifier the helper recognizes as the graphql-system module.
 *
 * @param module - SWC Module AST
 * @param filePath - Absolute path of the file (used for import resolution)
 * @param helper - GraphQL system identify helper
 * @returns Set of local identifier names bound to `gql`
 */
const collectGqlIdentifiers = (module, filePath, helper) => {
  const identifiers = new Set();
  for (const item of module.body) {
    let declaration = null;
    if (item.type === "ImportDeclaration") {
      declaration = item;
    } else if ("declaration" in item && item.declaration && item.declaration.type === "ImportDeclaration") {
      // Some SWC node shapes wrap the import in a `declaration` property.
      declaration = item.declaration;
    }
    if (!declaration) {
      continue;
    }
    // Only imports whose specifier resolves to the graphql-system module count.
    if (!helper.isGraphqlSystemImportSpecifier({
      filePath,
      specifier: declaration.source.value
    })) {
      continue;
    }
    for (const specifier of declaration.specifiers ?? []) {
      if (specifier.type === "ImportSpecifier") {
        const imported = specifier.imported ? specifier.imported.value : specifier.local.value;
        // NOTE(review): the `!specifier.imported` guard means an aliased
        // import (`import { gql as g }`) is NOT collected — only a plain
        // `import { gql }` matches. Looks intentional but unconfirmed;
        // verify against the non-bundled source / extractor expectations.
        if (imported === "gql" && !specifier.imported) {
          identifiers.add(specifier.local.value);
        }
      }
    }
  }
  return identifiers;
};
|
|
431
|
+
/**
|
|
432
|
+
* Extract all tagged templates from a TypeScript source file.
|
|
433
|
+
*
|
|
434
|
+
* @param filePath - Absolute path to the source file (used for import resolution)
|
|
435
|
+
* @param source - TypeScript source code
|
|
436
|
+
* @param helper - GraphQL system identifier for resolving gql imports
|
|
437
|
+
* @returns Extracted templates and any warnings
|
|
438
|
+
*/
|
|
439
|
+
/**
 * Extract all gql tagged templates from a TypeScript source file.
 *
 * @param filePath - Absolute path to the source file (drives TSX detection
 *   and import resolution)
 * @param source - TypeScript source code
 * @param helper - GraphQL system identifier for resolving gql imports
 * @returns { templates, warnings } — empty templates when the file fails to
 *   parse or imports no gql identifiers
 */
const extractTemplatesFromSource = (filePath, source, helper) => {
  const warnings = [];
  const program = safeParseSync(source, filePath.endsWith(".tsx"));
  if (!program || program.type !== "Module") {
    // Only warn when the file plausibly uses gql; silently skip otherwise.
    if (source.includes("gql")) {
      warnings.push(`[typegen-extract] Failed to parse ${filePath}`);
    }
    return { templates: [], warnings };
  }
  const gqlIdentifiers = collectGqlIdentifiers(program, filePath, helper);
  // Without a gql binding there is nothing to walk for.
  const templates = gqlIdentifiers.size === 0 ? [] : walkAndExtract(program, gqlIdentifiers);
  return { templates, warnings };
};
|
|
464
|
+
|
|
465
|
+
//#endregion
|
|
466
|
+
//#region packages/tools/src/typegen/template-scanner.ts
|
|
467
|
+
/**
|
|
468
|
+
* Source file scanner for tagged template extraction.
|
|
469
|
+
*
|
|
470
|
+
* Discovers source files from config include/exclude patterns,
|
|
471
|
+
* reads them, and extracts tagged templates using the template extractor.
|
|
472
|
+
*
|
|
473
|
+
* @module
|
|
474
|
+
*/
|
|
475
|
+
/**
|
|
476
|
+
* Scan source files for tagged templates.
|
|
477
|
+
*
|
|
478
|
+
* Uses fast-glob to discover files matching include/exclude patterns,
|
|
479
|
+
* then extracts tagged templates from each file.
|
|
480
|
+
*/
|
|
481
|
+
/**
 * Scan source files for gql tagged templates.
 *
 * Discovers files with fast-glob from include/exclude patterns, reads each,
 * and extracts tagged templates. Unreadable files produce warnings instead
 * of aborting the scan.
 *
 * @param options - { include, exclude, baseDir, helper }
 * @returns { templates: Map<path, extracted[]>, warnings }
 */
const scanSourceFiles = (options) => {
  const { include, exclude, baseDir, helper } = options;
  const warnings = [];
  const templates = new Map();
  // fast-glob's `ignore` option expects positive patterns, so strip any
  // leading negation markers from the exclude list.
  const ignorePatterns = exclude.map((pattern) => (pattern.startsWith("!") ? pattern.slice(1) : pattern));
  const matchedFiles = fg.sync(include, {
    cwd: baseDir,
    ignore: ignorePatterns,
    onlyFiles: true,
    absolute: true
  });
  for (const filePath of matchedFiles) {
    // Normalize to forward slashes so paths are stable across platforms.
    const normalizedPath = normalize(resolve(filePath)).replace(/\\/g, "/");
    try {
      const source = readFileSync(normalizedPath, "utf-8");
      const extraction = extractTemplatesFromSource(normalizedPath, source, helper);
      warnings.push(...extraction.warnings);
      if (extraction.templates.length > 0) {
        templates.set(normalizedPath, extraction.templates);
      }
    } catch (error) {
      const message = error instanceof Error ? error.message : String(error);
      warnings.push(`[typegen-scan] Failed to read ${normalizedPath}: ${message}`);
    }
  }
  return { templates, warnings };
};
|
|
511
|
+
|
|
512
|
+
//#endregion
|
|
513
|
+
//#region packages/tools/src/typegen/template-to-selections.ts
|
|
514
|
+
/**
|
|
515
|
+
* Convert extracted templates into field selections for the emitter.
|
|
516
|
+
*
|
|
517
|
+
* @param templates - Templates extracted from source files, keyed by file path
|
|
518
|
+
* @param schemas - Loaded schema objects keyed by schema name
|
|
519
|
+
* @returns Map of canonical IDs to field selection data, plus any warnings
|
|
520
|
+
*/
|
|
521
|
+
/**
 * Convert extracted templates into field selections for the emitter.
 *
 * Unknown schemas and per-template conversion failures are reported as
 * warnings; the remaining templates are still converted.
 *
 * @param templates - Templates extracted from source files, keyed by file path
 * @param schemas - Loaded schema objects keyed by schema name
 * @returns Map of canonical IDs to field selection data, plus any warnings
 */
const convertTemplatesToSelections = (templates, schemas) => {
  const selections = new Map();
  const warnings = [];
  // NOTE(review): this index map is only consulted as an existence check
  // below; convertFragmentTemplate builds its own index per call. Possibly
  // redundant work — confirm against the non-bundled source before changing.
  const schemaIndexes = new Map(Object.entries(schemas).map(([name, schema]) => [name, createSchemaIndexFromSchema(schema)]));
  for (const [filePath, fileTemplates] of templates) {
    for (const template of fileTemplates) {
      const schema = schemas[template.schemaName];
      if (!schema) {
        warnings.push(`[typegen-template] Unknown schema "${template.schemaName}" in ${filePath}`);
        continue;
      }
      const schemaIndex = schemaIndexes.get(template.schemaName);
      if (!schemaIndex) {
        continue;
      }
      try {
        if (template.kind === "fragment") {
          const selection = convertFragmentTemplate(template, schema, filePath);
          if (selection) {
            selections.set(selection.id, selection.data);
          }
        } else {
          // Anything that is not a fragment is treated as an operation.
          const selection = convertOperationTemplate(template, schema, filePath);
          if (selection) {
            selections.set(selection.id, selection.data);
          }
        }
      } catch (error) {
        // Recoverable: skip this template, keep processing the rest.
        const message = error instanceof Error ? error.message : String(error);
        warnings.push(`[typegen-template] Failed to process ${template.kind} in ${filePath}: ${message}`);
      }
    }
  }
  return {
    selections,
    warnings
  };
};
|
|
559
|
+
/**
|
|
560
|
+
* Recursively filter out __FRAG_SPREAD_ placeholder nodes from a selection set.
|
|
561
|
+
* These placeholders are created by template-extractor for interpolated fragment references.
|
|
562
|
+
* buildFieldsFromSelectionSet would throw on them since no interpolationMap is available.
|
|
563
|
+
*/
|
|
564
|
+
/**
 * Recursively drop __FRAG_SPREAD_ placeholder fragment spreads from a
 * selection set. These placeholders stand in for interpolated fragment
 * references and would make buildFieldsFromSelectionSet throw, since no
 * interpolationMap is available here.
 *
 * @param selectionSet - GraphQL SelectionSet AST node
 * @returns A new SelectionSet with placeholders removed at every depth
 */
const filterPlaceholderSpreads = (selectionSet) => {
  const isPlaceholder = (sel) => sel.kind === Kind.FRAGMENT_SPREAD && sel.name.value.startsWith("__FRAG_SPREAD_");
  const cleanChild = (sel) => {
    // Fields and inline fragments can nest further selection sets.
    const nestable = sel.kind === Kind.FIELD || sel.kind === Kind.INLINE_FRAGMENT;
    if (nestable && sel.selectionSet) {
      return { ...sel, selectionSet: filterPlaceholderSpreads(sel.selectionSet) };
    }
    return sel;
  };
  const kept = selectionSet.selections.filter((sel) => !isPlaceholder(sel));
  return { ...selectionSet, selections: kept.map(cleanChild) };
};
|
|
582
|
+
/** Simple matching-paren finder for template content (no comments/strings to handle). */
|
|
583
|
+
/**
 * Find the index of the parenthesis that closes the one at `openIndex`.
 * Simple depth counter — template content has no comments or strings to
 * worry about.
 *
 * @param source - Text to scan
 * @param openIndex - Index of the opening "("
 * @returns Index of the matching ")", or -1 if unbalanced
 */
const findClosingParen = (source, openIndex) => {
  let depth = 0;
  let i = openIndex;
  while (i < source.length) {
    const ch = source[i];
    if (ch === "(") {
      depth += 1;
    } else if (ch === ")") {
      depth -= 1;
      if (depth === 0) {
        return i;
      }
    }
    i += 1;
  }
  return -1;
};
|
|
594
|
+
/**
|
|
595
|
+
* Reconstruct full GraphQL source from an extracted template.
|
|
596
|
+
* For curried syntax (new), prepends the definition header from tag call arguments.
|
|
597
|
+
* For curried fragments with Fragment Arguments, repositions variable declarations
|
|
598
|
+
* before the on-clause to produce RFC-compliant syntax.
|
|
599
|
+
* For old syntax, returns content as-is.
|
|
600
|
+
*/
|
|
601
|
+
/**
 * Reconstruct full GraphQL source from an extracted template.
 *
 * Curried syntax (elementName present) gets its definition header prepended;
 * curried fragments with variable declarations have those repositioned
 * before the on-clause for RFC-compliant Fragment Arguments syntax. Old
 * syntax (no elementName) is returned untouched.
 *
 * @param template - Extracted template ({ kind, elementName?, typeName?, content })
 * @returns Complete GraphQL definition source
 */
const reconstructGraphql = (template) => {
  const { elementName, kind, typeName, content } = template;
  if (!elementName) {
    // Old syntax: the template content is already a complete definition.
    return content;
  }
  if (kind !== "fragment" || !typeName) {
    // Curried operation: prepend "query Name" / "mutation Name" etc.
    return `${kind} ${elementName} ${content}`;
  }
  const trimmed = content.trim();
  if (trimmed.startsWith("(")) {
    const closeIdx = findClosingParen(trimmed, 0);
    if (closeIdx !== -1) {
      // Move "(...)" variable declarations before the on-clause.
      const varDecls = trimmed.slice(0, closeIdx + 1);
      const selectionSet = trimmed.slice(closeIdx + 1).trim();
      return `fragment ${elementName}${varDecls} on ${typeName} ${selectionSet}`;
    }
  }
  return `fragment ${elementName} on ${typeName} ${trimmed}`;
};
|
|
619
|
+
/**
|
|
620
|
+
* Convert a fragment template into FieldSelectionData.
|
|
621
|
+
*/
|
|
622
|
+
/**
 * Convert a fragment template into FieldSelectionData.
 *
 * Pipeline: reconstruct full GraphQL source, extract fragment variables,
 * preprocess Fragment Arguments into parseable form, parse, then build the
 * field tree from the (placeholder-stripped) selection set.
 *
 * @param template - Extracted fragment template
 * @param schema - Schema object for the template's schema
 * @param filePath - Source file path, used to build the canonical ID
 * @returns { id, data } or null when no fragment definition is found
 */
const convertFragmentTemplate = (template, schema, filePath) => {
  const schemaIndex = createSchemaIndexFromSchema(schema);
  const graphqlSource = reconstructGraphql(template);
  // Variables must be extracted from the original source, before the
  // Fragment Arguments preprocessing rewrites the declaration syntax.
  const variableDefinitions = extractFragmentVariables(graphqlSource, schemaIndex);
  const { preprocessed } = preprocessFragmentArgs(graphqlSource);
  const document = parse(preprocessed);
  const fragDef = document.definitions.find((d) => d.kind === Kind.FRAGMENT_DEFINITION);
  if (!fragDef || fragDef.kind !== Kind.FRAGMENT_DEFINITION) {
    return null;
  }
  const fragmentName = fragDef.name.value;
  const onType = fragDef.typeCondition.name.value;
  // Strip __FRAG_SPREAD_ placeholders first — buildFieldsFromSelectionSet
  // would throw on them (no interpolationMap available here).
  const fields = buildFieldsFromSelectionSet(filterPlaceholderSpreads(fragDef.selectionSet), schema, onType);
  // Canonical ID is file-scoped so same-named fragments in different files
  // do not collide.
  const id = `${filePath}::${fragmentName}`;
  return {
    id,
    data: {
      type: "fragment",
      schemaLabel: schema.label,
      key: fragmentName,
      typename: onType,
      fields,
      variableDefinitions
    }
  };
};
|
|
648
|
+
/**
|
|
649
|
+
* Convert an operation template into FieldSelectionData.
|
|
650
|
+
*/
|
|
651
|
+
/**
 * Convert an operation template into FieldSelectionData.
 *
 * @param template - Extracted operation template
 * @param schema - Schema object for the template's schema
 * @param filePath - Source file path, used to build the canonical ID
 * @returns { id, data } or null when no operation definition is found
 */
const convertOperationTemplate = (template, schema, filePath) => {
  const document = parse(reconstructGraphql(template));
  const opDef = document.definitions.find((d) => d.kind === Kind.OPERATION_DEFINITION);
  if (!opDef || opDef.kind !== Kind.OPERATION_DEFINITION) {
    return null;
  }
  // Anonymous operations still need a stable registry key.
  const operationName = opDef.name?.value ?? "Anonymous";
  const operationType = opDef.operation;
  // Strip __FRAG_SPREAD_ placeholders before building the field tree.
  const cleanedSelectionSet = filterPlaceholderSpreads(opDef.selectionSet);
  const fields = buildFieldsFromSelectionSet(cleanedSelectionSet, schema, getRootTypeName(schema, operationType));
  return {
    id: `${filePath}::${operationName}`,
    data: {
      type: "operation",
      schemaLabel: schema.label,
      operationName,
      operationType,
      fields,
      // Copy so the stored array is independent of the AST node.
      variableDefinitions: [...(opDef.variableDefinitions ?? [])]
    }
  };
};
|
|
676
|
+
/**
 * Get the root type name for an operation type from the schema.
 * Falls back to the conventional GraphQL root type names when the
 * schema does not declare a custom one, and to "Query" for any
 * unrecognized operation type.
 */
const getRootTypeName = (schema, operationType) => {
	const conventionalRoots = {
		query: "Query",
		mutation: "Mutation",
		subscription: "Subscription"
	};
	const fallback = conventionalRoots[operationType];
	if (fallback === undefined) {
		return "Query";
	}
	return schema.operations[operationType] ?? fallback;
};
|
|
687
|
+
|
|
688
|
+
//#endregion
|
|
689
|
+
//#region packages/tools/src/typegen/runner.ts
|
|
690
|
+
/**
|
|
691
|
+
* Main typegen runner.
|
|
692
|
+
*
|
|
693
|
+
* Orchestrates the prebuilt type generation process:
|
|
694
|
+
* 1. Load schemas from generated CJS bundle
|
|
695
|
+
* 2. Build artifact to evaluate elements
|
|
696
|
+
* 3. Extract field selections from builder
|
|
697
|
+
* 4. Scan source files for tagged templates and merge selections
|
|
698
|
+
* 5. Emit types.prebuilt.ts
|
|
699
|
+
*
|
|
700
|
+
* @module
|
|
701
|
+
*/
|
|
702
|
+
/** Maps TypeScript source extensions to the runtime extensions they compile to. */
const extensionMap = {
	".ts": ".js",
	".tsx": ".js",
	".mts": ".mjs",
	".cts": ".cjs",
	".js": ".js",
	".mjs": ".mjs",
	".cjs": ".cjs"
};
/**
 * Build a relative import specifier from one file to another.
 *
 * Without `options.includeExtension`, the extension is dropped entirely.
 * With it, the source extension is rewritten to its runtime counterpart
 * via `extensionMap` (e.g. `.ts` -> `.js`, `.mts` -> `.mjs`).
 *
 * @param fromPath - Absolute path of the importing file.
 * @param targetPath - Absolute path of the file being imported.
 * @param options - `{ includeExtension }`; when falsy the specifier is emitted bare.
 * @returns A `./`-prefixed relative specifier.
 */
const toImportSpecifier = (fromPath, targetPath, options) => {
	const fromDir = dirname(fromPath);
	// Normalize Windows separators so specifiers are always POSIX-style.
	const normalized = relative(fromDir, targetPath).replace(/\\/g, "/");
	const sourceExt = extname(targetPath);
	if (!options?.includeExtension) {
		if (normalized.length === 0) {
			// Fix: when the target has no extension, `slice(0, -0)` would return ""
			// and yield the broken specifier "./". Guard on an empty sourceExt.
			const base = sourceExt ? targetPath.slice(0, -sourceExt.length) : targetPath;
			return `./${base.split("/").pop()}`;
		}
		const withPrefix = normalized.startsWith(".") ? normalized : `./${normalized}`;
		const currentExt = extname(withPrefix);
		return currentExt ? withPrefix.slice(0, -currentExt.length) : withPrefix;
	}
	const runtimeExt = extensionMap[sourceExt] ?? sourceExt;
	if (normalized.length === 0) {
		// When the extension is rewritten, strip the source one first; otherwise
		// keep the basename as-is (runtimeExt === sourceExt, possibly empty).
		const base = runtimeExt !== sourceExt ? targetPath.slice(0, -sourceExt.length).split("/").pop() : targetPath.split("/").pop();
		return `./${base}${runtimeExt}`;
	}
	const withPrefix = normalized.startsWith(".") ? normalized : `./${normalized}`;
	if (!runtimeExt) {
		return withPrefix;
	}
	if (withPrefix.endsWith(runtimeExt)) {
		return withPrefix;
	}
	const currentExt = extname(withPrefix);
	const withoutExt = currentExt ? withPrefix.slice(0, -currentExt.length) : withPrefix;
	return `${withoutExt}${runtimeExt}`;
};
|
|
739
|
+
/**
 * Run the typegen process.
 *
 * This function:
 * 1. Loads schemas from the generated CJS bundle
 * 2. Creates a BuilderService and builds the artifact
 * 3. Extracts field selections from the artifact
 * 4. Scans source files for tagged templates and merges selections
 * 5. Emits types.prebuilt.ts using emitPrebuiltTypes
 *
 * @param options - Typegen options including config
 * @returns Result containing success data (counts, output path, warnings) or error
 */
const runTypegen = async (options) => {
	const { config } = options;
	const outdir = resolve(config.outdir);
	// The codegen step must have produced this bundle before typegen can run.
	const cjsPath = join(outdir, "index.cjs");
	const importSpecifierOptions = { includeExtension: config.styles.importExtension };
	if (!existsSync(cjsPath)) {
		return err(typegenErrors.codegenRequired(outdir));
	}
	const schemaNames = Object.keys(config.schemas);
	const schemasResult = loadSchemasFromBundle(cjsPath, schemaNames);
	if (schemasResult.isErr()) {
		return err(typegenErrors.schemaLoadFailed(schemaNames, schemasResult.error));
	}
	const schemas = schemasResult.value;
	const prebuiltTypesPath = join(outdir, "types.prebuilt.ts");
	// Relative import specifier from the emitted types file to the injects module.
	const injectsModulePath = toImportSpecifier(prebuiltTypesPath, join(outdir, "_internal-injects.ts"), importSpecifierOptions);
	const builderService = createBuilderService({ config });
	const artifactResult = await builderService.buildAsync();
	if (artifactResult.isErr()) {
		return err(typegenErrors.buildFailed(`Builder failed: ${artifactResult.error.message}`, artifactResult.error));
	}
	const intermediateElements = builderService.getIntermediateElements();
	if (!intermediateElements) {
		return err(typegenErrors.buildFailed("No intermediate elements available after build", undefined));
	}
	// Builder-derived selections plus any warnings produced during extraction.
	const fieldSelectionsResult = extractFieldSelections(intermediateElements);
	const { selections: builderSelections, warnings: extractWarnings } = fieldSelectionsResult;
	const graphqlHelper = createGraphqlSystemIdentifyHelper(config);
	// Static scan for tagged templates across the configured include/exclude globs.
	const scanResult = scanSourceFiles({
		include: [...config.include],
		exclude: [...config.exclude],
		baseDir: config.baseDir,
		helper: graphqlHelper
	});
	const templateSelections = convertTemplatesToSelections(scanResult.templates, schemas);
	// Builder (VM-evaluated) selections take precedence over scanned templates.
	const fieldSelections = mergeSelections(builderSelections, templateSelections.selections, config.baseDir);
	const scanWarnings = [...scanResult.warnings, ...templateSelections.warnings];
	const emitResult = await emitPrebuiltTypes({
		schemas,
		fieldSelections,
		outdir,
		injectsModulePath
	});
	if (emitResult.isErr()) {
		return err(emitResult.error);
	}
	const { warnings: emitWarnings, skippedFragmentCount } = emitResult.value;
	// Tally fragments and operations for the summary returned to the caller.
	let fragmentCount = 0;
	let operationCount = 0;
	for (const selection of fieldSelections.values()) {
		if (selection.type === "fragment" && selection.key) {
			fragmentCount++;
		} else if (selection.type === "operation") {
			operationCount++;
		}
	}
	const allWarnings = [
		...extractWarnings,
		...scanWarnings,
		...emitWarnings
	];
	return ok({
		prebuiltTypesPath,
		fragmentCount,
		operationCount,
		skippedFragmentCount,
		warnings: allWarnings
	});
};
|
|
821
|
+
/** GraphQL name of a selection: fragment key or operation name. */
const extractElementName = (data) => (data.type === "fragment" ? data.key : data.operationName);
/**
 * Merge builder and template selections into a combined map.
 *
 * Builder selections are authoritative — VM evaluation correctly resolves
 * fragment spreads that static template analysis cannot handle.
 * Template selections serve as fallback for elements only found by the scanner.
 *
 * Deduplication is per-element (file + GraphQL name), not per-file, so that
 * callback-builder operations in files that also contain tagged templates are preserved.
 */
const mergeSelections = (builderSelections, templateSelections, baseDir) => {
	// Canonical "relativeFile::name" key, or null when the element has no name.
	const elementKey = (id, data) => {
		const rawFile = id.split("::")[0] ?? "";
		// Builder ids carry absolute paths; normalize them to baseDir-relative.
		const file = rawFile.startsWith("/") ? relative(baseDir, rawFile) : rawFile;
		const name = extractElementName(data);
		return name ? `${file}::${name}` : null;
	};
	const claimed = new Set();
	const merged = new Map();
	for (const [id, data] of builderSelections) {
		const key = elementKey(id, data);
		if (key !== null) {
			claimed.add(key);
		}
		merged.set(id, data);
	}
	for (const [id, data] of templateSelections) {
		const key = elementKey(id, data);
		if (key !== null && claimed.has(key)) {
			continue;
		}
		merged.set(id, data);
	}
	return merged;
};
|
|
856
|
+
|
|
857
|
+
//#endregion
|
|
858
|
+
export { emitPrebuiltTypes, formatTypegenError, runTypegen, typegenErrors };
|
|
859
|
+
//# sourceMappingURL=typegen.mjs.map
|