@soda-gql/builder 0.9.0 → 0.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.cjs +382 -2
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +122 -3
- package/dist/index.d.cts.map +1 -1
- package/dist/index.d.mts +122 -3
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +384 -6
- package/dist/index.mjs.map +1 -1
- package/package.json +4 -4
package/dist/index.mjs
CHANGED
|
@@ -1,17 +1,18 @@
|
|
|
1
1
|
import { existsSync, readFileSync, realpathSync, statSync, unlinkSync } from "node:fs";
|
|
2
|
-
import { readFile, stat } from "node:fs/promises";
|
|
2
|
+
import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
|
|
3
3
|
import { err, ok } from "neverthrow";
|
|
4
4
|
import { z } from "zod";
|
|
5
5
|
import { createHash } from "node:crypto";
|
|
6
|
-
import { extname, join, normalize, resolve } from "node:path";
|
|
6
|
+
import { dirname, extname, join, normalize, relative, resolve } from "node:path";
|
|
7
7
|
import { Script, createContext } from "node:vm";
|
|
8
8
|
import { CanonicalIdSchema, Effect, Effects, ParallelEffect, cachedFn, createAsyncScheduler, createCanonicalId, createCanonicalTracker, createSyncScheduler, getPortableFS, getPortableHasher, isExternalSpecifier, isRelativeSpecifier, normalizePath, parseCanonicalId, resolveRelativeImportWithExistenceCheck, resolveRelativeImportWithReferences } from "@soda-gql/common";
|
|
9
9
|
import * as sandboxCore from "@soda-gql/core";
|
|
10
|
-
import { Fragment, GqlElement, Operation } from "@soda-gql/core";
|
|
10
|
+
import { Fragment, GqlElement, Operation, calculateFieldsType, createVarRefFromVariable, generateInputObjectType, generateInputType, generateInputTypeFromSpecifiers } from "@soda-gql/core";
|
|
11
11
|
import * as sandboxCoreAdapter from "@soda-gql/core/adapter";
|
|
12
12
|
import * as sandboxCoreRuntime from "@soda-gql/core/runtime";
|
|
13
13
|
import * as sandboxRuntime from "@soda-gql/runtime";
|
|
14
14
|
import { parseSync, transformSync } from "@swc/core";
|
|
15
|
+
import { Kind } from "graphql";
|
|
15
16
|
import ts from "typescript";
|
|
16
17
|
import fg from "fast-glob";
|
|
17
18
|
|
|
@@ -31,6 +32,7 @@ const BuilderArtifactOperationSchema = z.object({
|
|
|
31
32
|
"subscription"
|
|
32
33
|
]),
|
|
33
34
|
operationName: z.string(),
|
|
35
|
+
schemaLabel: z.string(),
|
|
34
36
|
document: z.unknown(),
|
|
35
37
|
variableNames: z.array(z.string())
|
|
36
38
|
})
|
|
@@ -39,7 +41,11 @@ const BuilderArtifactFragmentSchema = z.object({
|
|
|
39
41
|
id: z.string(),
|
|
40
42
|
type: z.literal("fragment"),
|
|
41
43
|
metadata: BuilderArtifactElementMetadataSchema,
|
|
42
|
-
prebuild: z.object({
|
|
44
|
+
prebuild: z.object({
|
|
45
|
+
typename: z.string(),
|
|
46
|
+
key: z.string().optional(),
|
|
47
|
+
schemaLabel: z.string()
|
|
48
|
+
})
|
|
43
49
|
});
|
|
44
50
|
const BuilderArtifactElementSchema = z.discriminatedUnion("type", [BuilderArtifactOperationSchema, BuilderArtifactFragmentSchema]);
|
|
45
51
|
const BuilderArtifactMetaSchema = z.object({
|
|
@@ -355,6 +361,12 @@ const builderErrors = {
|
|
|
355
361
|
message: `Internal invariant violated: ${message}`,
|
|
356
362
|
context,
|
|
357
363
|
cause
|
|
364
|
+
}),
|
|
365
|
+
schemaNotFound: (schemaLabel, canonicalId) => ({
|
|
366
|
+
code: "SCHEMA_NOT_FOUND",
|
|
367
|
+
message: `Schema not found for label "${schemaLabel}" (element: ${canonicalId})`,
|
|
368
|
+
schemaLabel,
|
|
369
|
+
canonicalId
|
|
358
370
|
})
|
|
359
371
|
};
|
|
360
372
|
/**
|
|
@@ -441,6 +453,10 @@ const formatBuilderError = (error) => {
|
|
|
441
453
|
lines.push(` Context: ${error.context}`);
|
|
442
454
|
}
|
|
443
455
|
break;
|
|
456
|
+
case "SCHEMA_NOT_FOUND":
|
|
457
|
+
lines.push(` Schema label: ${error.schemaLabel}`);
|
|
458
|
+
lines.push(` Element: ${error.canonicalId}`);
|
|
459
|
+
break;
|
|
444
460
|
}
|
|
445
461
|
if ("cause" in error && error.cause && !["CONFIG_INVALID"].includes(error.code)) {
|
|
446
462
|
lines.push(` Caused by: ${error.cause}`);
|
|
@@ -1346,6 +1362,363 @@ const createGraphqlSystemIdentifyHelper = (config) => {
|
|
|
1346
1362
|
};
|
|
1347
1363
|
};
|
|
1348
1364
|
|
|
1365
|
+
//#endregion
|
|
1366
|
+
//#region packages/builder/src/prebuilt/emitter.ts
|
|
1367
|
+
/**
|
|
1368
|
+
* Prebuilt types emitter.
|
|
1369
|
+
*
|
|
1370
|
+
* Generates TypeScript type definitions for PrebuiltTypes registry
|
|
1371
|
+
* from field selection data and schema.
|
|
1372
|
+
*
|
|
1373
|
+
* ## Error Handling Strategy
|
|
1374
|
+
*
|
|
1375
|
+
* The emitter uses a partial failure approach for type calculation errors:
|
|
1376
|
+
*
|
|
1377
|
+
* **Recoverable errors** (result in warnings, element skipped):
|
|
1378
|
+
* - Type calculation failures (e.g., `calculateFieldsType` throws)
|
|
1379
|
+
* - Input type generation failures (e.g., `generateInputType` throws)
|
|
1380
|
+
* - These are caught per-element, logged as warnings, and the element is omitted
|
|
1381
|
+
*
|
|
1382
|
+
* **Fatal errors** (result in error result):
|
|
1383
|
+
* - `SCHEMA_NOT_FOUND`: Selection references non-existent schema
|
|
1384
|
+
* - `WRITE_FAILED`: Cannot write output file to disk
|
|
1385
|
+
*
|
|
1386
|
+
* This allows builds to succeed with partial type coverage when some elements
|
|
1387
|
+
* have issues, while providing visibility into problems via warnings.
|
|
1388
|
+
*
|
|
1389
|
+
* @module
|
|
1390
|
+
*/
|
|
1391
|
+
/**
|
|
1392
|
+
* Group field selections by schema.
|
|
1393
|
+
* Uses the schemaLabel from each selection to group them correctly.
|
|
1394
|
+
*
|
|
1395
|
+
* @returns Result containing grouped selections and warnings, or error if schema not found
|
|
1396
|
+
*/
|
|
1397
|
+
/**
 * Group field selections by their schema label.
 *
 * Every schema in `schemas` gets a (possibly empty) bucket. A selection whose
 * label has no matching schema is a fatal error (SCHEMA_NOT_FOUND); per-element
 * type-calculation failures are downgraded to warnings and the element skipped.
 *
 * @returns ok({ grouped, warnings }) or err(schemaNotFound)
 */
const groupBySchema = (fieldSelections, schemas) => {
	const warnings = [];
	// Pre-create one bucket per known schema so empty schemas still emit registries.
	const grouped = new Map(Object.keys(schemas).map((name) => [name, {
		fragments: [],
		operations: [],
		inputObjects: new Set()
	}]));
	for (const [canonicalId, selection] of fieldSelections) {
		const label = selection.schemaLabel;
		const schema = schemas[label];
		const bucket = grouped.get(label);
		if (!schema || !bucket) {
			return err(builderErrors.schemaNotFound(label, canonicalId));
		}
		const outputFormatters = { scalarOutput: (name) => `ScalarOutput_${label}<"${name}">` };
		const inputFormatters = {
			scalarInput: (name) => `ScalarInput_${label}<"${name}">`,
			inputObject: (name) => `Input_${label}_${name}`
		};
		if (selection.type === "fragment") {
			// Fragments without a key are anonymous and cannot be registered.
			if (!selection.key) {
				continue;
			}
			try {
				// Record referenced input objects before type calculation (original order).
				for (const inputName of collectUsedInputObjectsFromSpecifiers(schema, selection.variableDefinitions)) {
					bucket.inputObjects.add(inputName);
				}
				const outputType = calculateFieldsType(schema, selection.fields, outputFormatters);
				const inputType = Object.keys(selection.variableDefinitions).length > 0 ? generateInputTypeFromSpecifiers(schema, selection.variableDefinitions, { formatters: inputFormatters }) : "void";
				bucket.fragments.push({
					key: selection.key,
					inputType,
					outputType
				});
			} catch (error) {
				warnings.push(`[prebuilt] Failed to calculate type for fragment "${selection.key}": ${error instanceof Error ? error.message : String(error)}`);
			}
		} else if (selection.type === "operation") {
			try {
				for (const inputName of collectUsedInputObjects(schema, selection.variableDefinitions)) {
					bucket.inputObjects.add(inputName);
				}
				const outputType = calculateFieldsType(schema, selection.fields, outputFormatters);
				const inputType = generateInputType(schema, selection.variableDefinitions, inputFormatters);
				bucket.operations.push({
					key: selection.operationName,
					inputType,
					outputType
				});
			} catch (error) {
				warnings.push(`[prebuilt] Failed to calculate type for operation "${selection.operationName}": ${error instanceof Error ? error.message : String(error)}`);
			}
		}
	}
	return ok({
		grouped,
		warnings
	});
};
|
|
1462
|
+
/**
|
|
1463
|
+
* Calculate relative import path from one file to another.
|
|
1464
|
+
*/
|
|
1465
|
+
/**
 * Calculate a relative import specifier from one file to another.
 *
 * @param from - Absolute path of the importing file.
 * @param to - Absolute path of the imported file.
 * @returns Relative specifier with a leading "./" where needed and the ".ts"
 *   extension stripped.
 */
const toImportSpecifier = (from, to) => {
	const fromDir = dirname(from);
	// path.relative uses "\" on Windows, which is not a valid import
	// specifier separator — normalize to "/".
	let relativePath = relative(fromDir, to).replace(/\\/g, "/");
	if (!relativePath.startsWith(".")) {
		relativePath = `./${relativePath}`;
	}
	return relativePath.replace(/\.ts$/, "");
};
|
|
1473
|
+
/**
|
|
1474
|
+
* Extract input object names from a GraphQL TypeNode.
|
|
1475
|
+
*/
|
|
1476
|
+
/**
 * Extract input object names referenced by a GraphQL TypeNode into `inputObjects`.
 *
 * Unwraps NonNull/List wrappers recursively; a named type is recorded only when
 * it exists in `schema.input` and is neither a scalar nor an enum.
 */
const extractInputObjectsFromType = (schema, typeNode, inputObjects) => {
	// Unwrap list / non-null wrappers down to the named type.
	if (typeNode.kind === Kind.NON_NULL_TYPE || typeNode.kind === Kind.LIST_TYPE) {
		extractInputObjectsFromType(schema, typeNode.type, inputObjects);
		return;
	}
	if (typeNode.kind === Kind.NAMED_TYPE) {
		const name = typeNode.name.value;
		const isInputObject = !schema.scalar[name] && !schema.enum[name] && Boolean(schema.input[name]);
		if (isInputObject) {
			inputObjects.add(name);
		}
	}
};
|
|
1493
|
+
/**
|
|
1494
|
+
* Recursively collect nested input objects from schema definitions.
|
|
1495
|
+
* Takes a set of initial input names and expands to include all nested inputs.
|
|
1496
|
+
*/
|
|
1497
|
+
/**
 * Recursively collect nested input objects from schema definitions.
 *
 * Takes a set of initial input names and expands it to include every input
 * object reachable through `kind === "input"` fields, following cycles safely.
 *
 * @param schema - Schema whose `input` map holds input object definitions.
 * @param initialInputNames - Iterable of directly-referenced input names.
 * @returns Set containing the initial names plus all transitively nested ones.
 */
const collectNestedInputObjects = (schema, initialInputNames) => {
	const inputObjects = new Set(initialInputNames);
	// Worklist traversal with a single visited set — the previous
	// per-initial-name recursion re-walked shared subtrees redundantly.
	const pending = Array.from(initialInputNames);
	const visited = new Set();
	while (pending.length > 0) {
		const inputName = pending.pop();
		if (visited.has(inputName)) {
			continue;
		}
		visited.add(inputName);
		const inputDef = schema.input[inputName];
		// Unknown names stay in the result set but contribute no children.
		if (!inputDef) {
			continue;
		}
		for (const field of Object.values(inputDef.fields)) {
			if (field.kind === "input" && !inputObjects.has(field.name)) {
				inputObjects.add(field.name);
				pending.push(field.name);
			}
		}
	}
	return inputObjects;
};
|
|
1520
|
+
/**
|
|
1521
|
+
* Collect all input object types used in variable definitions.
|
|
1522
|
+
* Recursively collects nested input objects from the schema.
|
|
1523
|
+
*/
|
|
1524
|
+
/**
 * Collect all input object types used in operation variable definitions,
 * including input objects nested inside them.
 */
const collectUsedInputObjects = (schema, variableDefinitions) => {
	const directInputs = variableDefinitions.reduce((acc, varDef) => {
		extractInputObjectsFromType(schema, varDef.type, acc);
		return acc;
	}, new Set());
	return collectNestedInputObjects(schema, directInputs);
};
|
|
1531
|
+
/**
|
|
1532
|
+
* Collect all input object types used in InputTypeSpecifiers.
|
|
1533
|
+
* Recursively collects nested input objects from the schema.
|
|
1534
|
+
*/
|
|
1535
|
+
/**
 * Collect all input object types used in InputTypeSpecifiers,
 * including input objects nested inside them.
 */
const collectUsedInputObjectsFromSpecifiers = (schema, specifiers) => {
	const directInputs = new Set(Object.values(specifiers)
		.filter((specifier) => specifier.kind === "input" && schema.input[specifier.name])
		.map((specifier) => specifier.name));
	return collectNestedInputObjects(schema, directInputs);
};
|
|
1544
|
+
/**
|
|
1545
|
+
* Generate type definitions for input objects.
|
|
1546
|
+
*/
|
|
1547
|
+
/**
 * Generate `type Input_<schema>_<name> = …;` alias lines for each input
 * object, sorted by name for deterministic output.
 *
 * Depth settings are read from the schema's `__defaultInputDepth` (default 3)
 * and `__inputDepthOverrides` escape hatches.
 */
const generateInputObjectTypeDefinitions = (schema, schemaName, inputNames) => {
	const defaultDepth = schema.__defaultInputDepth ?? 3;
	const depthOverrides = schema.__inputDepthOverrides ?? {};
	const formatters = {
		scalarInput: (name) => `ScalarInput_${schemaName}<"${name}">`,
		inputObject: (name) => `Input_${schemaName}_${name}`
	};
	return Array.from(inputNames).sort().map((inputName) => {
		const typeString = generateInputObjectType(schema, inputName, {
			defaultDepth,
			depthOverrides,
			formatters
		});
		return `type Input_${schemaName}_${inputName} = ${typeString};`;
	});
};
|
|
1566
|
+
/**
|
|
1567
|
+
* Generate the TypeScript code for prebuilt types.
|
|
1568
|
+
*/
|
|
1569
|
+
/**
 * Generate the TypeScript source for the prebuilt type registry file.
 *
 * Emits a generated-file header, scalar imports per injected schema, scalar
 * input/output helper aliases, then one `PrebuiltTypes_<schema>` registry per
 * grouped schema (input object aliases, fragment entries, operation entries).
 *
 * @param grouped - Map of schema name to { fragments, operations, inputObjects }.
 * @param schemas - Schema definitions keyed by name.
 * @param injects - Per-schema inject config (path to the scalars module).
 * @param outdir - Output directory; used to compute relative import paths.
 * @returns The full file contents as a single string.
 */
const generateTypesCode = (grouped, schemas, injects, outdir) => {
	const typesFilePath = join(outdir, "prebuilt", "types.ts");
	const lines = [
		"/**",
		" * Prebuilt type registry.",
		" *",
		" * This file is auto-generated by @soda-gql/builder.",
		" * Do not edit manually.",
		" *",
		" * @module",
		" * @generated",
		" */",
		"",
		"import type { PrebuiltTypeRegistry } from \"@soda-gql/core\";"
	];
	for (const [schemaName, inject] of Object.entries(injects)) {
		const relativePath = toImportSpecifier(typesFilePath, inject.scalars);
		lines.push(`import type { scalar as scalar_${schemaName} } from "${relativePath}";`);
	}
	lines.push("");
	for (const schemaName of Object.keys(injects)) {
		lines.push(`type ScalarInput_${schemaName}<T extends keyof typeof scalar_${schemaName}> = ` + `typeof scalar_${schemaName}[T]["$type"]["input"];`);
		lines.push(`type ScalarOutput_${schemaName}<T extends keyof typeof scalar_${schemaName}> = ` + `typeof scalar_${schemaName}[T]["$type"]["output"];`);
	}
	lines.push("");
	for (const [schemaName, { fragments, operations, inputObjects }] of grouped) {
		const schema = schemas[schemaName];
		if (inputObjects.size > 0 && schema) {
			lines.push("// Input object types");
			lines.push(...generateInputObjectTypeDefinitions(schema, schemaName, inputObjects));
			lines.push("");
		}
		// Sort copies: sorting in place would mutate the caller-owned arrays
		// inside `grouped`.
		const fragmentEntries = [...fragments].sort((a, b) => a.key.localeCompare(b.key)).map((f) => `  readonly "${f.key}": { readonly input: ${f.inputType}; readonly output: ${f.outputType} };`);
		const operationEntries = [...operations].sort((a, b) => a.key.localeCompare(b.key)).map((o) => `  readonly "${o.key}": { readonly input: ${o.inputType}; readonly output: ${o.outputType} };`);
		lines.push(`export type PrebuiltTypes_${schemaName} = {`);
		lines.push("  readonly fragments: {");
		if (fragmentEntries.length > 0) {
			lines.push(...fragmentEntries);
		}
		lines.push("  };");
		lines.push("  readonly operations: {");
		if (operationEntries.length > 0) {
			lines.push(...operationEntries);
		}
		lines.push("  };");
		lines.push("} satisfies PrebuiltTypeRegistry;");
		lines.push("");
	}
	return lines.join("\n");
};
|
|
1620
|
+
/**
|
|
1621
|
+
* Emit prebuilt types to the prebuilt/types.ts file.
|
|
1622
|
+
*
|
|
1623
|
+
* This function uses a partial failure strategy: if type calculation fails for
|
|
1624
|
+
* individual elements (e.g., due to invalid field selections or missing schema
|
|
1625
|
+
* types), those elements are skipped and warnings are collected rather than
|
|
1626
|
+
* failing the entire emission. This allows builds to succeed even when some
|
|
1627
|
+
* elements have issues, while still reporting problems via warnings.
|
|
1628
|
+
*
|
|
1629
|
+
* @param options - Emitter options including schemas, field selections, and output directory
|
|
1630
|
+
* @returns Result containing output path and warnings, or error if a hard failure occurs
|
|
1631
|
+
*
|
|
1632
|
+
* @example
|
|
1633
|
+
* ```typescript
|
|
1634
|
+
* const result = await emitPrebuiltTypes({
|
|
1635
|
+
* schemas: { mySchema: schema },
|
|
1636
|
+
* fieldSelections,
|
|
1637
|
+
* outdir: "./generated",
|
|
1638
|
+
* injects: { mySchema: { scalars: "./scalars.ts" } },
|
|
1639
|
+
* });
|
|
1640
|
+
*
|
|
1641
|
+
* if (result.isOk()) {
|
|
1642
|
+
* console.log(`Generated: ${result.value.path}`);
|
|
1643
|
+
* if (result.value.warnings.length > 0) {
|
|
1644
|
+
* console.warn("Warnings:", result.value.warnings);
|
|
1645
|
+
* }
|
|
1646
|
+
* }
|
|
1647
|
+
* ```
|
|
1648
|
+
*/
|
|
1649
|
+
/**
 * Emit prebuilt types to `<outdir>/prebuilt/types.ts`.
 *
 * Grouping failures (SCHEMA_NOT_FOUND) and write failures are fatal errors;
 * per-element type-calculation problems surface as warnings on the ok result.
 *
 * @param options - { schemas, fieldSelections, outdir, injects }
 * @returns ok({ path, warnings }) or err(builder error)
 */
const emitPrebuiltTypes = async (options) => {
	const { schemas, fieldSelections, outdir, injects } = options;
	const groupResult = groupBySchema(fieldSelections, schemas);
	if (groupResult.isErr()) {
		return err(groupResult.error);
	}
	const { grouped, warnings } = groupResult.value;
	const code = generateTypesCode(grouped, schemas, injects, outdir);
	const typesPath = join(outdir, "prebuilt", "types.ts");
	try {
		// writeFile does not create parent directories — ensure prebuilt/
		// exists so the first emission into a fresh outdir does not ENOENT.
		await mkdir(join(outdir, "prebuilt"), { recursive: true });
		await writeFile(typesPath, code, "utf-8");
		return ok({
			path: typesPath,
			warnings
		});
	} catch (error) {
		return err(builderErrors.writeFailed(typesPath, `Failed to write prebuilt types: ${error instanceof Error ? error.message : String(error)}`, error));
	}
};
|
|
1668
|
+
|
|
1669
|
+
//#endregion
|
|
1670
|
+
//#region packages/builder/src/prebuilt/extractor.ts
|
|
1671
|
+
/**
|
|
1672
|
+
* Extract field selections from evaluated intermediate elements.
|
|
1673
|
+
*
|
|
1674
|
+
* For fragments, calls `spread()` with empty/default variables to get field selections.
|
|
1675
|
+
* For operations, calls `documentSource()` to get field selections.
|
|
1676
|
+
*
|
|
1677
|
+
* @param elements - Record of canonical ID to intermediate artifact element
|
|
1678
|
+
* @returns Object containing selections map and any warnings encountered
|
|
1679
|
+
*/
|
|
1680
|
+
/**
 * Extract field selections from evaluated intermediate elements.
 *
 * Fragments are expanded via `spread()` with a synthetic variable ref per
 * declared variable; operations supply fields via `documentSource()` and their
 * variable definitions are read off the operation definition node. Failures
 * for individual elements become warnings and the element is skipped.
 *
 * @param elements - Record of canonical ID to intermediate artifact element.
 * @returns { selections, warnings }
 */
const extractFieldSelections = (elements) => {
	const selections = new Map();
	const warnings = [];
	for (const [id, entry] of Object.entries(elements)) {
		const canonicalId = id;
		const inner = entry.element;
		try {
			if (entry.type === "fragment") {
				const variableDefinitions = inner.variableDefinitions;
				// One synthetic $var reference per declared variable name.
				const varRefs = {};
				for (const varName of Object.keys(variableDefinitions)) {
					varRefs[varName] = createVarRefFromVariable(varName);
				}
				selections.set(canonicalId, {
					type: "fragment",
					schemaLabel: inner.schemaLabel,
					key: inner.key,
					typename: inner.typename,
					fields: inner.spread(varRefs),
					variableDefinitions
				});
			} else if (entry.type === "operation") {
				const fields = inner.documentSource();
				const operationDef = inner.document.definitions.find((def) => def.kind === Kind.OPERATION_DEFINITION);
				selections.set(canonicalId, {
					type: "operation",
					schemaLabel: inner.schemaLabel,
					operationName: inner.operationName,
					operationType: inner.operationType,
					fields,
					variableDefinitions: operationDef?.variableDefinitions ?? []
				});
			}
		} catch (error) {
			warnings.push(`[prebuilt] Failed to extract field selections for ${canonicalId}: ${error instanceof Error ? error.message : String(error)}`);
		}
	}
	return {
		selections,
		warnings
	};
};
|
|
1721
|
+
|
|
1349
1722
|
//#endregion
|
|
1350
1723
|
//#region packages/builder/src/artifact/aggregate.ts
|
|
1351
1724
|
const canonicalToFilePath$1 = (canonicalId) => canonicalId.split("::")[0] ?? canonicalId;
|
|
@@ -1378,7 +1751,11 @@ const aggregate = ({ analyses, elements }) => {
|
|
|
1378
1751
|
contentHash: ""
|
|
1379
1752
|
};
|
|
1380
1753
|
if (element.type === "fragment") {
|
|
1381
|
-
const prebuild = {
|
|
1754
|
+
const prebuild = {
|
|
1755
|
+
typename: element.element.typename,
|
|
1756
|
+
key: element.element.key,
|
|
1757
|
+
schemaLabel: element.element.schemaLabel
|
|
1758
|
+
};
|
|
1382
1759
|
registry.set(definition.canonicalId, {
|
|
1383
1760
|
id: definition.canonicalId,
|
|
1384
1761
|
type: "fragment",
|
|
@@ -1394,6 +1771,7 @@ const aggregate = ({ analyses, elements }) => {
|
|
|
1394
1771
|
const prebuild = {
|
|
1395
1772
|
operationType: element.element.operationType,
|
|
1396
1773
|
operationName: element.element.operationName,
|
|
1774
|
+
schemaLabel: element.element.schemaLabel,
|
|
1397
1775
|
document: element.element.document,
|
|
1398
1776
|
variableNames: element.element.variableNames,
|
|
1399
1777
|
metadata: element.element.metadata
|
|
@@ -3621,5 +3999,5 @@ const createBuilderService = ({ config, entrypointsOverride }) => {
|
|
|
3621
3999
|
};
|
|
3622
4000
|
|
|
3623
4001
|
//#endregion
|
|
3624
|
-
export { BuilderArtifactSchema, BuilderEffects, FileReadEffect, FileStatEffect, __clearGqlCache, collectAffectedFiles, createBuilderService, createBuilderSession, createGraphqlSystemIdentifyHelper, extractModuleAdjacency, formatBuilderErrorForCLI, formatBuilderErrorStructured, loadArtifact, loadArtifactSync };
|
|
4002
|
+
export { BuilderArtifactSchema, BuilderEffects, FileReadEffect, FileStatEffect, __clearGqlCache, collectAffectedFiles, createBuilderService, createBuilderSession, createGraphqlSystemIdentifyHelper, emitPrebuiltTypes, extractFieldSelections, extractModuleAdjacency, formatBuilderErrorForCLI, formatBuilderErrorStructured, loadArtifact, loadArtifactSync };
|
|
3625
4003
|
//# sourceMappingURL=index.mjs.map
|