@soda-gql/codegen 0.11.9 → 0.11.11

This diff shows the changes between publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -1,4 +1,4 @@
- const require_generator = require('./generator-lc18-vLD.cjs');
+ const require_generator = require('./generator-DvfP6gTY.cjs');
  let node_fs = require("node:fs");
  let node_path = require("node:path");
  let neverthrow = require("neverthrow");
@@ -73,6 +73,170 @@ const esbuildBundler = {
  }
  };
 
+ //#endregion
+ //#region packages/codegen/src/defs-generator.ts
+ /**
+ * Split an array into chunks of the specified size.
+ */
+ const chunkArray = (array, size) => {
+ if (size <= 0) {
+ return [Array.from(array)];
+ }
+ const result = [];
+ for (let i = 0; i < array.length; i += size) {
+ result.push(array.slice(i, i + size));
+ }
+ return result;
+ };
+ /**
+ * Determine if chunking is needed based on the number of definitions.
+ */
+ const needsChunking = (vars, chunkSize) => {
+ return vars.length > chunkSize;
+ };
+ /**
+ * Generate a single definition file content.
+ */
+ const generateDefinitionFile = (options) => {
+ const { category, vars, needsDefineEnum } = options;
+ if (vars.length === 0) {
+ return `/**
+ * ${category} definitions (empty)
+ * @generated by @soda-gql/codegen
+ */
+ `;
+ }
+ const imports = [];
+ if (needsDefineEnum && category === "enums") {
+ imports.push("import { defineEnum } from \"@soda-gql/core\";");
+ }
+ const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
+ const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
+ return `/**
+ * ${category} definitions
+ * @generated by @soda-gql/codegen
+ */
+ ${importsBlock}${exportStatements}
+ `;
+ };
+ /**
+ * Generate a chunk file content.
+ */
+ const generateChunkFile = (options) => {
+ const { category, vars, chunkIndex, needsDefineEnum } = options;
+ if (vars.length === 0) {
+ return `/**
+ * ${category} chunk ${chunkIndex} (empty)
+ * @generated by @soda-gql/codegen
+ */
+ `;
+ }
+ const imports = [];
+ if (needsDefineEnum && category === "enums") {
+ imports.push("import { defineEnum } from \"@soda-gql/core\";");
+ }
+ const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
+ const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
+ return `/**
+ * ${category} chunk ${chunkIndex}
+ * @generated by @soda-gql/codegen
+ */
+ ${importsBlock}${exportStatements}
+ `;
+ };
+ /**
+ * Generate the index file that re-exports all chunks.
+ */
+ const generateChunkIndex = (options) => {
+ const { category, chunkCount } = options;
+ const reExports = Array.from({ length: chunkCount }, (_, i) => `export * from "./chunk-${i}";`).join("\n");
+ return `/**
+ * ${category} index (re-exports all chunks)
+ * @generated by @soda-gql/codegen
+ */
+ ${reExports}
+ `;
+ };
+ /**
+ * Generate chunked definition files.
+ */
+ const generateChunkedDefinitionFiles = (category, schemaName, vars, chunkSize) => {
+ const chunks = chunkArray(vars, chunkSize);
+ const needsDefineEnum = category === "enums";
+ const chunkContents = chunks.map((chunkVars, chunkIndex) => ({
+ chunkIndex,
+ content: generateChunkFile({
+ category,
+ schemaName,
+ vars: chunkVars,
+ chunkIndex,
+ needsDefineEnum
+ }),
+ varNames: chunkVars.map((v) => v.name)
+ }));
+ const allVarNames = vars.map((v) => v.name);
+ const indexContent = generateChunkIndex({
+ category,
+ chunkCount: chunks.length,
+ varNames: allVarNames
+ });
+ return {
+ indexContent,
+ chunks: chunkContents
+ };
+ };
+ /**
+ * Generate the complete _defs directory structure.
+ */
+ const generateDefsStructure = (schemaName, categoryVars, chunkSize) => {
+ const files = [];
+ const importPaths = {
+ enums: "./_defs/enums",
+ inputs: "./_defs/inputs",
+ objects: "./_defs/objects",
+ unions: "./_defs/unions"
+ };
+ const categories = [
+ "enums",
+ "inputs",
+ "objects",
+ "unions"
+ ];
+ for (const category of categories) {
+ const vars = categoryVars[category];
+ const needsDefineEnum = category === "enums";
+ if (needsChunking(vars, chunkSize)) {
+ const chunked = generateChunkedDefinitionFiles(category, schemaName, vars, chunkSize);
+ importPaths[category] = `./_defs/${category}`;
+ files.push({
+ relativePath: `_defs/${category}/index.ts`,
+ content: chunked.indexContent
+ });
+ for (const chunk of chunked.chunks) {
+ files.push({
+ relativePath: `_defs/${category}/chunk-${chunk.chunkIndex}.ts`,
+ content: chunk.content
+ });
+ }
+ } else {
+ const content = generateDefinitionFile({
+ category,
+ schemaName,
+ vars,
+ needsDefineEnum
+ });
+ files.push({
+ relativePath: `_defs/${category}.ts`,
+ content
+ });
+ }
+ }
+ return {
+ files,
+ importPaths
+ };
+ };
+
  //#endregion
  //#region packages/codegen/src/file.ts
  const writeModule = (outPath, contents) => {
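
The defs-generator added above assembles each emitted file from template strings. As a rough sketch of that output (the enum names and the elided defineEnum arguments are hypothetical; only the header comments, the conditional defineEnum import, and the re-export lines follow the templates in the added code), a chunked enums category would be written roughly as:

// _defs/enums/chunk-0.ts: sketch of generateChunkFile output for two hypothetical enums
/**
 * enums chunk 0
 * @generated by @soda-gql/codegen
 */
import { defineEnum } from "@soda-gql/core";

export const Role = defineEnum(/* arguments elided; each var's code string comes from the generator */);
export const Status = defineEnum(/* arguments elided; each var's code string comes from the generator */);

// _defs/enums/index.ts: sketch of generateChunkIndex output for two chunks
/**
 * enums index (re-exports all chunks)
 * @generated by @soda-gql/codegen
 */
export * from "./chunk-0";
export * from "./chunk-1";
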
@@ -226,10 +390,12 @@ const runCodegen = async (options) => {
  inputDepthOverridesConfig.set(schemaName, schemaConfig.inputDepthOverrides);
  }
  }
- const { code: internalCode, injectsCode } = require_generator.generateMultiSchemaModule(schemas, {
+ const chunkSize = options.chunkSize ?? 100;
+ const { code: internalCode, injectsCode, categoryVars } = require_generator.generateMultiSchemaModule(schemas, {
  injection: injectionConfig,
  defaultInputDepth: defaultInputDepthConfig.size > 0 ? defaultInputDepthConfig : undefined,
- inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined
+ inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined,
+ chunkSize
  });
  const indexCode = `/**
  * Generated by @soda-gql/codegen
@@ -239,7 +405,7 @@ const runCodegen = async (options) => {
  export * from "./_internal";
  `;
  for (const [name, document] of schemas.entries()) {
- const schemaIndex = (await Promise.resolve().then(() => require("./generator-BILAfpvh.cjs"))).createSchemaIndex(document);
+ const schemaIndex = (await Promise.resolve().then(() => require("./generator-0VPnXv1q.cjs"))).createSchemaIndex(document);
  const objects = Array.from(schemaIndex.objects.keys()).filter((n) => !n.startsWith("__")).length;
  const enums = Array.from(schemaIndex.enums.keys()).filter((n) => !n.startsWith("__")).length;
  const inputs = Array.from(schemaIndex.inputs.keys()).filter((n) => !n.startsWith("__")).length;
@@ -259,6 +425,31 @@ export * from "./_internal";
  return (0, neverthrow.err)(injectsWriteResult.error);
  }
  }
+ const defsPaths = [];
+ if (categoryVars) {
+ const outDir = (0, node_path.dirname)(outPath);
+ const combinedVars = {
+ enums: [],
+ inputs: [],
+ objects: [],
+ unions: []
+ };
+ for (const vars of Object.values(categoryVars)) {
+ combinedVars.enums.push(...vars.enums);
+ combinedVars.inputs.push(...vars.inputs);
+ combinedVars.objects.push(...vars.objects);
+ combinedVars.unions.push(...vars.unions);
+ }
+ const defsStructure = generateDefsStructure("combined", combinedVars, chunkSize);
+ for (const file of defsStructure.files) {
+ const filePath = (0, node_path.join)(outDir, file.relativePath);
+ const writeResult = await writeModule(filePath, file.content).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
+ if (writeResult.isErr()) {
+ return (0, neverthrow.err)(writeResult.error);
+ }
+ defsPaths.push(filePath);
+ }
+ }
  const internalPath = (0, node_path.join)((0, node_path.dirname)(outPath), "_internal.ts");
  const internalWriteResult = await writeModule(internalPath, internalCode).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
  if (internalWriteResult.isErr()) {
@@ -281,7 +472,8 @@ export * from "./_internal";
  outPath,
  internalPath,
  injectsPath,
- cjsPath: bundleResult.value.cjsPath
+ cjsPath: bundleResult.value.cjsPath,
+ ...defsPaths.length > 0 ? { defsPaths } : {}
  });
  };
 
package/dist/index.cjs.map CHANGED
@@ -1 +1 @@
- {"version":3,"file":"index.cjs","names":["esbuildBundler: Bundler","documents: DocumentNode[]","extensionMap: Record<string, string>","withPrefix","currentExt","schemaHashes: Record<string, { schemaHash: string; objects: number; enums: number; inputs: number; unions: number }>","generateMultiSchemaModule","defaultBundler"],"sources":["../src/inject-template.ts","../src/bundler/esbuild.ts","../src/file.ts","../src/schema.ts","../src/runner.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from \"node:fs\";\nimport { dirname, resolve } from \"node:path\";\nimport { err, ok } from \"neverthrow\";\n\nimport type { CodegenError } from \"./types\";\n\nconst templateContents = `\\\nimport { defineScalar } from \"@soda-gql/core\";\n\nexport const scalar = {\n ...defineScalar<\"ID\", string, string>(\"ID\"),\n ...defineScalar<\"String\", string, string>(\"String\"),\n ...defineScalar<\"Int\", number, number>(\"Int\"),\n ...defineScalar<\"Float\", number, number>(\"Float\"),\n ...defineScalar<\"Boolean\", boolean, boolean>(\"Boolean\"),\n} as const;\n`;\n\nexport const writeInjectTemplate = (outPath: string) => {\n const targetPath = resolve(outPath);\n\n try {\n if (existsSync(targetPath)) {\n return err<void, CodegenError>({\n code: \"INJECT_TEMPLATE_EXISTS\",\n message: `Inject module already exists: ${targetPath}`,\n outPath: targetPath,\n });\n }\n\n mkdirSync(dirname(targetPath), { recursive: true });\n writeFileSync(targetPath, `${templateContents}\\n`);\n return ok<void, CodegenError>(undefined);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n return err<void, CodegenError>({\n code: \"INJECT_TEMPLATE_FAILED\",\n message,\n outPath: targetPath,\n });\n }\n};\n\nexport const getInjectTemplate = (): string => `${templateContents}\\n`;\n","import { extname } from \"node:path\";\nimport { build } from \"esbuild\";\nimport { err, ok } from \"neverthrow\";\nimport type { Bundler } from \"./types\";\n\nexport const esbuildBundler: Bundler = {\n name: \"esbuild\",\n bundle: async ({ sourcePath, external }) => {\n try {\n const sourceExt = extname(sourcePath);\n const baseName = sourcePath.slice(0, -sourceExt.length);\n const cjsPath = `${baseName}.cjs`;\n\n await build({\n entryPoints: [sourcePath],\n outfile: cjsPath,\n format: \"cjs\",\n platform: \"node\",\n bundle: true,\n external: [...external],\n sourcemap: false,\n minify: false,\n treeShaking: false,\n });\n\n return ok({ cjsPath });\n } catch (error) {\n return err({\n code: \"EMIT_FAILED\" as const,\n message: `[esbuild] Failed to bundle: ${error instanceof Error ? error.message : String(error)}`,\n outPath: sourcePath,\n });\n }\n },\n};\n","import { mkdirSync, writeFileSync } from \"node:fs\";\nimport { dirname, resolve } from \"node:path\";\nimport { err, ok } from \"neverthrow\";\n\nimport type { CodegenError } from \"./types\";\n\nexport const writeModule = (outPath: string, contents: string) => {\n const targetPath = resolve(outPath);\n\n try {\n mkdirSync(dirname(targetPath), { recursive: true });\n writeFileSync(targetPath, contents);\n return ok<void, CodegenError>(undefined);\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error);\n return err<void, CodegenError>({\n code: \"EMIT_FAILED\",\n message,\n outPath: targetPath,\n });\n }\n};\n","import { createHash } from \"node:crypto\";\nimport { existsSync, readFileSync } from \"node:fs\";\nimport { resolve } from \"node:path\";\nimport { concatAST, type DocumentNode, parse, print } from \"graphql\";\nimport { err, ok } from \"neverthrow\";\n\nimport type { CodegenError } from \"./types\";\n\n/**\n * Load a single schema file.\n * @internal Use loadSchema for public API.\n */\nexport const loadSingleSchema = (schemaPath: string) => {\n const resolvedPath = resolve(schemaPath);\n\n if (!existsSync(resolvedPath)) {\n return err<DocumentNode, CodegenError>({\n code: \"SCHEMA_NOT_FOUND\",\n message: `Schema file not found at ${resolvedPath}`,\n schemaPath: resolvedPath,\n });\n }\n\n try {\n const schemaSource = readFileSync(resolvedPath, \"utf8\");\n const document = parse(schemaSource);\n return ok<DocumentNode, CodegenError>(document);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n return err<DocumentNode, CodegenError>({\n code: \"SCHEMA_INVALID\",\n message: `SchemaValidationError: ${message}`,\n schemaPath: resolvedPath,\n });\n }\n};\n\n/**\n * Load and merge multiple schema files into a single DocumentNode.\n * Uses GraphQL's concatAST to combine definitions from all files.\n */\nexport const loadSchema = (schemaPaths: readonly string[]) => {\n const documents: DocumentNode[] = [];\n\n for (const schemaPath of schemaPaths) {\n const result = loadSingleSchema(schemaPath);\n if (result.isErr()) {\n return err<DocumentNode, CodegenError>(result.error);\n }\n documents.push(result.value);\n }\n\n // Merge all documents into one\n const merged = concatAST(documents);\n return ok<DocumentNode, CodegenError>(merged);\n};\n\nexport const hashSchema = (document: DocumentNode): string => createHash(\"sha256\").update(print(document)).digest(\"hex\");\n","import { existsSync } from \"node:fs\";\nimport { basename, dirname, extname, join, relative, resolve } from \"node:path\";\nimport { err, ok } from \"neverthrow\";\nimport { defaultBundler } from \"./bundler\";\nimport { writeModule } from \"./file\";\nimport { generateMultiSchemaModule } from \"./generator\";\nimport { hashSchema, loadSchema } from \"./schema\";\nimport type { CodegenOptions, CodegenResult, CodegenSuccess } from \"./types\";\n\nconst extensionMap: Record<string, string> = {\n \".ts\": \".js\",\n \".tsx\": \".js\",\n \".mts\": \".mjs\",\n \".cts\": \".cjs\",\n \".js\": \".js\",\n \".mjs\": \".mjs\",\n \".cjs\": \".cjs\",\n};\n\ntype ImportSpecifierOptions = {\n includeExtension?: boolean;\n};\n\nconst toImportSpecifier = (fromPath: string, targetPath: string, options?: ImportSpecifierOptions): string => {\n const fromDir = dirname(fromPath);\n const normalized = relative(fromDir, targetPath).replace(/\\\\/g, \"/\");\n const sourceExt = extname(targetPath);\n\n // When includeExtension is false (default), strip the extension entirely\n if (!options?.includeExtension) {\n if (normalized.length === 0) {\n return `./${basename(targetPath, sourceExt)}`;\n }\n const withPrefix = normalized.startsWith(\".\") ? normalized : `./${normalized}`;\n const currentExt = extname(withPrefix);\n return currentExt ? withPrefix.slice(0, -currentExt.length) : withPrefix;\n }\n\n // When includeExtension is true, convert to runtime extension\n const runtimeExt = extensionMap[sourceExt] ?? 
sourceExt;\n\n if (normalized.length === 0) {\n const base = runtimeExt !== sourceExt ? basename(targetPath, sourceExt) : basename(targetPath);\n return `./${base}${runtimeExt}`;\n }\n\n const withPrefix = normalized.startsWith(\".\") ? normalized : `./${normalized}`;\n if (!runtimeExt) {\n return withPrefix;\n }\n if (withPrefix.endsWith(runtimeExt)) {\n return withPrefix;\n }\n\n const currentExt = extname(withPrefix);\n const withoutExt = currentExt ? withPrefix.slice(0, -currentExt.length) : withPrefix;\n return `${withoutExt}${runtimeExt}`;\n};\n\nexport const runCodegen = async (options: CodegenOptions): Promise<CodegenResult> => {\n const outPath = resolve(options.outPath);\n const importSpecifierOptions = { includeExtension: options.importExtension };\n\n // Validate that all schema and inject files exist\n for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {\n const scalarPath = resolve(schemaConfig.inject.scalars);\n if (!existsSync(scalarPath)) {\n return err({\n code: \"INJECT_MODULE_NOT_FOUND\",\n message: `Scalar module not found for schema '${schemaName}': ${scalarPath}`,\n injectPath: scalarPath,\n });\n }\n\n if (schemaConfig.inject.adapter) {\n const adapterPath = resolve(schemaConfig.inject.adapter);\n if (!existsSync(adapterPath)) {\n return err({\n code: \"INJECT_MODULE_NOT_FOUND\",\n message: `Adapter module not found for schema '${schemaName}': ${adapterPath}`,\n injectPath: adapterPath,\n });\n }\n }\n }\n\n // Load all schemas\n const schemas = new Map<string, import(\"graphql\").DocumentNode>();\n const schemaHashes: Record<string, { schemaHash: string; objects: number; enums: number; inputs: number; unions: number }> = {};\n\n for (const [name, schemaConfig] of Object.entries(options.schemas)) {\n const result = await loadSchema(schemaConfig.schema).match(\n (doc) => Promise.resolve(ok(doc)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (result.isErr()) {\n return err(result.error);\n }\n\n schemas.set(name, result.value);\n }\n\n // Build injection config for each schema\n const injectionConfig = new Map<\n string,\n {\n scalarImportPath: string;\n adapterImportPath?: string;\n }\n >();\n\n for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {\n const injectConfig = schemaConfig.inject;\n\n injectionConfig.set(schemaName, {\n scalarImportPath: toImportSpecifier(outPath, resolve(injectConfig.scalars), importSpecifierOptions),\n ...(injectConfig.adapter\n ? { adapterImportPath: toImportSpecifier(outPath, resolve(injectConfig.adapter), importSpecifierOptions) }\n : {}),\n });\n }\n\n // Build defaultInputDepth and inputDepthOverrides config for each schema\n const defaultInputDepthConfig = new Map<string, number>();\n const inputDepthOverridesConfig = new Map<string, Readonly<Record<string, number>>>();\n\n for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {\n if (schemaConfig.defaultInputDepth !== undefined && schemaConfig.defaultInputDepth !== 3) {\n defaultInputDepthConfig.set(schemaName, schemaConfig.defaultInputDepth);\n }\n if (schemaConfig.inputDepthOverrides && Object.keys(schemaConfig.inputDepthOverrides).length > 0) {\n inputDepthOverridesConfig.set(schemaName, schemaConfig.inputDepthOverrides);\n }\n }\n\n // Generate multi-schema module (this becomes _internal.ts content)\n const { code: internalCode, injectsCode } = generateMultiSchemaModule(schemas, {\n injection: injectionConfig,\n defaultInputDepth: defaultInputDepthConfig.size > 0 ? 
defaultInputDepthConfig : undefined,\n inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined,\n });\n\n // Generate index.ts wrapper (simple re-export from _internal)\n const indexCode = `/**\n * Generated by @soda-gql/codegen\n * @module\n * @generated\n */\nexport * from \"./_internal\";\n`;\n\n // Calculate individual schema stats and hashes\n for (const [name, document] of schemas.entries()) {\n const schemaIndex = (await import(\"./generator\")).createSchemaIndex(document);\n const objects = Array.from(schemaIndex.objects.keys()).filter((n) => !n.startsWith(\"__\")).length;\n const enums = Array.from(schemaIndex.enums.keys()).filter((n) => !n.startsWith(\"__\")).length;\n const inputs = Array.from(schemaIndex.inputs.keys()).filter((n) => !n.startsWith(\"__\")).length;\n const unions = Array.from(schemaIndex.unions.keys()).filter((n) => !n.startsWith(\"__\")).length;\n\n schemaHashes[name] = {\n schemaHash: hashSchema(document),\n objects,\n enums,\n inputs,\n unions,\n };\n }\n\n // Write _internal-injects.ts (adapter imports only, referenced by both _internal.ts and prebuilt)\n const injectsPath = join(dirname(outPath), \"_internal-injects.ts\");\n if (injectsCode) {\n const injectsWriteResult = await writeModule(injectsPath, injectsCode).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (injectsWriteResult.isErr()) {\n return err(injectsWriteResult.error);\n }\n }\n\n // Write _internal.ts (implementation)\n const internalPath = join(dirname(outPath), \"_internal.ts\");\n const internalWriteResult = await writeModule(internalPath, internalCode).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (internalWriteResult.isErr()) {\n return err(internalWriteResult.error);\n }\n\n // Write index.ts (re-export wrapper)\n const indexWriteResult = await writeModule(outPath, indexCode).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (indexWriteResult.isErr()) {\n return err(indexWriteResult.error);\n }\n\n // Bundle the generated module\n const bundleOutcome = await defaultBundler.bundle({\n sourcePath: outPath,\n external: [\"@soda-gql/core\", \"@soda-gql/runtime\"],\n });\n const bundleResult = bundleOutcome.match(\n (result) => ok(result),\n (error) => err(error),\n );\n\n if (bundleResult.isErr()) {\n return err(bundleResult.error);\n }\n\n return ok({\n schemas: schemaHashes,\n outPath,\n internalPath,\n injectsPath,\n cjsPath: bundleResult.value.cjsPath,\n } satisfies 
CodegenSuccess);\n};\n"],"mappings":";;;;;;;;;AAMA,MAAM,mBAAmB;;;;;;;;;;;AAYzB,MAAa,uBAAuB,YAAoB;CACtD,MAAM,oCAAqB,QAAQ;AAEnC,KAAI;AACF,8BAAe,WAAW,EAAE;AAC1B,8BAA+B;IAC7B,MAAM;IACN,SAAS,iCAAiC;IAC1C,SAAS;IACV,CAAC;;AAGJ,gDAAkB,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,6BAAc,YAAY,GAAG,iBAAiB,IAAI;AAClD,4BAA8B,UAAU;UACjC,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,6BAA+B;GAC7B,MAAM;GACN;GACA,SAAS;GACV,CAAC;;;AAIN,MAAa,0BAAkC,GAAG,iBAAiB;;;;ACtCnE,MAAaA,iBAA0B;CACrC,MAAM;CACN,QAAQ,OAAO,EAAE,YAAY,eAAe;AAC1C,MAAI;GACF,MAAM,mCAAoB,WAAW;GACrC,MAAM,WAAW,WAAW,MAAM,GAAG,CAAC,UAAU,OAAO;GACvD,MAAM,UAAU,GAAG,SAAS;AAE5B,4BAAY;IACV,aAAa,CAAC,WAAW;IACzB,SAAS;IACT,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,UAAU,CAAC,GAAG,SAAS;IACvB,WAAW;IACX,QAAQ;IACR,aAAa;IACd,CAAC;AAEF,6BAAU,EAAE,SAAS,CAAC;WACf,OAAO;AACd,8BAAW;IACT,MAAM;IACN,SAAS,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;IAC9F,SAAS;IACV,CAAC;;;CAGP;;;;AC5BD,MAAa,eAAe,SAAiB,aAAqB;CAChE,MAAM,oCAAqB,QAAQ;AAEnC,KAAI;AACF,gDAAkB,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,6BAAc,YAAY,SAAS;AACnC,4BAA8B,UAAU;UACjC,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,6BAA+B;GAC7B,MAAM;GACN;GACA,SAAS;GACV,CAAC;;;;;;;;;;ACPN,MAAa,oBAAoB,eAAuB;CACtD,MAAM,sCAAuB,WAAW;AAExC,KAAI,yBAAY,aAAa,EAAE;AAC7B,6BAAuC;GACrC,MAAM;GACN,SAAS,4BAA4B;GACrC,YAAY;GACb,CAAC;;AAGJ,KAAI;EACF,MAAM,yCAA4B,cAAc,OAAO;EACvD,MAAM,8BAAiB,aAAa;AACpC,4BAAsC,SAAS;UACxC,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,6BAAuC;GACrC,MAAM;GACN,SAAS,0BAA0B;GACnC,YAAY;GACb,CAAC;;;;;;;AAQN,MAAa,cAAc,gBAAmC;CAC5D,MAAMC,YAA4B,EAAE;AAEpC,MAAK,MAAM,cAAc,aAAa;EACpC,MAAM,SAAS,iBAAiB,WAAW;AAC3C,MAAI,OAAO,OAAO,EAAE;AAClB,8BAAuC,OAAO,MAAM;;AAEtD,YAAU,KAAK,OAAO,MAAM;;CAI9B,MAAM,gCAAmB,UAAU;AACnC,2BAAsC,OAAO;;AAG/C,MAAa,cAAc,yCAA8C,SAAS,CAAC,0BAAa,SAAS,CAAC,CAAC,OAAO,MAAM;;;;AChDxH,MAAMC,eAAuC;CAC3C,OAAO;CACP,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,QAAQ;CACT;AAMD,MAAM,qBAAqB,UAAkB,YAAoB,YAA6C;CAC5G,MAAM,iCAAkB,SAAS;CACjC,MAAM,qCAAsB,SAAS,WAAW,CAAC,QAAQ,OAAO,IAAI;CACpE,MAAM,mCAAoB,WAAW;AAGrC,KAAI,CAAC,SAAS,kBAAkB;AAC9B,MAAI,WAAW,WAAW,GAAG;AAC3B,UAAO,6BAAc,YAAY,UAAU;;EAE7C,MAAMC,eAAa,WAAW,WAAW,IAAI,GAAG,aAAa,KAAK;EAClE,MAAMC,sCAAqBD,aAAW;AACtC,SAAOC,eAAaD,aAAW,MAAM,GAAG,CAACC,aAAW,OAAO,GAAGD;;CAIhE,MAAM,aAAa,aAAa,cAAc;AAE9C,KAAI,WAAW,WAAW,GAAG;EAC3B,MAAM,OAAO,eAAe,oCAAqB,YAAY,UAAU,2BAAY,WAAW;AAC9F,SAAO,KAAK,OAAO;;CAGrB,MAAM,aAAa,WAAW,WAAW,IAAI,GAAG,aAAa,KAAK;AAClE,KAAI,CAAC,YAAY;AACf,SAAO;;AAET,KAAI,WAAW,SAAS,WAAW,EAAE;AACnC,SAAO;;CAGT,MAAM,oCAAqB,WAAW;CACtC,MAAM,aAAa,aAAa,WAAW,MAAM,GAAG,CAAC,WAAW,OAAO,GAAG;AAC1E,QAAO,GAAG,aAAa;;AAGzB,MAAa,aAAa,OAAO,YAAoD;CACnF,MAAM,iCAAkB,QAAQ,QAAQ;CACxC,MAAM,yBAAyB,EAAE,kBAAkB,QAAQ,iBAAiB;AAG5E,MAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;EACxE,MAAM,oCAAqB,aAAa,OAAO,QAAQ;AACvD,MAAI,yBAAY,WAAW,EAAE;AAC3B,8BAAW;IACT,MAAM;IACN,SAAS,uCAAuC,WAAW,KAAK;IAChE,YAAY;IACb,CAAC;;AAGJ,MAAI,aAAa,OAAO,SAAS;GAC/B,MAAM,qCAAsB,aAAa,OAAO,QAAQ;AACxD,OAAI,yBAAY,YAAY,EAAE;AAC5B,+BAAW;KACT,MAAM;KACN,SAAS,wCAAwC,WAAW,KAAK;KACjE,YAAY;KACb,CAAC;;;;CAMR,MAAM,UAAU,IAAI,KAA6C;CACjE,MAAME,eAAuH,EAAE;AAE/H,MAAK,MAAM,CAAC,MAAM,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;EAClE,MAAM,SAAS,MAAM,WAAW,aAAa,OAAO,CAAC,OAClD,QAAQ,QAAQ,2BAAW,IAAI,CAAC,GAChC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,MAAI,OAAO,OAAO,EAAE;AAClB,8BAAW,OAAO,MAAM;;AAG1B,UAAQ,IAAI,MAAM,OAAO,MAAM;;CAIjC,MAAM,kBAAkB,IAAI,KAMzB;AAEH,MAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;EACxE,MAAM,eAAe,aAAa;AAElC,kBAAgB,IAAI,YAAY;GAC9B,kBAAkB,kBAAkB,gCAAiB,aAAa,QAAQ,EAAE,uBAAuB;GACnG,GAAI,aAAa,UACb,EAAE,mBAAmB,kBAAkB,gCAAiB,aAAa,QAAQ,EAAE,uBAAuB,EAAE,GACxG,
EAAE;GACP,CAAC;;CAIJ,MAAM,0BAA0B,IAAI,KAAqB;CACzD,MAAM,4BAA4B,IAAI,KAA+C;AAErF,MAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;AACxE,MAAI,aAAa,sBAAsB,aAAa,aAAa,sBAAsB,GAAG;AACxF,2BAAwB,IAAI,YAAY,aAAa,kBAAkB;;AAEzE,MAAI,aAAa,uBAAuB,OAAO,KAAK,aAAa,oBAAoB,CAAC,SAAS,GAAG;AAChG,6BAA0B,IAAI,YAAY,aAAa,oBAAoB;;;CAK/E,MAAM,EAAE,MAAM,cAAc,gBAAgBC,4CAA0B,SAAS;EAC7E,WAAW;EACX,mBAAmB,wBAAwB,OAAO,IAAI,0BAA0B;EAChF,qBAAqB,0BAA0B,OAAO,IAAI,4BAA4B;EACvF,CAAC;CAGF,MAAM,YAAY;;;;;;;AASlB,MAAK,MAAM,CAAC,MAAM,aAAa,QAAQ,SAAS,EAAE;EAChD,MAAM,eAAe,2CAAM,8BAAuB,kBAAkB,SAAS;EAC7E,MAAM,UAAU,MAAM,KAAK,YAAY,QAAQ,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;EAC1F,MAAM,QAAQ,MAAM,KAAK,YAAY,MAAM,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;EACtF,MAAM,SAAS,MAAM,KAAK,YAAY,OAAO,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;EACxF,MAAM,SAAS,MAAM,KAAK,YAAY,OAAO,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;AAExF,eAAa,QAAQ;GACnB,YAAY,WAAW,SAAS;GAChC;GACA;GACA;GACA;GACD;;CAIH,MAAM,yDAA2B,QAAQ,EAAE,uBAAuB;AAClE,KAAI,aAAa;EACf,MAAM,qBAAqB,MAAM,YAAY,aAAa,YAAY,CAAC,YAC/D,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,MAAI,mBAAmB,OAAO,EAAE;AAC9B,8BAAW,mBAAmB,MAAM;;;CAKxC,MAAM,0DAA4B,QAAQ,EAAE,eAAe;CAC3D,MAAM,sBAAsB,MAAM,YAAY,cAAc,aAAa,CAAC,YAClE,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,KAAI,oBAAoB,OAAO,EAAE;AAC/B,6BAAW,oBAAoB,MAAM;;CAIvC,MAAM,mBAAmB,MAAM,YAAY,SAAS,UAAU,CAAC,YACvD,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,KAAI,iBAAiB,OAAO,EAAE;AAC5B,6BAAW,iBAAiB,MAAM;;CAIpC,MAAM,gBAAgB,MAAMC,eAAe,OAAO;EAChD,YAAY;EACZ,UAAU,CAAC,kBAAkB,oBAAoB;EAClD,CAAC;CACF,MAAM,eAAe,cAAc,OAChC,8BAAc,OAAO,GACrB,8BAAc,MAAM,CACtB;AAED,KAAI,aAAa,OAAO,EAAE;AACxB,6BAAW,aAAa,MAAM;;AAGhC,2BAAU;EACR,SAAS;EACT;EACA;EACA;EACA,SAAS,aAAa,MAAM;EAC7B,CAA0B"}
+ {"version":3,"file":"index.cjs","names":["esbuildBundler: Bundler","result: T[][]","imports: string[]","files: Array<{ relativePath: string; content: string }>","importPaths: Record<DefinitionCategory, string>","categories: DefinitionCategory[]","documents: DocumentNode[]","extensionMap: Record<string, string>","withPrefix","currentExt","schemaHashes: Record<string, { schemaHash: string; objects: number; enums: number; inputs: number; unions: number }>","generateMultiSchemaModule","defsPaths: string[]","defaultBundler"],"sources":["../src/inject-template.ts","../src/bundler/esbuild.ts","../src/defs-generator.ts","../src/file.ts","../src/schema.ts","../src/runner.ts"],"sourcesContent":["import { existsSync, mkdirSync, writeFileSync } from \"node:fs\";\nimport { dirname, resolve } from \"node:path\";\nimport { err, ok } from \"neverthrow\";\n\nimport type { CodegenError } from \"./types\";\n\nconst templateContents = `\\\nimport { defineScalar } from \"@soda-gql/core\";\n\nexport const scalar = {\n ...defineScalar<\"ID\", string, string>(\"ID\"),\n ...defineScalar<\"String\", string, string>(\"String\"),\n ...defineScalar<\"Int\", number, number>(\"Int\"),\n ...defineScalar<\"Float\", number, number>(\"Float\"),\n ...defineScalar<\"Boolean\", boolean, boolean>(\"Boolean\"),\n} as const;\n`;\n\nexport const writeInjectTemplate = (outPath: string) => {\n const targetPath = resolve(outPath);\n\n try {\n if (existsSync(targetPath)) {\n return err<void, CodegenError>({\n code: \"INJECT_TEMPLATE_EXISTS\",\n message: `Inject module already exists: ${targetPath}`,\n outPath: targetPath,\n });\n }\n\n mkdirSync(dirname(targetPath), { recursive: true });\n writeFileSync(targetPath, `${templateContents}\\n`);\n return ok<void, CodegenError>(undefined);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n return err<void, CodegenError>({\n code: \"INJECT_TEMPLATE_FAILED\",\n message,\n outPath: targetPath,\n });\n }\n};\n\nexport const getInjectTemplate = (): string => `${templateContents}\\n`;\n","import { extname } from \"node:path\";\nimport { build } from \"esbuild\";\nimport { err, ok } from \"neverthrow\";\nimport type { Bundler } from \"./types\";\n\nexport const esbuildBundler: Bundler = {\n name: \"esbuild\",\n bundle: async ({ sourcePath, external }) => {\n try {\n const sourceExt = extname(sourcePath);\n const baseName = sourcePath.slice(0, -sourceExt.length);\n const cjsPath = `${baseName}.cjs`;\n\n await build({\n entryPoints: [sourcePath],\n outfile: cjsPath,\n format: \"cjs\",\n platform: \"node\",\n bundle: true,\n external: [...external],\n sourcemap: false,\n minify: false,\n treeShaking: false,\n });\n\n return ok({ cjsPath });\n } catch (error) {\n return err({\n code: \"EMIT_FAILED\" as const,\n message: `[esbuild] Failed to bundle: ${error instanceof Error ? 
error.message : String(error)}`,\n outPath: sourcePath,\n });\n }\n },\n};\n","/**\n * Definition file generator for split codegen.\n * Generates separate files for each definition category (enums, inputs, objects, unions).\n */\n\nexport type DefinitionCategory = \"enums\" | \"inputs\" | \"objects\" | \"unions\";\n\nexport type DefinitionVar = {\n readonly name: string;\n readonly code: string;\n};\n\n/**\n * Split an array into chunks of the specified size.\n */\nexport const chunkArray = <T>(array: readonly T[], size: number): T[][] => {\n if (size <= 0) {\n return [Array.from(array)];\n }\n\n const result: T[][] = [];\n for (let i = 0; i < array.length; i += size) {\n result.push(array.slice(i, i + size));\n }\n\n return result;\n};\n\n/**\n * Determine if chunking is needed based on the number of definitions.\n */\nexport const needsChunking = (vars: readonly DefinitionVar[], chunkSize: number): boolean => {\n return vars.length > chunkSize;\n};\n\ntype DefinitionFileOptions = {\n readonly category: DefinitionCategory;\n readonly schemaName: string;\n readonly vars: readonly DefinitionVar[];\n readonly needsDefineEnum: boolean;\n};\n\n/**\n * Generate a single definition file content.\n */\nexport const generateDefinitionFile = (options: DefinitionFileOptions): string => {\n const { category, vars, needsDefineEnum } = options;\n\n if (vars.length === 0) {\n return `/**\n * ${category} definitions (empty)\n * @generated by @soda-gql/codegen\n */\n`;\n }\n\n const imports: string[] = [];\n if (needsDefineEnum && category === \"enums\") {\n imports.push('import { defineEnum } from \"@soda-gql/core\";');\n }\n\n const importsBlock = imports.length > 0 ? `${imports.join(\"\\n\")}\\n\\n` : \"\";\n const exportStatements = vars.map((v) => `export ${v.code}`).join(\"\\n\");\n\n return `/**\n * ${category} definitions\n * @generated by @soda-gql/codegen\n */\n${importsBlock}${exportStatements}\n`;\n};\n\ntype ChunkFileOptions = {\n readonly category: DefinitionCategory;\n readonly schemaName: string;\n readonly vars: readonly DefinitionVar[];\n readonly chunkIndex: number;\n readonly needsDefineEnum: boolean;\n};\n\n/**\n * Generate a chunk file content.\n */\nexport const generateChunkFile = (options: ChunkFileOptions): string => {\n const { category, vars, chunkIndex, needsDefineEnum } = options;\n\n if (vars.length === 0) {\n return `/**\n * ${category} chunk ${chunkIndex} (empty)\n * @generated by @soda-gql/codegen\n */\n`;\n }\n\n const imports: string[] = [];\n if (needsDefineEnum && category === \"enums\") {\n imports.push('import { defineEnum } from \"@soda-gql/core\";');\n }\n\n const importsBlock = imports.length > 0 ? 
`${imports.join(\"\\n\")}\\n\\n` : \"\";\n const exportStatements = vars.map((v) => `export ${v.code}`).join(\"\\n\");\n\n return `/**\n * ${category} chunk ${chunkIndex}\n * @generated by @soda-gql/codegen\n */\n${importsBlock}${exportStatements}\n`;\n};\n\ntype ChunkIndexOptions = {\n readonly category: DefinitionCategory;\n readonly chunkCount: number;\n readonly varNames: readonly string[];\n};\n\n/**\n * Generate the index file that re-exports all chunks.\n */\nexport const generateChunkIndex = (options: ChunkIndexOptions): string => {\n const { category, chunkCount } = options;\n\n const reExports = Array.from({ length: chunkCount }, (_, i) => `export * from \"./chunk-${i}\";`).join(\"\\n\");\n\n return `/**\n * ${category} index (re-exports all chunks)\n * @generated by @soda-gql/codegen\n */\n${reExports}\n`;\n};\n\nexport type ChunkedDefinitionFiles = {\n readonly indexContent: string;\n readonly chunks: ReadonlyArray<{\n readonly chunkIndex: number;\n readonly content: string;\n readonly varNames: readonly string[];\n }>;\n};\n\n/**\n * Generate chunked definition files.\n */\nexport const generateChunkedDefinitionFiles = (\n category: DefinitionCategory,\n schemaName: string,\n vars: readonly DefinitionVar[],\n chunkSize: number,\n): ChunkedDefinitionFiles => {\n const chunks = chunkArray(vars, chunkSize);\n const needsDefineEnum = category === \"enums\";\n\n const chunkContents = chunks.map((chunkVars, chunkIndex) => ({\n chunkIndex,\n content: generateChunkFile({\n category,\n schemaName,\n vars: chunkVars,\n chunkIndex,\n needsDefineEnum,\n }),\n varNames: chunkVars.map((v) => v.name),\n }));\n\n const allVarNames = vars.map((v) => v.name);\n const indexContent = generateChunkIndex({\n category,\n chunkCount: chunks.length,\n varNames: allVarNames,\n });\n\n return {\n indexContent,\n chunks: chunkContents,\n };\n};\n\ntype DefsDirectoryStructure = {\n readonly files: ReadonlyArray<{\n readonly relativePath: string;\n readonly content: string;\n }>;\n readonly importPaths: Record<DefinitionCategory, string>;\n};\n\nexport type CategoryVars = {\n readonly enums: readonly DefinitionVar[];\n readonly inputs: readonly DefinitionVar[];\n readonly objects: readonly DefinitionVar[];\n readonly unions: readonly DefinitionVar[];\n};\n\n/**\n * Generate the complete _defs directory structure.\n */\nexport const generateDefsStructure = (\n schemaName: string,\n categoryVars: CategoryVars,\n chunkSize: number,\n): DefsDirectoryStructure => {\n const files: Array<{ relativePath: string; content: string }> = [];\n const importPaths: Record<DefinitionCategory, string> = {\n enums: \"./_defs/enums\",\n inputs: \"./_defs/inputs\",\n objects: \"./_defs/objects\",\n unions: \"./_defs/unions\",\n };\n\n const categories: DefinitionCategory[] = [\"enums\", \"inputs\", \"objects\", \"unions\"];\n\n for (const category of categories) {\n const vars = categoryVars[category];\n const needsDefineEnum = category === \"enums\";\n\n if (needsChunking(vars, chunkSize)) {\n // Generate chunked files\n const chunked = generateChunkedDefinitionFiles(category, schemaName, vars, chunkSize);\n\n // Update import path to point to the directory (which has index.ts)\n importPaths[category] = `./_defs/${category}`;\n\n // Add index file\n files.push({\n relativePath: `_defs/${category}/index.ts`,\n content: chunked.indexContent,\n });\n\n // Add chunk files\n for (const chunk of chunked.chunks) {\n files.push({\n relativePath: `_defs/${category}/chunk-${chunk.chunkIndex}.ts`,\n content: chunk.content,\n });\n }\n } 
else {\n // Generate single file\n const content = generateDefinitionFile({\n category,\n schemaName,\n vars,\n needsDefineEnum,\n });\n\n files.push({\n relativePath: `_defs/${category}.ts`,\n content,\n });\n }\n }\n\n return { files, importPaths };\n};\n","import { mkdirSync, writeFileSync } from \"node:fs\";\nimport { dirname, resolve } from \"node:path\";\nimport { err, ok } from \"neverthrow\";\n\nimport type { CodegenError } from \"./types\";\n\nexport const writeModule = (outPath: string, contents: string) => {\n const targetPath = resolve(outPath);\n\n try {\n mkdirSync(dirname(targetPath), { recursive: true });\n writeFileSync(targetPath, contents);\n return ok<void, CodegenError>(undefined);\n } catch (error) {\n const message = error instanceof Error ? error.message : String(error);\n return err<void, CodegenError>({\n code: \"EMIT_FAILED\",\n message,\n outPath: targetPath,\n });\n }\n};\n","import { createHash } from \"node:crypto\";\nimport { existsSync, readFileSync } from \"node:fs\";\nimport { resolve } from \"node:path\";\nimport { concatAST, type DocumentNode, parse, print } from \"graphql\";\nimport { err, ok } from \"neverthrow\";\n\nimport type { CodegenError } from \"./types\";\n\n/**\n * Load a single schema file.\n * @internal Use loadSchema for public API.\n */\nexport const loadSingleSchema = (schemaPath: string) => {\n const resolvedPath = resolve(schemaPath);\n\n if (!existsSync(resolvedPath)) {\n return err<DocumentNode, CodegenError>({\n code: \"SCHEMA_NOT_FOUND\",\n message: `Schema file not found at ${resolvedPath}`,\n schemaPath: resolvedPath,\n });\n }\n\n try {\n const schemaSource = readFileSync(resolvedPath, \"utf8\");\n const document = parse(schemaSource);\n return ok<DocumentNode, CodegenError>(document);\n } catch (error) {\n const message = error instanceof Error ? 
error.message : String(error);\n return err<DocumentNode, CodegenError>({\n code: \"SCHEMA_INVALID\",\n message: `SchemaValidationError: ${message}`,\n schemaPath: resolvedPath,\n });\n }\n};\n\n/**\n * Load and merge multiple schema files into a single DocumentNode.\n * Uses GraphQL's concatAST to combine definitions from all files.\n */\nexport const loadSchema = (schemaPaths: readonly string[]) => {\n const documents: DocumentNode[] = [];\n\n for (const schemaPath of schemaPaths) {\n const result = loadSingleSchema(schemaPath);\n if (result.isErr()) {\n return err<DocumentNode, CodegenError>(result.error);\n }\n documents.push(result.value);\n }\n\n // Merge all documents into one\n const merged = concatAST(documents);\n return ok<DocumentNode, CodegenError>(merged);\n};\n\nexport const hashSchema = (document: DocumentNode): string => createHash(\"sha256\").update(print(document)).digest(\"hex\");\n","import { existsSync } from \"node:fs\";\nimport { basename, dirname, extname, join, relative, resolve } from \"node:path\";\nimport { err, ok } from \"neverthrow\";\nimport { defaultBundler } from \"./bundler\";\nimport { generateDefsStructure } from \"./defs-generator\";\nimport { writeModule } from \"./file\";\nimport { generateMultiSchemaModule } from \"./generator\";\nimport { hashSchema, loadSchema } from \"./schema\";\nimport type { CodegenOptions, CodegenResult, CodegenSuccess } from \"./types\";\n\nconst extensionMap: Record<string, string> = {\n \".ts\": \".js\",\n \".tsx\": \".js\",\n \".mts\": \".mjs\",\n \".cts\": \".cjs\",\n \".js\": \".js\",\n \".mjs\": \".mjs\",\n \".cjs\": \".cjs\",\n};\n\ntype ImportSpecifierOptions = {\n includeExtension?: boolean;\n};\n\nconst toImportSpecifier = (fromPath: string, targetPath: string, options?: ImportSpecifierOptions): string => {\n const fromDir = dirname(fromPath);\n const normalized = relative(fromDir, targetPath).replace(/\\\\/g, \"/\");\n const sourceExt = extname(targetPath);\n\n // When includeExtension is false (default), strip the extension entirely\n if (!options?.includeExtension) {\n if (normalized.length === 0) {\n return `./${basename(targetPath, sourceExt)}`;\n }\n const withPrefix = normalized.startsWith(\".\") ? normalized : `./${normalized}`;\n const currentExt = extname(withPrefix);\n return currentExt ? withPrefix.slice(0, -currentExt.length) : withPrefix;\n }\n\n // When includeExtension is true, convert to runtime extension\n const runtimeExt = extensionMap[sourceExt] ?? sourceExt;\n\n if (normalized.length === 0) {\n const base = runtimeExt !== sourceExt ? basename(targetPath, sourceExt) : basename(targetPath);\n return `./${base}${runtimeExt}`;\n }\n\n const withPrefix = normalized.startsWith(\".\") ? normalized : `./${normalized}`;\n if (!runtimeExt) {\n return withPrefix;\n }\n if (withPrefix.endsWith(runtimeExt)) {\n return withPrefix;\n }\n\n const currentExt = extname(withPrefix);\n const withoutExt = currentExt ? 
withPrefix.slice(0, -currentExt.length) : withPrefix;\n return `${withoutExt}${runtimeExt}`;\n};\n\nexport const runCodegen = async (options: CodegenOptions): Promise<CodegenResult> => {\n const outPath = resolve(options.outPath);\n const importSpecifierOptions = { includeExtension: options.importExtension };\n\n // Validate that all schema and inject files exist\n for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {\n const scalarPath = resolve(schemaConfig.inject.scalars);\n if (!existsSync(scalarPath)) {\n return err({\n code: \"INJECT_MODULE_NOT_FOUND\",\n message: `Scalar module not found for schema '${schemaName}': ${scalarPath}`,\n injectPath: scalarPath,\n });\n }\n\n if (schemaConfig.inject.adapter) {\n const adapterPath = resolve(schemaConfig.inject.adapter);\n if (!existsSync(adapterPath)) {\n return err({\n code: \"INJECT_MODULE_NOT_FOUND\",\n message: `Adapter module not found for schema '${schemaName}': ${adapterPath}`,\n injectPath: adapterPath,\n });\n }\n }\n }\n\n // Load all schemas\n const schemas = new Map<string, import(\"graphql\").DocumentNode>();\n const schemaHashes: Record<string, { schemaHash: string; objects: number; enums: number; inputs: number; unions: number }> = {};\n\n for (const [name, schemaConfig] of Object.entries(options.schemas)) {\n const result = await loadSchema(schemaConfig.schema).match(\n (doc) => Promise.resolve(ok(doc)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (result.isErr()) {\n return err(result.error);\n }\n\n schemas.set(name, result.value);\n }\n\n // Build injection config for each schema\n const injectionConfig = new Map<\n string,\n {\n scalarImportPath: string;\n adapterImportPath?: string;\n }\n >();\n\n for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {\n const injectConfig = schemaConfig.inject;\n\n injectionConfig.set(schemaName, {\n scalarImportPath: toImportSpecifier(outPath, resolve(injectConfig.scalars), importSpecifierOptions),\n ...(injectConfig.adapter\n ? { adapterImportPath: toImportSpecifier(outPath, resolve(injectConfig.adapter), importSpecifierOptions) }\n : {}),\n });\n }\n\n // Build defaultInputDepth and inputDepthOverrides config for each schema\n const defaultInputDepthConfig = new Map<string, number>();\n const inputDepthOverridesConfig = new Map<string, Readonly<Record<string, number>>>();\n\n for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {\n if (schemaConfig.defaultInputDepth !== undefined && schemaConfig.defaultInputDepth !== 3) {\n defaultInputDepthConfig.set(schemaName, schemaConfig.defaultInputDepth);\n }\n if (schemaConfig.inputDepthOverrides && Object.keys(schemaConfig.inputDepthOverrides).length > 0) {\n inputDepthOverridesConfig.set(schemaName, schemaConfig.inputDepthOverrides);\n }\n }\n\n // Get chunkSize config (default: 100)\n const chunkSize = options.chunkSize ?? 100;\n\n // Generate multi-schema module (this becomes _internal.ts content)\n const {\n code: internalCode,\n injectsCode,\n categoryVars,\n } = generateMultiSchemaModule(schemas, {\n injection: injectionConfig,\n defaultInputDepth: defaultInputDepthConfig.size > 0 ? defaultInputDepthConfig : undefined,\n inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? 
inputDepthOverridesConfig : undefined,\n chunkSize,\n });\n\n // Generate index.ts wrapper (simple re-export from _internal)\n const indexCode = `/**\n * Generated by @soda-gql/codegen\n * @module\n * @generated\n */\nexport * from \"./_internal\";\n`;\n\n // Calculate individual schema stats and hashes\n for (const [name, document] of schemas.entries()) {\n const schemaIndex = (await import(\"./generator\")).createSchemaIndex(document);\n const objects = Array.from(schemaIndex.objects.keys()).filter((n) => !n.startsWith(\"__\")).length;\n const enums = Array.from(schemaIndex.enums.keys()).filter((n) => !n.startsWith(\"__\")).length;\n const inputs = Array.from(schemaIndex.inputs.keys()).filter((n) => !n.startsWith(\"__\")).length;\n const unions = Array.from(schemaIndex.unions.keys()).filter((n) => !n.startsWith(\"__\")).length;\n\n schemaHashes[name] = {\n schemaHash: hashSchema(document),\n objects,\n enums,\n inputs,\n unions,\n };\n }\n\n // Write _internal-injects.ts (adapter imports only, referenced by both _internal.ts and prebuilt)\n const injectsPath = join(dirname(outPath), \"_internal-injects.ts\");\n if (injectsCode) {\n const injectsWriteResult = await writeModule(injectsPath, injectsCode).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (injectsWriteResult.isErr()) {\n return err(injectsWriteResult.error);\n }\n }\n\n // Write _defs/ files (always enabled)\n const defsPaths: string[] = [];\n if (categoryVars) {\n const outDir = dirname(outPath);\n\n // Merge all schema categoryVars into a single combined structure\n // This ensures all definitions from all schemas go into the same defs files\n type DefinitionVar = { name: string; code: string };\n const combinedVars = {\n enums: [] as DefinitionVar[],\n inputs: [] as DefinitionVar[],\n objects: [] as DefinitionVar[],\n unions: [] as DefinitionVar[],\n };\n\n for (const vars of Object.values(categoryVars)) {\n combinedVars.enums.push(...vars.enums);\n combinedVars.inputs.push(...vars.inputs);\n combinedVars.objects.push(...vars.objects);\n combinedVars.unions.push(...vars.unions);\n }\n\n // Generate defs structure for all schemas combined\n const defsStructure = generateDefsStructure(\"combined\", combinedVars, chunkSize);\n\n for (const file of defsStructure.files) {\n const filePath = join(outDir, file.relativePath);\n\n // writeModule handles directory creation internally via mkdirSync\n const writeResult = await writeModule(filePath, file.content).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (writeResult.isErr()) {\n return err(writeResult.error);\n }\n\n defsPaths.push(filePath);\n }\n }\n\n // Write _internal.ts (implementation)\n const internalPath = join(dirname(outPath), \"_internal.ts\");\n const internalWriteResult = await writeModule(internalPath, internalCode).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (internalWriteResult.isErr()) {\n return err(internalWriteResult.error);\n }\n\n // Write index.ts (re-export wrapper)\n const indexWriteResult = await writeModule(outPath, indexCode).match(\n () => Promise.resolve(ok(undefined)),\n (error) => Promise.resolve(err(error)),\n );\n\n if (indexWriteResult.isErr()) {\n return err(indexWriteResult.error);\n }\n\n // Bundle the generated module\n const bundleOutcome = await defaultBundler.bundle({\n sourcePath: outPath,\n external: [\"@soda-gql/core\", \"@soda-gql/runtime\"],\n });\n const bundleResult = 
bundleOutcome.match(\n (result) => ok(result),\n (error) => err(error),\n );\n\n if (bundleResult.isErr()) {\n return err(bundleResult.error);\n }\n\n return ok({\n schemas: schemaHashes,\n outPath,\n internalPath,\n injectsPath,\n cjsPath: bundleResult.value.cjsPath,\n ...(defsPaths.length > 0 ? { defsPaths } : {}),\n } satisfies CodegenSuccess);\n};\n"],"mappings":";;;;;;;;;AAMA,MAAM,mBAAmB;;;;;;;;;;;AAYzB,MAAa,uBAAuB,YAAoB;CACtD,MAAM,oCAAqB,QAAQ;AAEnC,KAAI;AACF,8BAAe,WAAW,EAAE;AAC1B,8BAA+B;IAC7B,MAAM;IACN,SAAS,iCAAiC;IAC1C,SAAS;IACV,CAAC;;AAGJ,gDAAkB,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,6BAAc,YAAY,GAAG,iBAAiB,IAAI;AAClD,4BAA8B,UAAU;UACjC,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,6BAA+B;GAC7B,MAAM;GACN;GACA,SAAS;GACV,CAAC;;;AAIN,MAAa,0BAAkC,GAAG,iBAAiB;;;;ACtCnE,MAAaA,iBAA0B;CACrC,MAAM;CACN,QAAQ,OAAO,EAAE,YAAY,eAAe;AAC1C,MAAI;GACF,MAAM,mCAAoB,WAAW;GACrC,MAAM,WAAW,WAAW,MAAM,GAAG,CAAC,UAAU,OAAO;GACvD,MAAM,UAAU,GAAG,SAAS;AAE5B,4BAAY;IACV,aAAa,CAAC,WAAW;IACzB,SAAS;IACT,QAAQ;IACR,UAAU;IACV,QAAQ;IACR,UAAU,CAAC,GAAG,SAAS;IACvB,WAAW;IACX,QAAQ;IACR,aAAa;IACd,CAAC;AAEF,6BAAU,EAAE,SAAS,CAAC;WACf,OAAO;AACd,8BAAW;IACT,MAAM;IACN,SAAS,+BAA+B,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;IAC9F,SAAS;IACV,CAAC;;;CAGP;;;;;;;ACnBD,MAAa,cAAiB,OAAqB,SAAwB;AACzE,KAAI,QAAQ,GAAG;AACb,SAAO,CAAC,MAAM,KAAK,MAAM,CAAC;;CAG5B,MAAMC,SAAgB,EAAE;AACxB,MAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK,MAAM;AAC3C,SAAO,KAAK,MAAM,MAAM,GAAG,IAAI,KAAK,CAAC;;AAGvC,QAAO;;;;;AAMT,MAAa,iBAAiB,MAAgC,cAA+B;AAC3F,QAAO,KAAK,SAAS;;;;;AAavB,MAAa,0BAA0B,YAA2C;CAChF,MAAM,EAAE,UAAU,MAAM,oBAAoB;AAE5C,KAAI,KAAK,WAAW,GAAG;AACrB,SAAO;KACN,SAAS;;;;;CAMZ,MAAMC,UAAoB,EAAE;AAC5B,KAAI,mBAAmB,aAAa,SAAS;AAC3C,UAAQ,KAAK,iDAA+C;;CAG9D,MAAM,eAAe,QAAQ,SAAS,IAAI,GAAG,QAAQ,KAAK,KAAK,CAAC,QAAQ;CACxE,MAAM,mBAAmB,KAAK,KAAK,MAAM,UAAU,EAAE,OAAO,CAAC,KAAK,KAAK;AAEvE,QAAO;KACJ,SAAS;;;EAGZ,eAAe,iBAAiB;;;;;;AAelC,MAAa,qBAAqB,YAAsC;CACtE,MAAM,EAAE,UAAU,MAAM,YAAY,oBAAoB;AAExD,KAAI,KAAK,WAAW,GAAG;AACrB,SAAO;KACN,SAAS,SAAS,WAAW;;;;;CAMhC,MAAMA,UAAoB,EAAE;AAC5B,KAAI,mBAAmB,aAAa,SAAS;AAC3C,UAAQ,KAAK,iDAA+C;;CAG9D,MAAM,eAAe,QAAQ,SAAS,IAAI,GAAG,QAAQ,KAAK,KAAK,CAAC,QAAQ;CACxE,MAAM,mBAAmB,KAAK,KAAK,MAAM,UAAU,EAAE,OAAO,CAAC,KAAK,KAAK;AAEvE,QAAO;KACJ,SAAS,SAAS,WAAW;;;EAGhC,eAAe,iBAAiB;;;;;;AAalC,MAAa,sBAAsB,YAAuC;CACxE,MAAM,EAAE,UAAU,eAAe;CAEjC,MAAM,YAAY,MAAM,KAAK,EAAE,QAAQ,YAAY,GAAG,GAAG,MAAM,0BAA0B,EAAE,IAAI,CAAC,KAAK,KAAK;AAE1G,QAAO;KACJ,SAAS;;;EAGZ,UAAU;;;;;;AAgBZ,MAAa,kCACX,UACA,YACA,MACA,cAC2B;CAC3B,MAAM,SAAS,WAAW,MAAM,UAAU;CAC1C,MAAM,kBAAkB,aAAa;CAErC,MAAM,gBAAgB,OAAO,KAAK,WAAW,gBAAgB;EAC3D;EACA,SAAS,kBAAkB;GACzB;GACA;GACA,MAAM;GACN;GACA;GACD,CAAC;EACF,UAAU,UAAU,KAAK,MAAM,EAAE,KAAK;EACvC,EAAE;CAEH,MAAM,cAAc,KAAK,KAAK,MAAM,EAAE,KAAK;CAC3C,MAAM,eAAe,mBAAmB;EACtC;EACA,YAAY,OAAO;EACnB,UAAU;EACX,CAAC;AAEF,QAAO;EACL;EACA,QAAQ;EACT;;;;;AAqBH,MAAa,yBACX,YACA,cACA,cAC2B;CAC3B,MAAMC,QAA0D,EAAE;CAClE,MAAMC,cAAkD;EACtD,OAAO;EACP,QAAQ;EACR,SAAS;EACT,QAAQ;EACT;CAED,MAAMC,aAAmC;EAAC;EAAS;EAAU;EAAW;EAAS;AAEjF,MAAK,MAAM,YAAY,YAAY;EACjC,MAAM,OAAO,aAAa;EAC1B,MAAM,kBAAkB,aAAa;AAErC,MAAI,cAAc,MAAM,UAAU,EAAE;GAElC,MAAM,UAAU,+BAA+B,UAAU,YAAY,MAAM,UAAU;AAGrF,eAAY,YAAY,WAAW;AAGnC,SAAM,KAAK;IACT,cAAc,SAAS,SAAS;IAChC,SAAS,QAAQ;IAClB,CAAC;AAGF,QAAK,MAAM,SAAS,QAAQ,QAAQ;AAClC,UAAM,KAAK;KACT,cAAc,SAAS,SAAS,SAAS,MAAM,WAAW;KAC1D,SAAS,MAAM;KAChB,CAAC;;SAEC;GAEL,MAAM,UAAU,uBAAuB;IACrC;IACA;IACA;IACA;IACD,CAAC;AAEF,SAAM,KAAK;IACT,cAAc,SAAS,SAAS;IAChC;IACD,CAAC;;;AAIN,QAAO;EAAE;EAAO;EAAa;;;;;ACrP/B,MAAa,eAAe,SAAiB,aAAqB;CAChE,MAAM,oCAAqB,QAAQ;AAEnC,KAAI;AACF,gDAAkB,WAAW,EAAE,EAAE,WAAW,MAAM,CAAC;AACnD,6BAAc,YAAY,SAAS
;AACnC,4BAA8B,UAAU;UACjC,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,6BAA+B;GAC7B,MAAM;GACN;GACA,SAAS;GACV,CAAC;;;;;;;;;;ACPN,MAAa,oBAAoB,eAAuB;CACtD,MAAM,sCAAuB,WAAW;AAExC,KAAI,yBAAY,aAAa,EAAE;AAC7B,6BAAuC;GACrC,MAAM;GACN,SAAS,4BAA4B;GACrC,YAAY;GACb,CAAC;;AAGJ,KAAI;EACF,MAAM,yCAA4B,cAAc,OAAO;EACvD,MAAM,8BAAiB,aAAa;AACpC,4BAAsC,SAAS;UACxC,OAAO;EACd,MAAM,UAAU,iBAAiB,QAAQ,MAAM,UAAU,OAAO,MAAM;AACtE,6BAAuC;GACrC,MAAM;GACN,SAAS,0BAA0B;GACnC,YAAY;GACb,CAAC;;;;;;;AAQN,MAAa,cAAc,gBAAmC;CAC5D,MAAMC,YAA4B,EAAE;AAEpC,MAAK,MAAM,cAAc,aAAa;EACpC,MAAM,SAAS,iBAAiB,WAAW;AAC3C,MAAI,OAAO,OAAO,EAAE;AAClB,8BAAuC,OAAO,MAAM;;AAEtD,YAAU,KAAK,OAAO,MAAM;;CAI9B,MAAM,gCAAmB,UAAU;AACnC,2BAAsC,OAAO;;AAG/C,MAAa,cAAc,yCAA8C,SAAS,CAAC,0BAAa,SAAS,CAAC,CAAC,OAAO,MAAM;;;;AC/CxH,MAAMC,eAAuC;CAC3C,OAAO;CACP,QAAQ;CACR,QAAQ;CACR,QAAQ;CACR,OAAO;CACP,QAAQ;CACR,QAAQ;CACT;AAMD,MAAM,qBAAqB,UAAkB,YAAoB,YAA6C;CAC5G,MAAM,iCAAkB,SAAS;CACjC,MAAM,qCAAsB,SAAS,WAAW,CAAC,QAAQ,OAAO,IAAI;CACpE,MAAM,mCAAoB,WAAW;AAGrC,KAAI,CAAC,SAAS,kBAAkB;AAC9B,MAAI,WAAW,WAAW,GAAG;AAC3B,UAAO,6BAAc,YAAY,UAAU;;EAE7C,MAAMC,eAAa,WAAW,WAAW,IAAI,GAAG,aAAa,KAAK;EAClE,MAAMC,sCAAqBD,aAAW;AACtC,SAAOC,eAAaD,aAAW,MAAM,GAAG,CAACC,aAAW,OAAO,GAAGD;;CAIhE,MAAM,aAAa,aAAa,cAAc;AAE9C,KAAI,WAAW,WAAW,GAAG;EAC3B,MAAM,OAAO,eAAe,oCAAqB,YAAY,UAAU,2BAAY,WAAW;AAC9F,SAAO,KAAK,OAAO;;CAGrB,MAAM,aAAa,WAAW,WAAW,IAAI,GAAG,aAAa,KAAK;AAClE,KAAI,CAAC,YAAY;AACf,SAAO;;AAET,KAAI,WAAW,SAAS,WAAW,EAAE;AACnC,SAAO;;CAGT,MAAM,oCAAqB,WAAW;CACtC,MAAM,aAAa,aAAa,WAAW,MAAM,GAAG,CAAC,WAAW,OAAO,GAAG;AAC1E,QAAO,GAAG,aAAa;;AAGzB,MAAa,aAAa,OAAO,YAAoD;CACnF,MAAM,iCAAkB,QAAQ,QAAQ;CACxC,MAAM,yBAAyB,EAAE,kBAAkB,QAAQ,iBAAiB;AAG5E,MAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;EACxE,MAAM,oCAAqB,aAAa,OAAO,QAAQ;AACvD,MAAI,yBAAY,WAAW,EAAE;AAC3B,8BAAW;IACT,MAAM;IACN,SAAS,uCAAuC,WAAW,KAAK;IAChE,YAAY;IACb,CAAC;;AAGJ,MAAI,aAAa,OAAO,SAAS;GAC/B,MAAM,qCAAsB,aAAa,OAAO,QAAQ;AACxD,OAAI,yBAAY,YAAY,EAAE;AAC5B,+BAAW;KACT,MAAM;KACN,SAAS,wCAAwC,WAAW,KAAK;KACjE,YAAY;KACb,CAAC;;;;CAMR,MAAM,UAAU,IAAI,KAA6C;CACjE,MAAME,eAAuH,EAAE;AAE/H,MAAK,MAAM,CAAC,MAAM,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;EAClE,MAAM,SAAS,MAAM,WAAW,aAAa,OAAO,CAAC,OAClD,QAAQ,QAAQ,2BAAW,IAAI,CAAC,GAChC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,MAAI,OAAO,OAAO,EAAE;AAClB,8BAAW,OAAO,MAAM;;AAG1B,UAAQ,IAAI,MAAM,OAAO,MAAM;;CAIjC,MAAM,kBAAkB,IAAI,KAMzB;AAEH,MAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;EACxE,MAAM,eAAe,aAAa;AAElC,kBAAgB,IAAI,YAAY;GAC9B,kBAAkB,kBAAkB,gCAAiB,aAAa,QAAQ,EAAE,uBAAuB;GACnG,GAAI,aAAa,UACb,EAAE,mBAAmB,kBAAkB,gCAAiB,aAAa,QAAQ,EAAE,uBAAuB,EAAE,GACxG,EAAE;GACP,CAAC;;CAIJ,MAAM,0BAA0B,IAAI,KAAqB;CACzD,MAAM,4BAA4B,IAAI,KAA+C;AAErF,MAAK,MAAM,CAAC,YAAY,iBAAiB,OAAO,QAAQ,QAAQ,QAAQ,EAAE;AACxE,MAAI,aAAa,sBAAsB,aAAa,aAAa,sBAAsB,GAAG;AACxF,2BAAwB,IAAI,YAAY,aAAa,kBAAkB;;AAEzE,MAAI,aAAa,uBAAuB,OAAO,KAAK,aAAa,oBAAoB,CAAC,SAAS,GAAG;AAChG,6BAA0B,IAAI,YAAY,aAAa,oBAAoB;;;CAK/E,MAAM,YAAY,QAAQ,aAAa;CAGvC,MAAM,EACJ,MAAM,cACN,aACA,iBACEC,4CAA0B,SAAS;EACrC,WAAW;EACX,mBAAmB,wBAAwB,OAAO,IAAI,0BAA0B;EAChF,qBAAqB,0BAA0B,OAAO,IAAI,4BAA4B;EACtF;EACD,CAAC;CAGF,MAAM,YAAY;;;;;;;AASlB,MAAK,MAAM,CAAC,MAAM,aAAa,QAAQ,SAAS,EAAE;EAChD,MAAM,eAAe,2CAAM,8BAAuB,kBAAkB,SAAS;EAC7E,MAAM,UAAU,MAAM,KAAK,YAAY,QAAQ,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;EAC1F,MAAM,QAAQ,MAAM,KAAK,YAAY,MAAM,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;EACtF,MAAM,SAAS,MAAM,KAAK,YAAY,OAAO,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;EACxF,MAAM,SAAS,MAAM,KAAK,YAAY,OAAO,MAAM,CAAC,CAAC,QAAQ,MAAM,CAAC,EAAE,WAAW,KAAK,CAAC,CAAC;AAExF,eAAa,QAAQ;GACnB,YAAY,WAAW,SAAS;GAChC;GACA;GACA;GACA;GA
CD;;CAIH,MAAM,yDAA2B,QAAQ,EAAE,uBAAuB;AAClE,KAAI,aAAa;EACf,MAAM,qBAAqB,MAAM,YAAY,aAAa,YAAY,CAAC,YAC/D,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,MAAI,mBAAmB,OAAO,EAAE;AAC9B,8BAAW,mBAAmB,MAAM;;;CAKxC,MAAMC,YAAsB,EAAE;AAC9B,KAAI,cAAc;EAChB,MAAM,gCAAiB,QAAQ;EAK/B,MAAM,eAAe;GACnB,OAAO,EAAE;GACT,QAAQ,EAAE;GACV,SAAS,EAAE;GACX,QAAQ,EAAE;GACX;AAED,OAAK,MAAM,QAAQ,OAAO,OAAO,aAAa,EAAE;AAC9C,gBAAa,MAAM,KAAK,GAAG,KAAK,MAAM;AACtC,gBAAa,OAAO,KAAK,GAAG,KAAK,OAAO;AACxC,gBAAa,QAAQ,KAAK,GAAG,KAAK,QAAQ;AAC1C,gBAAa,OAAO,KAAK,GAAG,KAAK,OAAO;;EAI1C,MAAM,gBAAgB,sBAAsB,YAAY,cAAc,UAAU;AAEhF,OAAK,MAAM,QAAQ,cAAc,OAAO;GACtC,MAAM,+BAAgB,QAAQ,KAAK,aAAa;GAGhD,MAAM,cAAc,MAAM,YAAY,UAAU,KAAK,QAAQ,CAAC,YACtD,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,OAAI,YAAY,OAAO,EAAE;AACvB,+BAAW,YAAY,MAAM;;AAG/B,aAAU,KAAK,SAAS;;;CAK5B,MAAM,0DAA4B,QAAQ,EAAE,eAAe;CAC3D,MAAM,sBAAsB,MAAM,YAAY,cAAc,aAAa,CAAC,YAClE,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,KAAI,oBAAoB,OAAO,EAAE;AAC/B,6BAAW,oBAAoB,MAAM;;CAIvC,MAAM,mBAAmB,MAAM,YAAY,SAAS,UAAU,CAAC,YACvD,QAAQ,2BAAW,UAAU,CAAC,GACnC,UAAU,QAAQ,4BAAY,MAAM,CAAC,CACvC;AAED,KAAI,iBAAiB,OAAO,EAAE;AAC5B,6BAAW,iBAAiB,MAAM;;CAIpC,MAAM,gBAAgB,MAAMC,eAAe,OAAO;EAChD,YAAY;EACZ,UAAU,CAAC,kBAAkB,oBAAoB;EAClD,CAAC;CACF,MAAM,eAAe,cAAc,OAChC,8BAAc,OAAO,GACrB,8BAAc,MAAM,CACtB;AAED,KAAI,aAAa,OAAO,EAAE;AACxB,6BAAW,aAAa,MAAM;;AAGhC,2BAAU;EACR,SAAS;EACT;EACA;EACA;EACA,SAAS,aAAa,MAAM;EAC5B,GAAI,UAAU,SAAS,IAAI,EAAE,WAAW,GAAG,EAAE;EAC9C,CAA0B"}
package/dist/index.d.cts CHANGED
@@ -19,6 +19,7 @@ type CodegenOptions = {
  readonly outPath: string;
  readonly format: CodegenFormat;
  readonly importExtension?: boolean;
+ readonly chunkSize?: number;
  };
  type CodegenCliCommand = {
  readonly kind: "generate";
@@ -68,6 +69,7 @@ type CodegenSuccess = {
68
69
  readonly internalPath: string;
69
70
  readonly injectsPath: string;
70
71
  readonly cjsPath: string;
72
+ readonly defsPaths?: readonly string[];
71
73
  };
72
74
  type CodegenResult = Result<CodegenSuccess, CodegenError>;
73
75
  //#endregion
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.cts","names":[],"sources":["../src/types.ts","../src/inject-template.ts","../src/runner.ts","../src/schema.ts"],"sourcesContent":[],"mappings":";;;;;KAEY,aAAA;KAGA,mBAAA;;;AAHZ,CAAA;AAGY,KAMA,mBAAA,GANmB;EAMnB,SAAA,MAAA,EAAA,SAAmB,MAAA,EAAA;EAEZ,SAAA,MAAA,EAAA,mBAAA;EAEuB,SAAA,iBAAA,CAAA,EAAA,MAAA;EAAT,SAAA,mBAAA,CAAA,EAAA,QAAA,CAAS,MAAT,CAAA,MAAA,EAAA,MAAA,CAAA,CAAA;CAAQ;AAG7B,KAAA,cAAA,GAAc;EACS,SAAA,OAAA,EAAf,MAAe,CAAA,MAAA,EAAA,mBAAA,CAAA;EAAf,SAAA,OAAA,EAAA,MAAA;EAED,SAAA,MAAA,EAAA,aAAA;EAAa,SAAA,eAAA,CAAA,EAAA,OAAA;AAIhC,CAAA;AAWY,KAXA,iBAAA,GAWY;EAoCZ,SAAA,IAAA,EAAA,UAAc;EAiBd,SAAA,OAAa,EA7DD,cA6DC;CAAU,GAAA;EAAgB,SAAA,IAAA,EAAA,oBAAA;EAAvB,SAAA,OAAA,EAAA,MAAA;EAAM,SAAA,MAAA,EAxDX,aAwDW;;KArDtB,YAAA;;EClBC,SAAA,OAAA,EAAA,MAuBZ;EAvBkD,SAAA,UAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,gBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,UAAA,EAAA,MAAA;AAyBnD,CAAA,GAAa;;;;ACgBb,CAAA,GAAa;EAA6B,SAAA,IAAA,EAAA,wBAAA;EAAyB,SAAA,OAAA,EAAA,MAAA;CAAR,GAAA;EAAO,SAAA,IAAA,EAAA,yBAAA;;;;EC/CrD,SAAA,IAAA,EAAA,wBAuBZ;EAvBkD,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,wBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;CAAA;AA6BtC,KH+BD,cAAA,GGjBX;EAdwD,SAAA,OAAA,EHgCrC,MGhCqC,CAAA,MAAA,EAAA;IAAA,SAAA,UAAA,EAAA,MAAA;IAAA,SAAA,OAAA,EAAA,MAAA;IAAA,SAAA,KAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;EAAA,CAAA,CAAA;EAgB5C,SAAA,OAA2G,EAAA,MAAA;;;;;KHgC5G,aAAA,GAAgB,OAAO,gBAAgB;;;cCvEtC,0CAAsC,WAAA,CAAA,UAAA,gBAAA,WAAA,CAAA,SAAA;cAyBtC;;;cCgBA,sBAA6B,mBAAiB,QAAQ;;;;;;AFzDnE;AAGY,cGOC,gBHPkB,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,GGOoB,WAAA,CAAA,GHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA,GGOoB,WAAA,CAAA,EHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA;AAM/B;;;;AAIyC,cG0B5B,UH1B4B,EAAA,CAAA,WAAA,EAAA,SAAA,MAAA,EAAA,EAAA,GG0BgB,WAAA,CAAA,GH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA,GG0BgB,WAAA,CAAA,EH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA;AAG7B,cGuCC,UHvCa,EAAA,CAAA,QAAA,EGuCW,YHvCX,EAAA,GAAA,MAAA"}
1
+ {"version":3,"file":"index.d.cts","names":[],"sources":["../src/types.ts","../src/inject-template.ts","../src/runner.ts","../src/schema.ts"],"sourcesContent":[],"mappings":";;;;;KAEY,aAAA;KAGA,mBAAA;;;AAHZ,CAAA;AAGY,KAMA,mBAAA,GANmB;EAMnB,SAAA,MAAA,EAAA,SAAmB,MAAA,EAAA;EAEZ,SAAA,MAAA,EAAA,mBAAA;EAEuB,SAAA,iBAAA,CAAA,EAAA,MAAA;EAAT,SAAA,mBAAA,CAAA,EAAA,QAAA,CAAS,MAAT,CAAA,MAAA,EAAA,MAAA,CAAA,CAAA;CAAQ;AAG7B,KAAA,cAAA,GAAc;EACS,SAAA,OAAA,EAAf,MAAe,CAAA,MAAA,EAAA,mBAAA,CAAA;EAAf,SAAA,OAAA,EAAA,MAAA;EAED,SAAA,MAAA,EAAA,aAAA;EAAa,SAAA,eAAA,CAAA,EAAA,OAAA;EAKpB,SAAA,SAAA,CAAA,EAAiB,MAAA;AAW7B,CAAA;AAoCY,KA/CA,iBAAA,GAgDQ;EAiBR,SAAA,IAAA,EAAA,UAAa;EAAU,SAAA,OAAA,EA9DX,cA8DW;CAAgB,GAAA;EAAvB,SAAA,IAAA,EAAA,oBAAA;EAAM,SAAA,OAAA,EAAA,MAAA;mBAzDX;;KAGX,YAAA;ECnBC,SAAA,IAAA,EAAA,kBAuBZ;EAvBkD,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,UAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,gBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAyBtC,SAAA,UAAA,EAAyD,MAAA;;;;ECiBzD,SAAA,OAwNZ,EAAA,MAAA;CAxNyC,GAAA;EAAyB,SAAA,IAAA,EAAA,wBAAA;EAAR,SAAA,OAAA,EAAA,MAAA;CAAO,GAAA;;;;AChDlE,CAAA,GAAa;EAAsC,SAAA,IAAA,EAAA,wBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,wBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;AA6BnD,CAAA;AAAyD,KHgC7C,cAAA,GGhC6C;EAAA,SAAA,OAAA,EHiCrC,MGjCqC,CAAA,MAAA,EAAA;IAAA,SAAA,UAAA,EAAA,MAAA;IAAA,SAAA,OAAA,EAAA,MAAA;IAAA,SAAA,KAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;EAgB5C,CAAA,CAAA;;;;;;;KHkCD,aAAA,GAAgB,OAAO,gBAAgB;;;cCzEtC,0CAAsC,WAAA,CAAA,UAAA,gBAAA,WAAA,CAAA,SAAA;cAyBtC;;;cCiBA,sBAA6B,mBAAiB,QAAQ;;;;;;AF1DnE;AAGY,cGOC,gBHPkB,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,GGOoB,WAAA,CAAA,GHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA,GGOoB,WAAA,CAAA,EHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA;AAM/B;;;;AAIyC,cG0B5B,UH1B4B,EAAA,CAAA,WAAA,EAAA,SAAA,MAAA,EAAA,EAAA,GG0BgB,WAAA,CAAA,GH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA,GG0BgB,WAAA,CAAA,EH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA;AAG7B,cGuCC,UHvCa,EAAA,CAAA,QAAA,EGuCW,YHvCX,EAAA,GAAA,MAAA"}
package/dist/index.d.mts CHANGED
@@ -19,6 +19,7 @@ type CodegenOptions = {
19
19
  readonly outPath: string;
20
20
  readonly format: CodegenFormat;
21
21
  readonly importExtension?: boolean;
22
+ readonly chunkSize?: number;
22
23
  };
23
24
  type CodegenCliCommand = {
24
25
  readonly kind: "generate";
@@ -68,6 +69,7 @@ type CodegenSuccess = {
68
69
  readonly internalPath: string;
69
70
  readonly injectsPath: string;
70
71
  readonly cjsPath: string;
72
+ readonly defsPaths?: readonly string[];
71
73
  };
72
74
  type CodegenResult = Result<CodegenSuccess, CodegenError>;
73
75
  //#endregion
@@ -1 +1 @@
1
- {"version":3,"file":"index.d.mts","names":[],"sources":["../src/types.ts","../src/inject-template.ts","../src/runner.ts","../src/schema.ts"],"sourcesContent":[],"mappings":";;;;;KAEY,aAAA;KAGA,mBAAA;;;AAHZ,CAAA;AAGY,KAMA,mBAAA,GANmB;EAMnB,SAAA,MAAA,EAAA,SAAmB,MAAA,EAAA;EAEZ,SAAA,MAAA,EAAA,mBAAA;EAEuB,SAAA,iBAAA,CAAA,EAAA,MAAA;EAAT,SAAA,mBAAA,CAAA,EAAA,QAAA,CAAS,MAAT,CAAA,MAAA,EAAA,MAAA,CAAA,CAAA;CAAQ;AAG7B,KAAA,cAAA,GAAc;EACS,SAAA,OAAA,EAAf,MAAe,CAAA,MAAA,EAAA,mBAAA,CAAA;EAAf,SAAA,OAAA,EAAA,MAAA;EAED,SAAA,MAAA,EAAA,aAAA;EAAa,SAAA,eAAA,CAAA,EAAA,OAAA;AAIhC,CAAA;AAWY,KAXA,iBAAA,GAWY;EAoCZ,SAAA,IAAA,EAAA,UAAc;EAiBd,SAAA,OAAa,EA7DD,cA6DC;CAAU,GAAA;EAAgB,SAAA,IAAA,EAAA,oBAAA;EAAvB,SAAA,OAAA,EAAA,MAAA;EAAM,SAAA,MAAA,EAxDX,aAwDW;;KArDtB,YAAA;;EClBC,SAAA,OAAA,EAAA,MAuBZ;EAvBkD,SAAA,UAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,gBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,UAAA,EAAA,MAAA;AAyBnD,CAAA,GAAa;;;;ACgBb,CAAA,GAAa;EAA6B,SAAA,IAAA,EAAA,wBAAA;EAAyB,SAAA,OAAA,EAAA,MAAA;CAAR,GAAA;EAAO,SAAA,IAAA,EAAA,yBAAA;;;;EC/CrD,SAAA,IAAA,EAAA,wBAuBZ;EAvBkD,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,wBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;CAAA;AA6BtC,KH+BD,cAAA,GGjBX;EAdwD,SAAA,OAAA,EHgCrC,MGhCqC,CAAA,MAAA,EAAA;IAAA,SAAA,UAAA,EAAA,MAAA;IAAA,SAAA,OAAA,EAAA,MAAA;IAAA,SAAA,KAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;EAAA,CAAA,CAAA;EAgB5C,SAAA,OAA2G,EAAA,MAAnF;;;;;KHgCzB,aAAA,GAAgB,OAAO,gBAAgB;;;cCvEtC,0CAAsC,WAAA,CAAA,UAAA,gBAAA,WAAA,CAAA,SAAA;cAyBtC;;;cCgBA,sBAA6B,mBAAiB,QAAQ;;;;;;AFzDnE;AAGY,cGOC,gBHPkB,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,GGOoB,WAAA,CAAA,GHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA,GGOoB,WAAA,CAAA,EHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA;AAM/B;;;;AAIyC,cG0B5B,UH1B4B,EAAA,CAAA,WAAA,EAAA,SAAA,MAAA,EAAA,EAAA,GG0BgB,WAAA,CAAA,GH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA,GG0BgB,WAAA,CAAA,EH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA;AAG7B,cGuCC,UHvCa,EAAA,CAAA,QAAA,EGuCW,YHvCX,EAAA,GAAA,MAAA"}
1
+ {"version":3,"file":"index.d.mts","names":[],"sources":["../src/types.ts","../src/inject-template.ts","../src/runner.ts","../src/schema.ts"],"sourcesContent":[],"mappings":";;;;;KAEY,aAAA;KAGA,mBAAA;;;AAHZ,CAAA;AAGY,KAMA,mBAAA,GANmB;EAMnB,SAAA,MAAA,EAAA,SAAmB,MAAA,EAAA;EAEZ,SAAA,MAAA,EAAA,mBAAA;EAEuB,SAAA,iBAAA,CAAA,EAAA,MAAA;EAAT,SAAA,mBAAA,CAAA,EAAA,QAAA,CAAS,MAAT,CAAA,MAAA,EAAA,MAAA,CAAA,CAAA;CAAQ;AAG7B,KAAA,cAAA,GAAc;EACS,SAAA,OAAA,EAAf,MAAe,CAAA,MAAA,EAAA,mBAAA,CAAA;EAAf,SAAA,OAAA,EAAA,MAAA;EAED,SAAA,MAAA,EAAA,aAAA;EAAa,SAAA,eAAA,CAAA,EAAA,OAAA;EAKpB,SAAA,SAAA,CAAA,EAAiB,MAAA;AAW7B,CAAA;AAoCY,KA/CA,iBAAA,GAgDQ;EAiBR,SAAA,IAAA,EAAA,UAAa;EAAU,SAAA,OAAA,EA9DX,cA8DW;CAAgB,GAAA;EAAvB,SAAA,IAAA,EAAA,oBAAA;EAAM,SAAA,OAAA,EAAA,MAAA;mBAzDX;;KAGX,YAAA;ECnBC,SAAA,IAAA,EAAA,kBAuBZ;EAvBkD,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,UAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,gBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAyBtC,SAAA,UAAA,EAAyD,MAAA;;;;ECiBzD,SAAA,OAwNZ,EAAA,MAAA;CAxNyC,GAAA;EAAyB,SAAA,IAAA,EAAA,wBAAA;EAAR,SAAA,OAAA,EAAA,MAAA;CAAO,GAAA;;;;AChDlE,CAAA,GAAa;EAAsC,SAAA,IAAA,EAAA,wBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;CAAA,GAAA;EAAA,SAAA,IAAA,EAAA,wBAAA;EAAA,SAAA,OAAA,EAAA,MAAA;EAAA,SAAA,OAAA,EAAA,MAAA;AA6BnD,CAAA;AAAyD,KHgC7C,cAAA,GGhC6C;EAAA,SAAA,OAAA,EHiCrC,MGjCqC,CAAA,MAAA,EAAA;IAAA,SAAA,UAAA,EAAA,MAAA;IAAA,SAAA,OAAA,EAAA,MAAA;IAAA,SAAA,KAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;IAAA,SAAA,MAAA,EAAA,MAAA;EAgB5C,CAAA,CAAA;;;;;;;KHkCD,aAAA,GAAgB,OAAO,gBAAgB;;;cCzEtC,0CAAsC,WAAA,CAAA,UAAA,gBAAA,WAAA,CAAA,SAAA;cAyBtC;;;cCiBA,sBAA6B,mBAAiB,QAAQ;;;;;;AF1DnE;AAGY,cGOC,gBHPkB,EAAA,CAAA,UAAA,EAAA,MAAA,EAAA,GGOoB,WAAA,CAAA,GHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA,GGOoB,WAAA,CAAA,EHPpB,CGOoB,YHPpB,EGOoB,YHPpB,CAAA;AAM/B;;;;AAIyC,cG0B5B,UH1B4B,EAAA,CAAA,WAAA,EAAA,SAAA,MAAA,EAAA,EAAA,GG0BgB,WAAA,CAAA,GH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA,GG0BgB,WAAA,CAAA,EH1BhB,CG0BgB,YH1BhB,EG0BgB,YH1BhB,CAAA;AAG7B,cGuCC,UHvCa,EAAA,CAAA,QAAA,EGuCW,YHvCX,EAAA,GAAA,MAAA"}
package/dist/index.mjs CHANGED
@@ -1,4 +1,4 @@
1
- import { n as generateMultiSchemaModule } from "./generator-1A_wwhEm.mjs";
1
+ import { n as generateMultiSchemaModule } from "./generator-BJlFKC6z.mjs";
2
2
  import { existsSync, mkdirSync, readFileSync, writeFileSync } from "node:fs";
3
3
  import { basename, dirname, extname, join, relative, resolve } from "node:path";
4
4
  import { err, ok } from "neverthrow";
@@ -73,6 +73,170 @@ const esbuildBundler = {
73
73
  }
74
74
  };
75
75
 
76
+ //#endregion
77
+ //#region packages/codegen/src/defs-generator.ts
78
+ /**
79
+ * Split an array into chunks of the specified size.
80
+ */
81
+ const chunkArray = (array, size) => {
82
+ if (size <= 0) {
83
+ return [Array.from(array)];
84
+ }
85
+ const result = [];
86
+ for (let i = 0; i < array.length; i += size) {
87
+ result.push(array.slice(i, i + size));
88
+ }
89
+ return result;
90
+ };
91
+ /**
92
+ * Determine if chunking is needed based on the number of definitions.
93
+ */
94
+ const needsChunking = (vars, chunkSize) => {
95
+ return vars.length > chunkSize;
96
+ };
97
+ /**
98
+ * Generate a single definition file content.
99
+ */
100
+ const generateDefinitionFile = (options) => {
101
+ const { category, vars, needsDefineEnum } = options;
102
+ if (vars.length === 0) {
103
+ return `/**
104
+ * ${category} definitions (empty)
105
+ * @generated by @soda-gql/codegen
106
+ */
107
+ `;
108
+ }
109
+ const imports = [];
110
+ if (needsDefineEnum && category === "enums") {
111
+ imports.push("import { defineEnum } from \"@soda-gql/core\";");
112
+ }
113
+ const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
114
+ const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
115
+ return `/**
116
+ * ${category} definitions
117
+ * @generated by @soda-gql/codegen
118
+ */
119
+ ${importsBlock}${exportStatements}
120
+ `;
121
+ };
122
+ /**
123
+ * Generate a chunk file content.
124
+ */
125
+ const generateChunkFile = (options) => {
126
+ const { category, vars, chunkIndex, needsDefineEnum } = options;
127
+ if (vars.length === 0) {
128
+ return `/**
129
+ * ${category} chunk ${chunkIndex} (empty)
130
+ * @generated by @soda-gql/codegen
131
+ */
132
+ `;
133
+ }
134
+ const imports = [];
135
+ if (needsDefineEnum && category === "enums") {
136
+ imports.push("import { defineEnum } from \"@soda-gql/core\";");
137
+ }
138
+ const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
139
+ const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
140
+ return `/**
141
+ * ${category} chunk ${chunkIndex}
142
+ * @generated by @soda-gql/codegen
143
+ */
144
+ ${importsBlock}${exportStatements}
145
+ `;
146
+ };
147
+ /**
148
+ * Generate the index file that re-exports all chunks.
149
+ */
150
+ const generateChunkIndex = (options) => {
151
+ const { category, chunkCount } = options;
152
+ const reExports = Array.from({ length: chunkCount }, (_, i) => `export * from "./chunk-${i}";`).join("\n");
153
+ return `/**
154
+ * ${category} index (re-exports all chunks)
155
+ * @generated by @soda-gql/codegen
156
+ */
157
+ ${reExports}
158
+ `;
159
+ };
160
+ /**
161
+ * Generate chunked definition files.
162
+ */
163
+ const generateChunkedDefinitionFiles = (category, schemaName, vars, chunkSize) => {
164
+ const chunks = chunkArray(vars, chunkSize);
165
+ const needsDefineEnum = category === "enums";
166
+ const chunkContents = chunks.map((chunkVars, chunkIndex) => ({
167
+ chunkIndex,
168
+ content: generateChunkFile({
169
+ category,
170
+ schemaName,
171
+ vars: chunkVars,
172
+ chunkIndex,
173
+ needsDefineEnum
174
+ }),
175
+ varNames: chunkVars.map((v) => v.name)
176
+ }));
177
+ const allVarNames = vars.map((v) => v.name);
178
+ const indexContent = generateChunkIndex({
179
+ category,
180
+ chunkCount: chunks.length,
181
+ varNames: allVarNames
182
+ });
183
+ return {
184
+ indexContent,
185
+ chunks: chunkContents
186
+ };
187
+ };
188
+ /**
189
+ * Generate the complete _defs directory structure.
190
+ */
191
+ const generateDefsStructure = (schemaName, categoryVars, chunkSize) => {
192
+ const files = [];
193
+ const importPaths = {
194
+ enums: "./_defs/enums",
195
+ inputs: "./_defs/inputs",
196
+ objects: "./_defs/objects",
197
+ unions: "./_defs/unions"
198
+ };
199
+ const categories = [
200
+ "enums",
201
+ "inputs",
202
+ "objects",
203
+ "unions"
204
+ ];
205
+ for (const category of categories) {
206
+ const vars = categoryVars[category];
207
+ const needsDefineEnum = category === "enums";
208
+ if (needsChunking(vars, chunkSize)) {
209
+ const chunked = generateChunkedDefinitionFiles(category, schemaName, vars, chunkSize);
210
+ importPaths[category] = `./_defs/${category}`;
211
+ files.push({
212
+ relativePath: `_defs/${category}/index.ts`,
213
+ content: chunked.indexContent
214
+ });
215
+ for (const chunk of chunked.chunks) {
216
+ files.push({
217
+ relativePath: `_defs/${category}/chunk-${chunk.chunkIndex}.ts`,
218
+ content: chunk.content
219
+ });
220
+ }
221
+ } else {
222
+ const content = generateDefinitionFile({
223
+ category,
224
+ schemaName,
225
+ vars,
226
+ needsDefineEnum
227
+ });
228
+ files.push({
229
+ relativePath: `_defs/${category}.ts`,
230
+ content
231
+ });
232
+ }
233
+ }
234
+ return {
235
+ files,
236
+ importPaths
237
+ };
238
+ };
239
+
76
240
  //#endregion
77
241
  //#region packages/codegen/src/file.ts
78
242
  const writeModule = (outPath, contents) => {
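The defs-generator region above writes each category either as a single file or, once the number of definitions exceeds the configured chunk size, as an index file plus numbered chunk files. The following sketch is illustrative only: it reuses the chunkArray helper from the bundle with made-up definition names to show the partition and the resulting file layout.

// Same shape as the chunkArray helper in the generated bundle above.
const chunkArray = <T>(array: readonly T[], size: number): T[][] => {
  if (size <= 0) return [Array.from(array)];
  const result: T[][] = [];
  for (let i = 0; i < array.length; i += size) {
    result.push(array.slice(i, i + size));
  }
  return result;
};

// Placeholder enum names; a real run derives these from the schema.
const enumNames = ["Color", "Size", "Status"];
console.log(chunkArray(enumNames, 2)); // [["Color", "Size"], ["Status"]]

// Files written relative to the output directory when chunking applies:
//   _defs/enums/index.ts     re-exports every chunk (export * from "./chunk-0"; ...)
//   _defs/enums/chunk-0.ts   Color, Size
//   _defs/enums/chunk-1.ts   Status
// A category with chunkSize definitions or fewer stays a single _defs/enums.ts.
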
@@ -226,10 +390,12 @@ const runCodegen = async (options) => {
226
390
  inputDepthOverridesConfig.set(schemaName, schemaConfig.inputDepthOverrides);
227
391
  }
228
392
  }
229
- const { code: internalCode, injectsCode } = generateMultiSchemaModule(schemas, {
393
+ const chunkSize = options.chunkSize ?? 100;
394
+ const { code: internalCode, injectsCode, categoryVars } = generateMultiSchemaModule(schemas, {
230
395
  injection: injectionConfig,
231
396
  defaultInputDepth: defaultInputDepthConfig.size > 0 ? defaultInputDepthConfig : undefined,
232
- inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined
397
+ inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined,
398
+ chunkSize
233
399
  });
234
400
  const indexCode = `/**
235
401
  * Generated by @soda-gql/codegen
@@ -239,7 +405,7 @@ const runCodegen = async (options) => {
239
405
  export * from "./_internal";
240
406
  `;
241
407
  for (const [name, document] of schemas.entries()) {
242
- const schemaIndex = (await import("./generator-D6nHdJ1K.mjs")).createSchemaIndex(document);
408
+ const schemaIndex = (await import("./generator--76HN3Ud.mjs")).createSchemaIndex(document);
243
409
  const objects = Array.from(schemaIndex.objects.keys()).filter((n) => !n.startsWith("__")).length;
244
410
  const enums = Array.from(schemaIndex.enums.keys()).filter((n) => !n.startsWith("__")).length;
245
411
  const inputs = Array.from(schemaIndex.inputs.keys()).filter((n) => !n.startsWith("__")).length;
@@ -259,6 +425,31 @@ export * from "./_internal";
259
425
  return err(injectsWriteResult.error);
260
426
  }
261
427
  }
428
+ const defsPaths = [];
429
+ if (categoryVars) {
430
+ const outDir = dirname(outPath);
431
+ const combinedVars = {
432
+ enums: [],
433
+ inputs: [],
434
+ objects: [],
435
+ unions: []
436
+ };
437
+ for (const vars of Object.values(categoryVars)) {
438
+ combinedVars.enums.push(...vars.enums);
439
+ combinedVars.inputs.push(...vars.inputs);
440
+ combinedVars.objects.push(...vars.objects);
441
+ combinedVars.unions.push(...vars.unions);
442
+ }
443
+ const defsStructure = generateDefsStructure("combined", combinedVars, chunkSize);
444
+ for (const file of defsStructure.files) {
445
+ const filePath = join(outDir, file.relativePath);
446
+ const writeResult = await writeModule(filePath, file.content).match(() => Promise.resolve(ok(undefined)), (error) => Promise.resolve(err(error)));
447
+ if (writeResult.isErr()) {
448
+ return err(writeResult.error);
449
+ }
450
+ defsPaths.push(filePath);
451
+ }
452
+ }
262
453
  const internalPath = join(dirname(outPath), "_internal.ts");
263
454
  const internalWriteResult = await writeModule(internalPath, internalCode).match(() => Promise.resolve(ok(undefined)), (error) => Promise.resolve(err(error)));
264
455
  if (internalWriteResult.isErr()) {
@@ -281,7 +472,8 @@ export * from "./_internal";
281
472
  outPath,
282
473
  internalPath,
283
474
  injectsPath,
284
- cjsPath: bundleResult.value.cjsPath
475
+ cjsPath: bundleResult.value.cjsPath,
476
+ ...defsPaths.length > 0 ? { defsPaths } : {}
285
477
  });
286
478
  };
287
479
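
On the success side, runCodegen now attaches the written definition files as defsPaths, and only when at least one _defs file was produced. A hedged consumer sketch follows; it assumes runCodegen and the CodegenOptions type are re-exported from the package entry point (not something this diff shows) and that a fully populated options value is supplied by the caller.

import { runCodegen, type CodegenOptions } from "@soda-gql/codegen"; // assumed exports

const generateAndReport = async (options: CodegenOptions): Promise<void> => {
  // runCodegen resolves to a neverthrow Result<CodegenSuccess, CodegenError>.
  const result = await runCodegen(options);
  result.match(
    (success) => {
      // New in this release: only present when chunked _defs files were written.
      for (const path of success.defsPaths ?? []) {
        console.log(`wrote defs module: ${path}`);
      }
    },
    (error) => {
      console.error("codegen failed:", error);
    },
  );
};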