@sanity/cli 5.0.0-next-major.12 → 5.0.0-next-major.6

This diff compares the contents of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
@@ -1,5 +1,24 @@
  "use strict";
- var fs = require("node:fs/promises"), path = require("node:path"), node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), chalk = require("chalk"), prettier = require("prettier"), cliWorker = require("./cliWorker.js"), getCliConfig = require("./getCliConfig.js"), telemetry = require("@sanity/telemetry");
+ var __create = Object.create;
+ var __defProp = Object.defineProperty;
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
+ var __getOwnPropNames = Object.getOwnPropertyNames;
+ var __getProtoOf = Object.getPrototypeOf, __hasOwnProp = Object.prototype.hasOwnProperty;
+ var __copyProps = (to, from, except, desc) => {
+ if (from && typeof from == "object" || typeof from == "function")
+ for (let key of __getOwnPropNames(from))
+ !__hasOwnProp.call(to, key) && key !== except && __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
+ return to;
+ };
+ var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps(
+ // If the importer is in node compatibility mode or this is not an ESM
+ // file that has been converted to a CommonJS file using a Babel-
+ // compatible transform (i.e. "__esModule" has not been set), then set
+ // "default" to the CommonJS "module.exports" for node compatibility.
+ isNodeMode || !mod || !mod.__esModule ? __defProp(target, "default", { value: mod, enumerable: !0 }) : target,
+ mod
+ ));
+ var fs = require("node:fs/promises"), path = require("node:path"), process = require("node:process"), node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), workerChannels = require("@sanity/worker-channels"), chalk = require("chalk"), cliWorker = require("./cliWorker.js"), getCliConfig = require("./getCliConfig.js"), telemetry = require("@sanity/telemetry");
  function _interopDefaultCompat(e) {
  return e && typeof e == "object" && "default" in e ? e : { default: e };
  }
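The added `__create`/`__copyProps`/`__toESM` helpers are esbuild-style CommonJS interop shims. They appear because `prettier` has dropped out of the top-level `require` list and, as a later hunk shows, is now pulled in with a dynamic `import()` only when formatting is enabled. A minimal sketch of that lazy-loading pattern (illustrative only; `maybeFormat` and its parameters are not names from the package):

```ts
// Sketch: load prettier lazily so the CLI only pays the cost when formatting is enabled.
// Bundlers compile the dynamic import below into the __toESM(require("prettier")) interop seen above.
async function maybeFormat(code: string, outputPath: string, formatGeneratedCode: boolean): Promise<string> {
  if (!formatGeneratedCode) return code

  const prettier = await import('prettier')
  // Resolve the project's prettier config relative to the output file; fall back to defaults.
  const prettierConfig = (await prettier.resolveConfig(outputPath)) ?? {}
  return prettier.format(code, {...prettierConfig, parser: 'typescript'})
}
```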
@@ -45,6 +64,7 @@ The config from the Sanity CLI config is used.
  )
  ), {
  config: codegen.configDefinition.parse(config.config.typegen || {}),
+ path: config.path,
  type: "cli"
  }) : hasLegacyConfig ? (console.warn(
  chalk__default.default.yellow(
@@ -52,130 +72,91 @@ The config from the Sanity CLI config is used.
  )
  ), {
  config: await codegen.readConfig(legacyConfigPath),
+ path: legacyConfigPath,
  type: "legacy"
  }) : {
  config: codegen.configDefinition.parse(config?.config?.typegen || {}),
+ path: config?.path,
  type: "cli"
  };
  }
- async function typegenGenerateAction(args, context) {
- const flags = args.extOptions, { output, workDir, telemetry: telemetry2 } = context, trace = telemetry2.trace(TypesGeneratedTrace);
+ const formatter = new Intl.NumberFormat("en-US", {
+ style: "percent",
+ minimumFractionDigits: 1,
+ maximumFractionDigits: 1
+ }), percent = (value) => formatter.format(Math.min(value, 1)), count = (amount, plural = "", singular = plural.slice(0, Math.max(0, plural.length - 1))) => [amount.toLocaleString("en-US"), amount === 1 ? singular : plural].filter(Boolean).join(" "), getMessage = (error) => typeof error == "object" && error && "message" in error && typeof error.message == "string" ? error.message : "Unknown error";
+ async function typegenGenerateAction({ extOptions: flags }, { output, workDir, telemetry: telemetry2 }) {
+ const trace = telemetry2.trace(TypesGeneratedTrace);
  trace.start();
- const { config: codegenConfig, type: codegenConfigMethod } = await getConfig(
+ const spinner = output.spinner({}).start("Loading config\u2026"), {
+ config: typegenConfig,
+ type: typegenConfigMethod,
+ path: configPath
+ } = await getConfig(workDir, flags["config-path"]);
+ spinner.succeed(`Config loaded from ${configPath?.replace(workDir, ".")}`);
+ const {
+ generates,
+ path: searchPath,
+ schema: schemaPath,
+ formatGeneratedCode,
+ overloadClientMethods
+ } = typegenConfig, outputPath = path.isAbsolute(typegenConfig.generates) ? typegenConfig.generates : path.join(workDir, typegenConfig.generates), outputDir = path.dirname(outputPath);
+ await fs.mkdir(outputDir, { recursive: !0 });
+ const workerPath = await cliWorker.getCliWorkerPath("typegenGenerate"), workerData = {
  workDir,
- flags["config-path"]
- );
+ schemaPath,
+ searchPath,
+ overloadClientMethods
+ }, worker = new node_worker_threads.Worker(workerPath, { workerData, env: process.env }), receiver = workerChannels.WorkerChannelReceiver.from(worker);
  try {
- if (!(await fs.stat(codegenConfig.schema)).isFile())
- throw new Error(`Schema path is not a file: ${codegenConfig.schema}`);
- } catch (err) {
- if (err.code === "ENOENT") {
- const hint = codegenConfig.schema === "./schema.json" ? ' - did you run "sanity schema extract"?' : "";
- throw new Error(`Schema file not found: ${codegenConfig.schema}${hint}`, { cause: err });
+ spinner.start("Loading schema\u2026"), await receiver.event.loadedSchema(), spinner.succeed(`Schema loaded from ${schemaPath}`), spinner.start("Generating schema types\u2026");
+ const { expectedFileCount } = await receiver.event.typegenStarted(), { schemaTypeDeclarations } = await receiver.event.generatedSchemaTypes(), schemaTypesCount = schemaTypeDeclarations.length;
+ spinner.succeed(`Generated ${count(schemaTypesCount, "schema types")}`), spinner.start("Generating query types\u2026");
+ let queriesCount = 0, evaluatedFiles = 0, filesWithErrors = 0, queryFilesCount = 0, typeNodesGenerated = 0, unknownTypeNodesGenerated = 0, emptyUnionTypeNodesGenerated = 0;
+ for await (const { queries, errors } of receiver.stream.evaluatedModules()) {
+ evaluatedFiles++, queriesCount += queries.length, queryFilesCount += queries.length ? 1 : 0, filesWithErrors += errors.length ? 1 : 0;
+ for (const { stats } of queries)
+ typeNodesGenerated += stats.allTypes, unknownTypeNodesGenerated += stats.unknownTypes, emptyUnionTypeNodesGenerated += stats.emptyUnions;
+ for (const error of errors)
+ spinner.fail(getMessage(error));
+ spinner.text = `Generating query types\u2026 (${percent(evaluatedFiles / expectedFileCount)})
+ \u2514\u2500 Processed ${count(evaluatedFiles)} of ${count(expectedFileCount, "files")}. Found ${count(queriesCount, "queries", "query")} from ${count(queryFilesCount, "files")}.`;
  }
- throw err;
- }
- const outputPath = path.isAbsolute(codegenConfig.generates) ? codegenConfig.generates : path.join(process.cwd(), codegenConfig.generates), outputDir = path.dirname(outputPath);
- await fs.mkdir(outputDir, { recursive: !0 });
- const workerPath = await cliWorker.getCliWorkerPath("typegenGenerate"), spinner = output.spinner({}).start("Generating types"), worker = new node_worker_threads.Worker(workerPath, {
- workerData: {
- workDir,
- schemaPath: codegenConfig.schema,
- searchPath: codegenConfig.path,
- overloadClientMethods: codegenConfig.overloadClientMethods
- },
- env: process.env
- }), typeFile = await fs.open(
- outputPath,
- // eslint-disable-next-line no-bitwise
- fs.constants.O_TRUNC | fs.constants.O_CREAT | fs.constants.O_WRONLY
- );
- typeFile.write(generatedFileWarning);
- const stats = {
- queryFilesCount: 0,
- errors: 0,
- queriesCount: 0,
- schemaTypesCount: 0,
- unknownTypeNodesGenerated: 0,
- typeNodesGenerated: 0,
- emptyUnionTypeNodesGenerated: 0,
- size: 0
- };
- await new Promise((resolve, reject) => {
- worker.addListener("message", (msg) => {
- if (msg.type === "error") {
- if (msg.fatal) {
- trace.error(msg.error), reject(msg.error);
- return;
- }
- const errorMessage = msg.filename ? `${msg.error.message} in "${msg.filename}"` : msg.error.message;
- spinner.fail(errorMessage), stats.errors++;
- return;
- }
- if (msg.type === "complete") {
- resolve();
- return;
+ const result = await receiver.event.typegenComplete(), code = `${generatedFileWarning}${result.code}`;
+ if (await fs.writeFile(outputPath, code), spinner.succeed(
+ `Generated ${count(queriesCount, "query types")} from ${count(queryFilesCount, "files")} out of ${count(evaluatedFiles, "scanned files")}`
+ ), formatGeneratedCode) {
+ spinner.start("Formatting generated types with prettier\u2026");
+ try {
+ const prettier = await import("prettier"), prettierConfig = await prettier.resolveConfig(outputPath), formattedCode = await prettier.format(code, {
+ ...prettierConfig,
+ parser: "typescript"
+ });
+ await fs.writeFile(outputPath, formattedCode), spinner.succeed("Formatted generated types with prettier");
+ } catch (err) {
+ spinner.warn(`Failed to format generated types with prettier: ${getMessage(err)}`);
  }
- if (msg.type === "typemap") {
- let typeMapStr = `// Query TypeMap
- `;
- typeMapStr += msg.typeMap, typeFile.write(typeMapStr), stats.size += Buffer.byteLength(typeMapStr);
- return;
- }
- let fileTypeString = `// Source: ${msg.filename}
- `;
- if (msg.type === "schema") {
- stats.schemaTypesCount += msg.length, fileTypeString += msg.schema, typeFile.write(fileTypeString);
- return;
- }
- if (msg.type === "types") {
- stats.queryFilesCount++;
- for (const {
- queryName,
- query,
- type,
- typeNodesGenerated,
- unknownTypeNodesGenerated,
- emptyUnionTypeNodesGenerated
- } of msg.types)
- fileTypeString += `// Variable: ${queryName}
- `, fileTypeString += `// Query: ${query.replace(/(\r\n|\n|\r)/gm, "").trim()}
- `, fileTypeString += type, stats.queriesCount++, stats.typeNodesGenerated += typeNodesGenerated, stats.unknownTypeNodesGenerated += unknownTypeNodesGenerated, stats.emptyUnionTypeNodesGenerated += emptyUnionTypeNodesGenerated;
- typeFile.write(`${fileTypeString}
- `), stats.size += Buffer.byteLength(fileTypeString);
- }
- }), worker.addListener("error", reject);
- }), await typeFile.close();
- const prettierConfig = codegenConfig.formatGeneratedCode ? await prettier.resolveConfig(outputPath).catch((err) => (output.warn(`Failed to load prettier config: ${err.message}`), null)) : null;
- if (prettierConfig) {
- const formatFile = await fs.open(outputPath, fs.constants.O_RDWR);
- try {
- const code = await formatFile.readFile(), formattedCode = await prettier.format(code.toString(), {
- ...prettierConfig,
- parser: "typescript"
- });
- await formatFile.truncate(), await formatFile.write(formattedCode, 0), spinner.info("Formatted generated types with Prettier");
- } catch (err) {
- output.warn(`Failed to format generated types with Prettier: ${err.message}`);
- } finally {
- await formatFile.close();
  }
+ trace.log({
+ configOverloadClientMethods: overloadClientMethods,
+ outputSize: Buffer.byteLength(result.code),
+ queriesCount,
+ schemaTypesCount,
+ queryFilesCount,
+ filesWithErrors,
+ typeNodesGenerated,
+ unknownTypeNodesGenerated,
+ emptyUnionTypeNodesGenerated,
+ unknownTypeNodesRatio: typeNodesGenerated > 0 ? unknownTypeNodesGenerated / typeNodesGenerated : 0,
+ configMethod: typegenConfigMethod
+ }), filesWithErrors > 0 && spinner.warn(
+ `Encountered errors in ${count(filesWithErrors, "files")} while generating types`
+ ), spinner.succeed(`Successfully generated types to ${generates}`);
+ } catch (err) {
+ throw trace.error(err), err;
+ } finally {
+ receiver.unsubscribe(), trace.complete(), await worker.terminate();
  }
- trace.log({
- outputSize: stats.size,
- queriesCount: stats.queriesCount,
- schemaTypesCount: stats.schemaTypesCount,
- queryFilesCount: stats.queryFilesCount,
- filesWithErrors: stats.errors,
- configMethod: codegenConfigMethod,
- typeNodesGenerated: stats.typeNodesGenerated,
- unknownTypeNodesGenerated: stats.unknownTypeNodesGenerated,
- unknownTypeNodesRatio: stats.typeNodesGenerated > 0 ? stats.unknownTypeNodesGenerated / stats.typeNodesGenerated : 0,
- emptyUnionTypeNodesGenerated: stats.emptyUnionTypeNodesGenerated,
- configOverloadClientMethods: codegenConfig.overloadClientMethods
- }), trace.complete(), stats.errors > 0 && spinner.warn(`Encountered errors in ${stats.errors} files while generating types`), spinner.succeed(
- `Generated TypeScript types for ${stats.schemaTypesCount} schema types and ${stats.queriesCount} GROQ queries in ${stats.queryFilesCount} files into: ${codegenConfig.generates}`
- );
  }
  exports.default = typegenGenerateAction;
  //# sourceMappingURL=generateAction.js.map
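The rewritten action replaces the hand-rolled `postMessage` listener and incremental file writes with `@sanity/worker-channels`: the parent wraps the worker in a `WorkerChannelReceiver`, awaits named events (`loadedSchema`, `typegenStarted`, `generatedSchemaTypes`, `typegenComplete`), consumes the `evaluatedModules` stream for per-file progress, and writes the finished `result.code` in a single `writeFile`. A condensed sketch of that control flow, with the spinner and telemetry elided; the names mirror the bundled output above, but treat exact signatures as assumptions rather than documented API:

```ts
import {writeFile} from 'node:fs/promises'
import {Worker} from 'node:worker_threads'
import {WorkerChannelReceiver} from '@sanity/worker-channels'

// Sketch of the parent side. In the real CLI the receiver is parameterized with the
// TypegenWorkerChannel definition shown in the .d.ts diff below; the generic is omitted here.
async function runTypegenWorker(workerPath: string, workerData: unknown, outputPath: string): Promise<void> {
  const worker = new Worker(workerPath, {workerData, env: process.env})
  const receiver = WorkerChannelReceiver.from(worker)
  try {
    await receiver.event.loadedSchema() // worker has read the schema file
    const {expectedFileCount} = await receiver.event.typegenStarted()

    // Each evaluated source module arrives as a stream message carrying its queries and errors.
    let evaluatedFiles = 0
    let queriesCount = 0
    for await (const {queries, errors} of receiver.stream.evaluatedModules()) {
      evaluatedFiles++
      queriesCount += queries.length
      for (const error of errors) console.error(error)
      console.log(`Processed ${evaluatedFiles}/${expectedFileCount} files, found ${queriesCount} queries`)
    }

    // The worker sends the complete generated file content once; the parent writes it in one go.
    const {code} = await receiver.event.typegenComplete()
    await writeFile(outputPath, code)
  } finally {
    receiver.unsubscribe()
    await worker.terminate()
  }
}
```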
@@ -1 +1 @@
- {"version":3,"file":"generateAction.js","sources":["../../src/actions/typegen/generate.telemetry.ts","../../src/actions/typegen/generateAction.ts"],"sourcesContent":["import {defineTrace} from '@sanity/telemetry'\n\ninterface TypesGeneratedTraceAttrubutes {\n outputSize: number\n queriesCount: number\n schemaTypesCount: number\n queryFilesCount: number\n filesWithErrors: number\n typeNodesGenerated: number\n unknownTypeNodesGenerated: number\n unknownTypeNodesRatio: number\n emptyUnionTypeNodesGenerated: number\n configOverloadClientMethods: boolean\n configMethod: 'legacy' | 'cli'\n}\n\nexport const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttrubutes>({\n name: 'Types Generated',\n version: 0,\n description: 'Trace emitted when generating TypeScript types for queries',\n})\n","import {constants, mkdir, open, stat} from 'node:fs/promises'\nimport {dirname, isAbsolute, join} from 'node:path'\nimport {Worker} from 'node:worker_threads'\n\nimport {type CodegenConfig, configDefinition, readConfig} from '@sanity/codegen'\nimport chalk from 'chalk'\nimport {format as prettierFormat, resolveConfig as resolvePrettierConfig} from 'prettier'\n\nimport {type CliCommandArguments, type CliCommandContext} from '../../types'\nimport {getCliWorkerPath} from '../../util/cliWorker'\nimport {getCliConfig} from '../../util/getCliConfig'\nimport {\n type TypegenGenerateTypesWorkerData,\n type TypegenGenerateTypesWorkerMessage,\n} from '../../workers/typegenGenerate'\nimport {TypesGeneratedTrace} from './generate.telemetry'\n\nexport interface TypegenGenerateTypesCommandFlags {\n 'config-path'?: string\n}\n\nconst generatedFileWarning = `/**\n * ---------------------------------------------------------------------------------\n * This file has been generated by Sanity TypeGen.\n * Command: \\`sanity typegen generate\\`\n *\n * Any modifications made directly to this file will be overwritten the next time\n * the TypeScript definitions are generated. 
Please make changes to the Sanity\n * schema definitions and/or GROQ queries if you need to update these types.\n *\n * For more information on how to use Sanity TypeGen, visit the official documentation:\n * https://www.sanity.io/docs/sanity-typegen\n * ---------------------------------------------------------------------------------\n */\\n\\n`\n\nasync function getConfig(\n workDir: string,\n configPath?: string,\n): Promise<{config: CodegenConfig; type: 'legacy' | 'cli'}> {\n const config = await getCliConfig(workDir)\n\n // check if the legacy config exist\n const legacyConfigPath = configPath || 'sanity-typegen.json'\n let hasLegacyConfig = false\n try {\n const file = await stat(legacyConfigPath)\n hasLegacyConfig = file.isFile()\n } catch (err) {\n if (err.code === 'ENOENT' && configPath) {\n throw new Error(`Typegen config file not found: ${configPath}`, {cause: err})\n }\n\n if (err.code !== 'ENOENT') {\n throw new Error(`Error when checking if typegen config file exists: ${legacyConfigPath}`, {\n cause: err,\n })\n }\n }\n\n // we have both legacy and cli config with typegen\n if (config?.config?.typegen && hasLegacyConfig) {\n console.warn(\n chalk.yellow(\n `You've specified typegen in your Sanity CLI config, but also have a typegen config.\n\nThe config from the Sanity CLI config is used.\n`,\n ),\n )\n\n return {\n config: configDefinition.parse(config.config.typegen || {}),\n type: 'cli',\n }\n }\n\n // we only have legacy typegen config\n if (hasLegacyConfig) {\n console.warn(\n chalk.yellow(\n `The separate typegen config has been deprecated. Use \\`typegen\\` in the sanity CLI config instead.\n\nSee: https://www.sanity.io/docs/help/configuring-typegen-in-sanity-cli-config`,\n ),\n )\n return {\n config: await readConfig(legacyConfigPath),\n type: 'legacy',\n }\n }\n\n // we only have cli config\n return {\n config: configDefinition.parse(config?.config?.typegen || {}),\n type: 'cli',\n }\n}\n\nexport default async function typegenGenerateAction(\n args: CliCommandArguments<TypegenGenerateTypesCommandFlags>,\n context: CliCommandContext,\n): Promise<void> {\n const flags = args.extOptions\n const {output, workDir, telemetry} = context\n\n const trace = telemetry.trace(TypesGeneratedTrace)\n trace.start()\n\n const {config: codegenConfig, type: codegenConfigMethod} = await getConfig(\n workDir,\n flags['config-path'],\n )\n\n try {\n const schemaStats = await stat(codegenConfig.schema)\n if (!schemaStats.isFile()) {\n throw new Error(`Schema path is not a file: ${codegenConfig.schema}`)\n }\n } catch (err) {\n if (err.code === 'ENOENT') {\n // If the user has not provided a specific schema path (eg we're using the default), give some help\n const hint =\n codegenConfig.schema === './schema.json' ? ` - did you run \"sanity schema extract\"?` : ''\n throw new Error(`Schema file not found: ${codegenConfig.schema}${hint}`, {cause: err})\n }\n throw err\n }\n\n const outputPath = isAbsolute(codegenConfig.generates)\n ? 
codegenConfig.generates\n : join(process.cwd(), codegenConfig.generates)\n const outputDir = dirname(outputPath)\n await mkdir(outputDir, {recursive: true})\n const workerPath = await getCliWorkerPath('typegenGenerate')\n\n const spinner = output.spinner({}).start('Generating types')\n\n const worker = new Worker(workerPath, {\n workerData: {\n workDir,\n schemaPath: codegenConfig.schema,\n searchPath: codegenConfig.path,\n overloadClientMethods: codegenConfig.overloadClientMethods,\n } satisfies TypegenGenerateTypesWorkerData,\n env: process.env,\n })\n\n const typeFile = await open(\n outputPath,\n // eslint-disable-next-line no-bitwise\n constants.O_TRUNC | constants.O_CREAT | constants.O_WRONLY,\n )\n\n void typeFile.write(generatedFileWarning)\n\n const stats = {\n queryFilesCount: 0,\n errors: 0,\n queriesCount: 0,\n schemaTypesCount: 0,\n unknownTypeNodesGenerated: 0,\n typeNodesGenerated: 0,\n emptyUnionTypeNodesGenerated: 0,\n size: 0,\n }\n\n await new Promise<void>((resolve, reject) => {\n worker.addListener('message', (msg: TypegenGenerateTypesWorkerMessage) => {\n if (msg.type === 'error') {\n if (msg.fatal) {\n trace.error(msg.error)\n reject(msg.error)\n return\n }\n const errorMessage = msg.filename\n ? `${msg.error.message} in \"${msg.filename}\"`\n : msg.error.message\n spinner.fail(errorMessage)\n stats.errors++\n return\n }\n if (msg.type === 'complete') {\n resolve()\n return\n }\n\n if (msg.type === 'typemap') {\n let typeMapStr = `// Query TypeMap\\n`\n typeMapStr += msg.typeMap\n void typeFile.write(typeMapStr)\n stats.size += Buffer.byteLength(typeMapStr)\n return\n }\n\n let fileTypeString = `// Source: ${msg.filename}\\n`\n\n if (msg.type === 'schema') {\n stats.schemaTypesCount += msg.length\n fileTypeString += msg.schema\n void typeFile.write(fileTypeString)\n return\n }\n\n if (msg.type === 'types') {\n stats.queryFilesCount++\n for (const {\n queryName,\n query,\n type,\n typeNodesGenerated,\n unknownTypeNodesGenerated,\n emptyUnionTypeNodesGenerated,\n } of msg.types) {\n fileTypeString += `// Variable: ${queryName}\\n`\n fileTypeString += `// Query: ${query.replace(/(\\r\\n|\\n|\\r)/gm, '').trim()}\\n`\n fileTypeString += type\n stats.queriesCount++\n stats.typeNodesGenerated += typeNodesGenerated\n stats.unknownTypeNodesGenerated += unknownTypeNodesGenerated\n stats.emptyUnionTypeNodesGenerated += emptyUnionTypeNodesGenerated\n }\n void typeFile.write(`${fileTypeString}\\n`)\n stats.size += Buffer.byteLength(fileTypeString)\n }\n })\n worker.addListener('error', reject)\n })\n\n await typeFile.close()\n\n const prettierConfig = codegenConfig.formatGeneratedCode\n ? 
await resolvePrettierConfig(outputPath).catch((err) => {\n output.warn(`Failed to load prettier config: ${err.message}`)\n return null\n })\n : null\n\n if (prettierConfig) {\n const formatFile = await open(outputPath, constants.O_RDWR)\n try {\n const code = await formatFile.readFile()\n const formattedCode = await prettierFormat(code.toString(), {\n ...prettierConfig,\n parser: 'typescript' as const,\n })\n await formatFile.truncate()\n await formatFile.write(formattedCode, 0)\n\n spinner.info('Formatted generated types with Prettier')\n } catch (err) {\n output.warn(`Failed to format generated types with Prettier: ${err.message}`)\n } finally {\n await formatFile.close()\n }\n }\n\n trace.log({\n outputSize: stats.size,\n queriesCount: stats.queriesCount,\n schemaTypesCount: stats.schemaTypesCount,\n queryFilesCount: stats.queryFilesCount,\n filesWithErrors: stats.errors,\n configMethod: codegenConfigMethod,\n typeNodesGenerated: stats.typeNodesGenerated,\n unknownTypeNodesGenerated: stats.unknownTypeNodesGenerated,\n unknownTypeNodesRatio:\n stats.typeNodesGenerated > 0 ? stats.unknownTypeNodesGenerated / stats.typeNodesGenerated : 0,\n emptyUnionTypeNodesGenerated: stats.emptyUnionTypeNodesGenerated,\n configOverloadClientMethods: codegenConfig.overloadClientMethods,\n })\n\n trace.complete()\n if (stats.errors > 0) {\n spinner.warn(`Encountered errors in ${stats.errors} files while generating types`)\n }\n\n spinner.succeed(\n `Generated TypeScript types for ${stats.schemaTypesCount} schema types and ${stats.queriesCount} GROQ queries in ${stats.queryFilesCount} files into: ${codegenConfig.generates}`,\n )\n}\n"],"names":["defineTrace","getCliConfig","stat","chalk","configDefinition","readConfig","telemetry","isAbsolute","join","dirname","mkdir","getCliWorkerPath","Worker","open","constants","resolvePrettierConfig","prettierFormat"],"mappings":";;;;;;AAgBO,MAAM,sBAAsBA,UAAAA,YAA2C;AAAA,EAC5E,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AACf,CAAC,GCCK,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc7B,eAAe,UACb,SACA,YAC0D;AAC1D,QAAM,SAAS,MAAMC,aAAAA,aAAa,OAAO,GAGnC,mBAAmB,cAAc;AACvC,MAAI,kBAAkB;AACtB,MAAI;AAEF,uBADa,MAAMC,GAAAA,KAAK,gBAAgB,GACjB,OAAA;AAAA,EACzB,SAAS,KAAK;AACZ,QAAI,IAAI,SAAS,YAAY;AAC3B,YAAM,IAAI,MAAM,kCAAkC,UAAU,IAAI,EAAC,OAAO,KAAI;AAG9E,QAAI,IAAI,SAAS;AACf,YAAM,IAAI,MAAM,sDAAsD,gBAAgB,IAAI;AAAA,QACxF,OAAO;AAAA,MAAA,CACR;AAAA,EAEL;AAGA,SAAI,QAAQ,QAAQ,WAAW,mBAC7B,QAAQ;AAAA,IACNC,eAAAA,QAAM;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,IAAA;AAAA,EAIF,GAGK;AAAA,IACL,QAAQC,QAAAA,iBAAiB,MAAM,OAAO,OAAO,WAAW,EAAE;AAAA,IAC1D,MAAM;AAAA,EAAA,KAKN,mBACF,QAAQ;AAAA,IACND,eAAAA,QAAM;AAAA,MACJ;AAAA,IAAA;AAAA,EAGF,GAEK;AAAA,IACL,QAAQ,MAAME,QAAAA,WAAW,gBAAgB;AAAA,IACzC,MAAM;AAAA,EAAA,KAKH;AAAA,IACL,QAAQD,QAAAA,iBAAiB,MAAM,QAAQ,QAAQ,WAAW,EAAE;AAAA,IAC5D,MAAM;AAAA,EAAA;AAEV;AAEA,eAA8B,sBAC5B,MACA,SACe;AACf,QAAM,QAAQ,KAAK,YACb,EAAC,QAAQ,SAAS,WAAAE,WAAA,IAAa,SAE/B,QAAQA,WAAU,MAAM,mBAAmB;AACjD,QAAM,MAAA;AAEN,QAAM,EAAC,QAAQ,eAAe,MAAM,oBAAA,IAAuB,MAAM;AAAA,IAC/D;AAAA,IACA,MAAM,aAAa;AAAA,EAAA;AAGrB,MAAI;AAEF,QAAI,EADgB,MAAMJ,GAAAA,KAAK,cAAc,MAAM,GAClC,OAAA;AACf,YAAM,IAAI,MAAM,8BAA8B,cAAc,MAAM,EAAE;AAAA,EAExE,SAAS,KAAK;AACZ,QAAI,IAAI,SAAS,UAAU;AAEzB,YAAM,OACJ,cAAc,WAAW,kBAAkB,4CAA4C;AACzF,YAAM,IAAI,MAAM,0BAA0B,cAAc,MAAM,GAAG,IAAI,IAAI,EAAC,OAAO,IAAA,CAAI;AAAA,IACvF;AACA,UAAM;AAAA,EACR;AAEA,QAAM,aAAaK,KAAAA,WAAW,cAAc,SAAS,IACjD,cAAc,YACdC,KAAAA,KAAK,QAAQ,IAAA,GAAO,cAAc,SAAS,GACzC,YAAYC,KAAAA,QAAQ,UAAU;AACpC,QAAMC,GAAAA,MAAM,WAAW,EAAC,WAAW,IAAK;AACxC,QAAM,aAAa,MAAMC,UAAAA,iBAAiB,iBAAiB,GAErD
,UAAU,OAAO,QAAQ,CAAA,CAAE,EAAE,MAAM,kBAAkB,GAErD,SAAS,IAAIC,oBAAAA,OAAO,YAAY;AAAA,IACpC,YAAY;AAAA,MACV;AAAA,MACA,YAAY,cAAc;AAAA,MAC1B,YAAY,cAAc;AAAA,MAC1B,uBAAuB,cAAc;AAAA,IAAA;AAAA,IAEvC,KAAK,QAAQ;AAAA,EAAA,CACd,GAEK,WAAW,MAAMC,GAAAA;AAAAA,IACrB;AAAA;AAAA,IAEAC,GAAAA,UAAU,UAAUA,aAAU,UAAUA,GAAAA,UAAU;AAAA,EAAA;AAG/C,WAAS,MAAM,oBAAoB;AAExC,QAAM,QAAQ;AAAA,IACZ,iBAAiB;AAAA,IACjB,QAAQ;AAAA,IACR,cAAc;AAAA,IACd,kBAAkB;AAAA,IAClB,2BAA2B;AAAA,IAC3B,oBAAoB;AAAA,IACpB,8BAA8B;AAAA,IAC9B,MAAM;AAAA,EAAA;AAGR,QAAM,IAAI,QAAc,CAAC,SAAS,WAAW;AAC3C,WAAO,YAAY,WAAW,CAAC,QAA2C;AACxE,UAAI,IAAI,SAAS,SAAS;AACxB,YAAI,IAAI,OAAO;AACb,gBAAM,MAAM,IAAI,KAAK,GACrB,OAAO,IAAI,KAAK;AAChB;AAAA,QACF;AACA,cAAM,eAAe,IAAI,WACrB,GAAG,IAAI,MAAM,OAAO,QAAQ,IAAI,QAAQ,MACxC,IAAI,MAAM;AACd,gBAAQ,KAAK,YAAY,GACzB,MAAM;AACN;AAAA,MACF;AACA,UAAI,IAAI,SAAS,YAAY;AAC3B,gBAAA;AACA;AAAA,MACF;AAEA,UAAI,IAAI,SAAS,WAAW;AAC1B,YAAI,aAAa;AAAA;AACjB,sBAAc,IAAI,SACb,SAAS,MAAM,UAAU,GAC9B,MAAM,QAAQ,OAAO,WAAW,UAAU;AAC1C;AAAA,MACF;AAEA,UAAI,iBAAiB,cAAc,IAAI,QAAQ;AAAA;AAE/C,UAAI,IAAI,SAAS,UAAU;AACzB,cAAM,oBAAoB,IAAI,QAC9B,kBAAkB,IAAI,QACjB,SAAS,MAAM,cAAc;AAClC;AAAA,MACF;AAEA,UAAI,IAAI,SAAS,SAAS;AACxB,cAAM;AACN,mBAAW;AAAA,UACT;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,UACA;AAAA,QAAA,KACG,IAAI;AACP,4BAAkB,gBAAgB,SAAS;AAAA,GAC3C,kBAAkB,aAAa,MAAM,QAAQ,kBAAkB,EAAE,EAAE,MAAM;AAAA,GACzE,kBAAkB,MAClB,MAAM,gBACN,MAAM,sBAAsB,oBAC5B,MAAM,6BAA6B,2BACnC,MAAM,gCAAgC;AAEnC,iBAAS,MAAM,GAAG,cAAc;AAAA,CAAI,GACzC,MAAM,QAAQ,OAAO,WAAW,cAAc;AAAA,MAChD;AAAA,IACF,CAAC,GACD,OAAO,YAAY,SAAS,MAAM;AAAA,EACpC,CAAC,GAED,MAAM,SAAS,MAAA;AAEf,QAAM,iBAAiB,cAAc,sBACjC,MAAMC,SAAAA,cAAsB,UAAU,EAAE,MAAM,CAAC,SAC7C,OAAO,KAAK,mCAAmC,IAAI,OAAO,EAAE,GACrD,KACR,IACD;AAEJ,MAAI,gBAAgB;AAClB,UAAM,aAAa,MAAMF,GAAAA,KAAK,YAAYC,GAAAA,UAAU,MAAM;AAC1D,QAAI;AACF,YAAM,OAAO,MAAM,WAAW,SAAA,GACxB,gBAAgB,MAAME,SAAAA,OAAe,KAAK,YAAY;AAAA,QAC1D,GAAG;AAAA,QACH,QAAQ;AAAA,MAAA,CACT;AACD,YAAM,WAAW,YACjB,MAAM,WAAW,MAAM,eAAe,CAAC,GAEvC,QAAQ,KAAK,yCAAyC;AAAA,IACxD,SAAS,KAAK;AACZ,aAAO,KAAK,mDAAmD,IAAI,OAAO,EAAE;AAAA,IAC9E,UAAA;AACE,YAAM,WAAW,MAAA;AAAA,IACnB;AAAA,EACF;AAEA,QAAM,IAAI;AAAA,IACR,YAAY,MAAM;AAAA,IAClB,cAAc,MAAM;AAAA,IACpB,kBAAkB,MAAM;AAAA,IACxB,iBAAiB,MAAM;AAAA,IACvB,iBAAiB,MAAM;AAAA,IACvB,cAAc;AAAA,IACd,oBAAoB,MAAM;AAAA,IAC1B,2BAA2B,MAAM;AAAA,IACjC,uBACE,MAAM,qBAAqB,IAAI,MAAM,4BAA4B,MAAM,qBAAqB;AAAA,IAC9F,8BAA8B,MAAM;AAAA,IACpC,6BAA6B,cAAc;AAAA,EAAA,CAC5C,GAED,MAAM,SAAA,GACF,MAAM,SAAS,KACjB,QAAQ,KAAK,yBAAyB,MAAM,MAAM,+BAA+B,GAGnF,QAAQ;AAAA,IACN,kCAAkC,MAAM,gBAAgB,qBAAqB,MAAM,YAAY,oBAAoB,MAAM,eAAe,gBAAgB,cAAc,SAAS;AAAA,EAAA;AAEnL;;"}
+ {"version":3,"file":"generateAction.js","sources":["../../src/actions/typegen/generate.telemetry.ts","../../src/actions/typegen/generateAction.ts"],"sourcesContent":["import {defineTrace} from '@sanity/telemetry'\n\ninterface TypesGeneratedTraceAttributes {\n outputSize: number\n queriesCount: number\n schemaTypesCount: number\n queryFilesCount: number\n filesWithErrors: number\n typeNodesGenerated: number\n unknownTypeNodesGenerated: number\n unknownTypeNodesRatio: number\n emptyUnionTypeNodesGenerated: number\n configOverloadClientMethods: boolean\n configMethod: 'legacy' | 'cli'\n}\n\nexport const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttributes>({\n name: 'Types Generated',\n version: 0,\n description: 'Trace emitted when generating TypeScript types for queries',\n})\n","/* eslint-disable max-statements */\nimport {mkdir, stat, writeFile} from 'node:fs/promises'\nimport {dirname, isAbsolute, join} from 'node:path'\nimport {env} from 'node:process'\nimport {Worker} from 'node:worker_threads'\n\nimport {configDefinition, readConfig, type TypeGenConfig} from '@sanity/codegen'\nimport {WorkerChannelReceiver} from '@sanity/worker-channels'\nimport chalk from 'chalk'\n\nimport {type CliCommandArguments, type CliCommandContext} from '../../types'\nimport {getCliWorkerPath} from '../../util/cliWorker'\nimport {getCliConfig} from '../../util/getCliConfig'\nimport {\n type TypegenGenerateTypesWorkerData,\n type TypegenWorkerChannel,\n} from '../../workers/typegenGenerate'\nimport {TypesGeneratedTrace} from './generate.telemetry'\n\nexport interface TypegenGenerateTypesCommandFlags {\n 'config-path'?: string\n}\n\nconst generatedFileWarning = `/**\n * ---------------------------------------------------------------------------------\n * This file has been generated by Sanity TypeGen.\n * Command: \\`sanity typegen generate\\`\n *\n * Any modifications made directly to this file will be overwritten the next time\n * the TypeScript definitions are generated. 
Please make changes to the Sanity\n * schema definitions and/or GROQ queries if you need to update these types.\n *\n * For more information on how to use Sanity TypeGen, visit the official documentation:\n * https://www.sanity.io/docs/sanity-typegen\n * ---------------------------------------------------------------------------------\n */\\n\\n`\n\nasync function getConfig(\n workDir: string,\n configPath?: string,\n): Promise<{config: TypeGenConfig; path?: string; type: 'legacy' | 'cli'}> {\n const config = await getCliConfig(workDir)\n\n // check if the legacy config exist\n const legacyConfigPath = configPath || 'sanity-typegen.json'\n let hasLegacyConfig = false\n try {\n const file = await stat(legacyConfigPath)\n hasLegacyConfig = file.isFile()\n } catch (err) {\n if (err.code === 'ENOENT' && configPath) {\n throw new Error(`Typegen config file not found: ${configPath}`, {cause: err})\n }\n\n if (err.code !== 'ENOENT') {\n throw new Error(`Error when checking if typegen config file exists: ${legacyConfigPath}`, {\n cause: err,\n })\n }\n }\n\n // we have both legacy and cli config with typegen\n if (config?.config?.typegen && hasLegacyConfig) {\n console.warn(\n chalk.yellow(\n `You've specified typegen in your Sanity CLI config, but also have a typegen config.\n\nThe config from the Sanity CLI config is used.\n`,\n ),\n )\n\n return {\n config: configDefinition.parse(config.config.typegen || {}),\n path: config.path,\n type: 'cli',\n }\n }\n\n // we only have legacy typegen config\n if (hasLegacyConfig) {\n console.warn(\n chalk.yellow(\n `The separate typegen config has been deprecated. Use \\`typegen\\` in the sanity CLI config instead.\n\nSee: https://www.sanity.io/docs/help/configuring-typegen-in-sanity-cli-config`,\n ),\n )\n return {\n config: await readConfig(legacyConfigPath),\n path: legacyConfigPath,\n type: 'legacy',\n }\n }\n\n // we only have cli config\n return {\n config: configDefinition.parse(config?.config?.typegen || {}),\n path: config?.path,\n type: 'cli',\n }\n}\n\nconst formatter = new Intl.NumberFormat('en-US', {\n style: 'percent',\n minimumFractionDigits: 1,\n maximumFractionDigits: 1,\n})\nconst percent = (value: number): string => formatter.format(Math.min(value, 1))\nconst count = (\n amount: number,\n plural: string = '',\n singular: string = plural.slice(0, Math.max(0, plural.length - 1)),\n): string =>\n [amount.toLocaleString('en-US'), amount === 1 ? singular : plural].filter(Boolean).join(' ')\nconst getMessage = (error: unknown) =>\n typeof error === 'object' && !!error && 'message' in error && typeof error.message === 'string'\n ? error.message\n : 'Unknown error'\n\nexport default async function typegenGenerateAction(\n {extOptions: flags}: CliCommandArguments<TypegenGenerateTypesCommandFlags>,\n {output, workDir, telemetry}: CliCommandContext,\n): Promise<void> {\n const trace = telemetry.trace(TypesGeneratedTrace)\n trace.start()\n\n const spinner = output.spinner({}).start('Loading config…')\n\n const {\n config: typegenConfig,\n type: typegenConfigMethod,\n path: configPath,\n } = await getConfig(workDir, flags['config-path'])\n\n spinner.succeed(`Config loaded from ${configPath?.replace(workDir, '.')}`)\n\n const {\n generates,\n path: searchPath,\n schema: schemaPath,\n formatGeneratedCode,\n overloadClientMethods,\n } = typegenConfig\n\n const outputPath = isAbsolute(typegenConfig.generates)\n ? 
typegenConfig.generates\n : join(workDir, typegenConfig.generates)\n\n const outputDir = dirname(outputPath)\n await mkdir(outputDir, {recursive: true})\n\n const workerPath = await getCliWorkerPath('typegenGenerate')\n const workerData: TypegenGenerateTypesWorkerData = {\n workDir,\n schemaPath,\n searchPath,\n overloadClientMethods,\n }\n const worker = new Worker(workerPath, {workerData, env})\n const receiver = WorkerChannelReceiver.from<TypegenWorkerChannel>(worker)\n\n try {\n spinner.start(`Loading schema…`)\n await receiver.event.loadedSchema()\n spinner.succeed(`Schema loaded from ${schemaPath}`)\n\n spinner.start('Generating schema types…')\n const {expectedFileCount} = await receiver.event.typegenStarted()\n const {schemaTypeDeclarations} = await receiver.event.generatedSchemaTypes()\n const schemaTypesCount = schemaTypeDeclarations.length\n spinner.succeed(`Generated ${count(schemaTypesCount, 'schema types')}`)\n\n spinner.start('Generating query types…')\n let queriesCount = 0\n let evaluatedFiles = 0\n let filesWithErrors = 0\n let queryFilesCount = 0\n let typeNodesGenerated = 0\n let unknownTypeNodesGenerated = 0\n let emptyUnionTypeNodesGenerated = 0\n\n for await (const {queries, errors} of receiver.stream.evaluatedModules()) {\n evaluatedFiles++\n queriesCount += queries.length\n queryFilesCount += queries.length ? 1 : 0\n filesWithErrors += errors.length ? 1 : 0\n\n for (const {stats} of queries) {\n typeNodesGenerated += stats.allTypes\n unknownTypeNodesGenerated += stats.unknownTypes\n emptyUnionTypeNodesGenerated += stats.emptyUnions\n }\n\n for (const error of errors) {\n spinner.fail(getMessage(error))\n }\n\n spinner.text =\n `Generating query types… (${percent(evaluatedFiles / expectedFileCount)})\\n` +\n ` └─ Processed ${count(evaluatedFiles)} of ${count(expectedFileCount, 'files')}. ` +\n `Found ${count(queriesCount, 'queries', 'query')} from ${count(queryFilesCount, 'files')}.`\n }\n\n const result = await receiver.event.typegenComplete()\n const code = `${generatedFileWarning}${result.code}`\n await writeFile(outputPath, code)\n\n spinner.succeed(\n `Generated ${count(queriesCount, 'query types')} from ${count(queryFilesCount, 'files')} out of ${count(evaluatedFiles, 'scanned files')}`,\n )\n\n if (formatGeneratedCode) {\n spinner.start(`Formatting generated types with prettier…`)\n\n try {\n const prettier = await import('prettier')\n const prettierConfig = await prettier.resolveConfig(outputPath)\n const formattedCode = await prettier.format(code, {\n ...prettierConfig,\n parser: 'typescript' as const,\n })\n await writeFile(outputPath, formattedCode)\n\n spinner.succeed('Formatted generated types with prettier')\n } catch (err) {\n spinner.warn(`Failed to format generated types with prettier: ${getMessage(err)}`)\n }\n }\n\n trace.log({\n configOverloadClientMethods: overloadClientMethods,\n outputSize: Buffer.byteLength(result.code),\n queriesCount,\n schemaTypesCount,\n queryFilesCount,\n filesWithErrors,\n typeNodesGenerated,\n unknownTypeNodesGenerated,\n emptyUnionTypeNodesGenerated,\n unknownTypeNodesRatio:\n typeNodesGenerated > 0 ? 
unknownTypeNodesGenerated / typeNodesGenerated : 0,\n configMethod: typegenConfigMethod,\n })\n\n if (filesWithErrors > 0) {\n spinner.warn(\n `Encountered errors in ${count(filesWithErrors, 'files')} while generating types`,\n )\n }\n\n spinner.succeed(`Successfully generated types to ${generates}`)\n } catch (err) {\n trace.error(err)\n throw err\n } finally {\n receiver.unsubscribe()\n trace.complete()\n await worker.terminate()\n }\n}\n"],"names":["defineTrace","getCliConfig","stat","chalk","configDefinition","readConfig","telemetry","isAbsolute","join","dirname","mkdir","getCliWorkerPath","Worker","env","WorkerChannelReceiver","writeFile"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;AAgBO,MAAM,sBAAsBA,UAAAA,YAA2C;AAAA,EAC5E,MAAM;AAAA,EACN,SAAS;AAAA,EACT,aAAa;AACf,CAAC,GCGK,uBAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAc7B,eAAe,UACb,SACA,YACyE;AACzE,QAAM,SAAS,MAAMC,aAAAA,aAAa,OAAO,GAGnC,mBAAmB,cAAc;AACvC,MAAI,kBAAkB;AACtB,MAAI;AAEF,uBADa,MAAMC,GAAAA,KAAK,gBAAgB,GACjB,OAAA;AAAA,EACzB,SAAS,KAAK;AACZ,QAAI,IAAI,SAAS,YAAY;AAC3B,YAAM,IAAI,MAAM,kCAAkC,UAAU,IAAI,EAAC,OAAO,KAAI;AAG9E,QAAI,IAAI,SAAS;AACf,YAAM,IAAI,MAAM,sDAAsD,gBAAgB,IAAI;AAAA,QACxF,OAAO;AAAA,MAAA,CACR;AAAA,EAEL;AAGA,SAAI,QAAQ,QAAQ,WAAW,mBAC7B,QAAQ;AAAA,IACNC,eAAAA,QAAM;AAAA,MACJ;AAAA;AAAA;AAAA;AAAA,IAAA;AAAA,EAIF,GAGK;AAAA,IACL,QAAQC,QAAAA,iBAAiB,MAAM,OAAO,OAAO,WAAW,EAAE;AAAA,IAC1D,MAAM,OAAO;AAAA,IACb,MAAM;AAAA,EAAA,KAKN,mBACF,QAAQ;AAAA,IACND,eAAAA,QAAM;AAAA,MACJ;AAAA,IAAA;AAAA,EAGF,GAEK;AAAA,IACL,QAAQ,MAAME,QAAAA,WAAW,gBAAgB;AAAA,IACzC,MAAM;AAAA,IACN,MAAM;AAAA,EAAA,KAKH;AAAA,IACL,QAAQD,QAAAA,iBAAiB,MAAM,QAAQ,QAAQ,WAAW,EAAE;AAAA,IAC5D,MAAM,QAAQ;AAAA,IACd,MAAM;AAAA,EAAA;AAEV;AAEA,MAAM,YAAY,IAAI,KAAK,aAAa,SAAS;AAAA,EAC/C,OAAO;AAAA,EACP,uBAAuB;AAAA,EACvB,uBAAuB;AACzB,CAAC,GACK,UAAU,CAAC,UAA0B,UAAU,OAAO,KAAK,IAAI,OAAO,CAAC,CAAC,GACxE,QAAQ,CACZ,QACA,SAAiB,IACjB,WAAmB,OAAO,MAAM,GAAG,KAAK,IAAI,GAAG,OAAO,SAAS,CAAC,CAAC,MAEjE,CAAC,OAAO,eAAe,OAAO,GAAG,WAAW,IAAI,WAAW,MAAM,EAAE,OAAO,OAAO,EAAE,KAAK,GAAG,GACvF,aAAa,CAAC,UAClB,OAAO,SAAU,YAAc,SAAS,aAAa,SAAS,OAAO,MAAM,WAAY,WACnF,MAAM,UACN;AAEN,eAA8B,sBAC5B,EAAC,YAAY,MAAA,GACb,EAAC,QAAQ,SAAS,WAAAE,cACH;AACf,QAAM,QAAQA,WAAU,MAAM,mBAAmB;AACjD,QAAM,MAAA;AAEN,QAAM,UAAU,OAAO,QAAQ,CAAA,CAAE,EAAE,MAAM,sBAAiB,GAEpD;AAAA,IACJ,QAAQ;AAAA,IACR,MAAM;AAAA,IACN,MAAM;AAAA,EAAA,IACJ,MAAM,UAAU,SAAS,MAAM,aAAa,CAAC;AAEjD,UAAQ,QAAQ,sBAAsB,YAAY,QAAQ,SAAS,GAAG,CAAC,EAAE;AAEzE,QAAM;AAAA,IACJ;AAAA,IACA,MAAM;AAAA,IACN,QAAQ;AAAA,IACR;AAAA,IACA;AAAA,EAAA,IACE,eAEE,aAAaC,KAAAA,WAAW,cAAc,SAAS,IACjD,cAAc,YACdC,KAAAA,KAAK,SAAS,cAAc,SAAS,GAEnC,YAAYC,KAAAA,QAAQ,UAAU;AACpC,QAAMC,GAAAA,MAAM,WAAW,EAAC,WAAW,IAAK;AAExC,QAAM,aAAa,MAAMC,UAAAA,iBAAiB,iBAAiB,GACrD,aAA6C;AAAA,IACjD;AAAA,IACA;AAAA,IACA;AAAA,IACA;AAAA,EAAA,GAEI,SAAS,IAAIC,2BAAO,YAAY,EAAC,YAAA,KAAYC,YAAA,CAAI,GACjD,WAAWC,qCAAsB,KAA2B,MAAM;AAExE,MAAI;AACF,YAAQ,MAAM,sBAAiB,GAC/B,MAAM,SAAS,MAAM,gBACrB,QAAQ,QAAQ,sBAAsB,UAAU,EAAE,GAElD,QAAQ,MAAM,+BAA0B;AACxC,UAAM,EAAC,kBAAA,IAAqB,MAAM,SAAS,MAAM,eAAA,GAC3C,EAAC,uBAAA,IAA0B,MAAM,SAAS,MAAM,qBAAA,GAChD,mBAAmB,uBAAuB;AAChD,YAAQ,QAAQ,aAAa,MAAM,kBAAkB,cAAc,CAAC,EAAE,GAEtE,QAAQ,MAAM,8BAAyB;AACvC,QAAI,eAAe,GACf,iBAAiB,GACjB,kBAAkB,GAClB,kBAAkB,GAClB,qBAAqB,GACrB,4BAA4B,GAC5B,+BAA+B;AAEnC,qBAAiB,EAAC,SAAS,OAAA,KAAW,SAAS,OAAO,oBAAoB;AACxE,wBACA,gBAAgB,QAAQ,QACxB,mBAAmB,QAAQ,SAAS,IAAI,GACxC,mBAAmB,OAAO,SAAS,IAAI;AAEvC,iBAAW,EAAC,WAAU;AACpB,8BAAsB,MAAM,UAC5B,6BAA6B,MAAM,cACnC,gCAAgC,MAAM;AAGxC,iBAAW,SAAS;AAClB,gBAAQ,KAAK,WAAW,KAAK,CAAC;AAGhC,cAAQ,OACN,iCAA4B,QAAQ,iBAAiB,iBAAiB,CAAC;AAAA,2BACrD,MAAM,cAAc,CAAC,OAAO,MAAM,
mBAAmB,OAAO,CAAC,WACtE,MAAM,cAAc,WAAW,OAAO,CAAC,SAAS,MAAM,iBAAiB,OAAO,CAAC;AAAA,IAC5F;AAEA,UAAM,SAAS,MAAM,SAAS,MAAM,gBAAA,GAC9B,OAAO,GAAG,oBAAoB,GAAG,OAAO,IAAI;AAOlD,QANA,MAAMC,GAAAA,UAAU,YAAY,IAAI,GAEhC,QAAQ;AAAA,MACN,aAAa,MAAM,cAAc,aAAa,CAAC,SAAS,MAAM,iBAAiB,OAAO,CAAC,WAAW,MAAM,gBAAgB,eAAe,CAAC;AAAA,IAAA,GAGtI,qBAAqB;AACvB,cAAQ,MAAM,gDAA2C;AAEzD,UAAI;AACF,cAAM,WAAW,MAAM,OAAO,UAAU,GAClC,iBAAiB,MAAM,SAAS,cAAc,UAAU,GACxD,gBAAgB,MAAM,SAAS,OAAO,MAAM;AAAA,UAChD,GAAG;AAAA,UACH,QAAQ;AAAA,QAAA,CACT;AACD,cAAMA,GAAAA,UAAU,YAAY,aAAa,GAEzC,QAAQ,QAAQ,yCAAyC;AAAA,MAC3D,SAAS,KAAK;AACZ,gBAAQ,KAAK,mDAAmD,WAAW,GAAG,CAAC,EAAE;AAAA,MACnF;AAAA,IACF;AAEA,UAAM,IAAI;AAAA,MACR,6BAA6B;AAAA,MAC7B,YAAY,OAAO,WAAW,OAAO,IAAI;AAAA,MACzC;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA;AAAA,MACA,uBACE,qBAAqB,IAAI,4BAA4B,qBAAqB;AAAA,MAC5E,cAAc;AAAA,IAAA,CACf,GAEG,kBAAkB,KACpB,QAAQ;AAAA,MACN,yBAAyB,MAAM,iBAAiB,OAAO,CAAC;AAAA,IAAA,GAI5D,QAAQ,QAAQ,mCAAmC,SAAS,EAAE;AAAA,EAChE,SAAS,KAAK;AACZ,UAAA,MAAM,MAAM,GAAG,GACT;AAAA,EACR,UAAA;AACE,aAAS,eACT,MAAM,YACN,MAAM,OAAO,UAAA;AAAA,EACf;AACF;;"}
@@ -1,43 +1,23 @@
+ import {TypegenWorkerChannel as TypegenWorkerChannel_2} from '@sanity/codegen'
+ import {WorkerChannel} from '@sanity/worker-channels'
+
  export declare interface TypegenGenerateTypesWorkerData {
  workDir: string
- workspaceName?: string
  schemaPath: string
  searchPath: string | string[]
  overloadClientMethods?: boolean
  }

- export declare type TypegenGenerateTypesWorkerMessage =
- | {
- type: 'error'
- error: Error
- fatal: boolean
- query?: string
- filename?: string
- }
- | {
- type: 'types'
- filename: string
- types: {
- queryName: string
- query: string
- type: string
- unknownTypeNodesGenerated: number
- typeNodesGenerated: number
- emptyUnionTypeNodesGenerated: number
- }[]
- }
- | {
- type: 'schema'
- filename: string
- schema: string
- length: number
- }
- | {
- type: 'typemap'
- typeMap: string
- }
- | {
- type: 'complete'
- }
+ export declare type TypegenWorkerChannel = WorkerChannel.Definition<
+ {
+ loadedSchema: WorkerChannel.Event
+ typegenStarted: WorkerChannel.Event<{
+ expectedFileCount: number
+ }>
+ typegenComplete: WorkerChannel.Event<{
+ code: string
+ }>
+ } & TypegenWorkerChannel_2['__definition']
+ >

  export {}
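The worker's contract is no longer a discriminated union of `postMessage` payloads; it is a `WorkerChannel.Definition` that layers the CLI-specific events on top of the channel exported by `@sanity/codegen` (`TypegenWorkerChannel_2['__definition']`). A hedged sketch of how such a definition is typically consumed on the receiving side; `ExampleChannel` and `consume` are illustrative stand-ins, and only the call shapes visible in this diff are assumed:

```ts
import {Worker} from 'node:worker_threads'
import {WorkerChannel, WorkerChannelReceiver} from '@sanity/worker-channels'

// Minimal stand-in channel; the real TypegenWorkerChannel also merges in the
// events and streams defined by @sanity/codegen via its '__definition'.
type ExampleChannel = WorkerChannel.Definition<{
  loadedSchema: WorkerChannel.Event
  typegenStarted: WorkerChannel.Event<{expectedFileCount: number}>
  typegenComplete: WorkerChannel.Event<{code: string}>
}>

async function consume(worker: Worker): Promise<string> {
  const receiver = WorkerChannelReceiver.from<ExampleChannel>(worker)
  await receiver.event.loadedSchema()
  const {expectedFileCount} = await receiver.event.typegenStarted()
  console.log(`expecting ${expectedFileCount} files`)
  const {code} = await receiver.event.typegenComplete()
  receiver.unsubscribe()
  return code
}
```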
@@ -1,123 +1,47 @@
  "use strict";
- var node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), debugIt = require("debug"), groqJs = require("groq-js");
+ var fs = require("node:fs/promises"), path = require("node:path"), node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), workerChannels = require("@sanity/worker-channels");
  function _interopDefaultCompat(e) {
  return e && typeof e == "object" && "default" in e ? e : { default: e };
  }
- var debugIt__default = /* @__PURE__ */ _interopDefaultCompat(debugIt);
- const $info = debugIt__default.default("sanity:codegen:generate:info");
+ var path__default = /* @__PURE__ */ _interopDefaultCompat(path);
  if (node_worker_threads.isMainThread || !node_worker_threads.parentPort)
  throw new Error("This module must be run as a worker thread");
- const opts = node_worker_threads.workerData;
  codegen.registerBabel();
- async function main() {
- const schema = await codegen.readSchema(opts.schemaPath), typeGenerator = new codegen.TypeGenerator(schema), schemaTypes = [typeGenerator.generateSchemaTypes(), codegen.TypeGenerator.generateKnownTypes()].join(`
- `).trim(), resolver = codegen.getResolver();
- node_worker_threads.parentPort?.postMessage({
- type: "schema",
- schema: `${schemaTypes.trim()}
- `,
- filename: "schema.json",
- length: schema.length
- });
- const queries = codegen.findQueriesInPath({
- path: opts.searchPath,
- resolver
- }), allQueries = [];
- for await (const result of queries) {
- if (result.type === "error") {
- node_worker_threads.parentPort?.postMessage({
- type: "error",
- error: result.error,
- fatal: !1,
- filename: result.filename
- });
- continue;
+ async function main({
+ schemaPath,
+ searchPath,
+ workDir,
+ overloadClientMethods
+ }) {
+ const report = workerChannels.WorkerChannelReporter.from(node_worker_threads.parentPort), fullPath = path__default.default.join(workDir, schemaPath);
+ try {
+ if (!(await fs.stat(fullPath)).isFile())
+ throw new Error(`Schema path is not a file: ${schemaPath}`);
+ } catch (err) {
+ if (err.code === "ENOENT") {
+ const hint = schemaPath === "./schema.json" ? ' - did you run "sanity schema extract"?' : "";
+ throw new Error(`Schema file not found: ${fullPath}${hint}`, { cause: err });
  }
- $info(`Processing ${result.queries.length} queries in "${result.filename}"...`);
- const fileQueryTypes = [];
- for (const { name: queryName, result: query } of result.queries)
- try {
- const ast = codegen.safeParseQuery(query), queryTypes = groqJs.typeEvaluate(ast, schema), typeName = `${queryName}Result`, type = typeGenerator.generateTypeNodeTypes(typeName, queryTypes), queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes);
- fileQueryTypes.push({
- queryName,
- query,
- typeName,
- typeNode: queryTypes,
- type: `${type.trim()}
- `,
- unknownTypeNodesGenerated: queryTypeStats.unknownTypes,
- typeNodesGenerated: queryTypeStats.allTypes,
- emptyUnionTypeNodesGenerated: queryTypeStats.emptyUnions
- });
- } catch (err) {
- node_worker_threads.parentPort?.postMessage({
- type: "error",
- error: new Error(
- `Error generating types for query "${queryName}" in "${result.filename}": ${err.message}`,
- { cause: err }
- ),
- fatal: !1,
- query
- });
- }
- fileQueryTypes.length > 0 && ($info(`Generated types for ${fileQueryTypes.length} queries in "${result.filename}"
- `), node_worker_threads.parentPort?.postMessage({
- type: "types",
- types: fileQueryTypes,
- filename: result.filename
- })), fileQueryTypes.length > 0 && allQueries.push(...fileQueryTypes);
- }
- if (opts.overloadClientMethods && allQueries.length > 0) {
- const typeMap = `${typeGenerator.generateQueryMap(allQueries).trim()}
- `;
- node_worker_threads.parentPort?.postMessage({
- type: "typemap",
- typeMap
- });
+ throw err;
  }
- node_worker_threads.parentPort?.postMessage({
- type: "complete"
+ const schema = await codegen.readSchema(fullPath);
+ report.event.loadedSchema();
+ const typeGenerator = new codegen.TypeGenerator(), { files, queries } = codegen.findQueriesInPath({
+ path: searchPath,
+ resolver: codegen.getResolver(workDir)
  });
+ report.event.typegenStarted({ expectedFileCount: files.length });
+ const result = await typeGenerator.generateTypes({
+ queries,
+ schema,
+ reporter: report,
+ schemaPath,
+ root: workDir,
+ overloadClientMethods
+ });
+ report.event.typegenComplete(result);
  }
- function walkAndCountQueryTypeNodeStats(typeNode) {
- switch (typeNode.type) {
- case "unknown":
- return { allTypes: 1, unknownTypes: 1, emptyUnions: 0 };
- case "array": {
- const acc = walkAndCountQueryTypeNodeStats(typeNode.of);
- return acc.allTypes += 1, acc;
- }
- case "object": {
- if (typeNode.rest && typeNode.rest.type === "unknown")
- return { allTypes: 2, unknownTypes: 1, emptyUnions: 0 };
- const restStats = typeNode.rest ? walkAndCountQueryTypeNodeStats(typeNode.rest) : { allTypes: 1, unknownTypes: 0, emptyUnions: 0 };
- return Object.values(typeNode.attributes).reduce((acc, attribute) => {
- const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(
- attribute.value
- );
- return {
- allTypes: acc.allTypes + allTypes,
- unknownTypes: acc.unknownTypes + unknownTypes,
- emptyUnions: acc.emptyUnions + emptyUnions
- };
- }, restStats);
- }
- case "union":
- return typeNode.of.length === 0 ? { allTypes: 1, unknownTypes: 0, emptyUnions: 1 } : typeNode.of.reduce(
- (acc, type) => {
- const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(type);
- return {
- allTypes: acc.allTypes + allTypes,
- unknownTypes: acc.unknownTypes + unknownTypes,
- emptyUnions: acc.emptyUnions + emptyUnions
- };
- },
- { allTypes: 1, unknownTypes: 0, emptyUnions: 0 }
- // count the union type itself
- );
- default:
- return { allTypes: 1, unknownTypes: 0, emptyUnions: 0 };
- }
- }
- main();
+ main(node_worker_threads.workerData).catch((err) => {
+ throw err;
+ });
  //# sourceMappingURL=typegenGenerate.js.map
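The worker side mirrors the parent with a `WorkerChannelReporter`: instead of many `parentPort.postMessage({type: …})` calls and a local `walkAndCountQueryTypeNodeStats` helper (the type-node counting now happens inside `@sanity/codegen`'s `TypeGenerator.generateTypes`), it reports named events on the channel. A simplified sketch of that reporter half, assuming only the calls visible in the bundled output above; the payload values and `run` wrapper are illustrative:

```ts
import {parentPort, workerData} from 'node:worker_threads'
import {WorkerChannelReporter} from '@sanity/worker-channels'

// Mirrors TypegenGenerateTypesWorkerData from the .d.ts diff above.
interface WorkerInput {
  workDir: string
  schemaPath: string
  searchPath: string | string[]
  overloadClientMethods?: boolean
}

async function run(input: WorkerInput): Promise<void> {
  if (!parentPort) throw new Error('This module must be run as a worker thread')
  // In the real worker the reporter is parameterized with TypegenWorkerChannel.
  const report = WorkerChannelReporter.from(parentPort)

  // ...read the schema from input.schemaPath, then tell the parent it is ready.
  report.event.loadedSchema()

  // ...discover source files under input.searchPath, then announce how many to expect.
  report.event.typegenStarted({expectedFileCount: 0})

  // ...generate types (in the real worker this is TypeGenerator.generateTypes, which also
  // streams per-file evaluatedModules progress through the same reporter), then finish.
  report.event.typegenComplete({code: '// generated code'})
}

run(workerData as WorkerInput).catch((err) => {
  // Let the worker crash; the parent observes the error and terminates it.
  throw err
})
```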
@@ -1 +1 @@
- {"version":3,"file":"typegenGenerate.js","sources":["../../src/workers/typegenGenerate.ts"],"sourcesContent":["import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n findQueriesInPath,\n getResolver,\n readSchema,\n registerBabel,\n safeParseQuery,\n TypeGenerator,\n} from '@sanity/codegen'\nimport createDebug from 'debug'\nimport {typeEvaluate, type TypeNode} from 'groq-js'\n\nconst $info = createDebug('sanity:codegen:generate:info')\n\nexport interface TypegenGenerateTypesWorkerData {\n workDir: string\n workspaceName?: string\n schemaPath: string\n searchPath: string | string[]\n overloadClientMethods?: boolean\n}\n\nexport type TypegenGenerateTypesWorkerMessage =\n | {\n type: 'error'\n error: Error\n fatal: boolean\n query?: string\n filename?: string\n }\n | {\n type: 'types'\n filename: string\n types: {\n queryName: string\n query: string\n type: string\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n emptyUnionTypeNodesGenerated: number\n }[]\n }\n | {\n type: 'schema'\n filename: string\n schema: string\n length: number\n }\n | {\n type: 'typemap'\n typeMap: string\n }\n | {\n type: 'complete'\n }\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst opts = _workerData as TypegenGenerateTypesWorkerData\n\nregisterBabel()\n\nasync function main() {\n const schema = await readSchema(opts.schemaPath)\n\n const typeGenerator = new TypeGenerator(schema)\n const schemaTypes = [typeGenerator.generateSchemaTypes(), TypeGenerator.generateKnownTypes()]\n .join('\\n')\n .trim()\n const resolver = getResolver()\n\n parentPort?.postMessage({\n type: 'schema',\n schema: `${schemaTypes.trim()}\\n`,\n filename: 'schema.json',\n length: schema.length,\n } satisfies TypegenGenerateTypesWorkerMessage)\n\n const queries = findQueriesInPath({\n path: opts.searchPath,\n resolver,\n })\n\n const allQueries = []\n\n for await (const result of queries) {\n if (result.type === 'error') {\n parentPort?.postMessage({\n type: 'error',\n error: result.error,\n fatal: false,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n continue\n }\n $info(`Processing ${result.queries.length} queries in \"${result.filename}\"...`)\n\n const fileQueryTypes: {\n queryName: string\n query: string\n type: string\n typeName: string\n typeNode: TypeNode\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n emptyUnionTypeNodesGenerated: number\n }[] = []\n for (const {name: queryName, result: query} of result.queries) {\n try {\n const ast = safeParseQuery(query)\n const queryTypes = typeEvaluate(ast, schema)\n\n const typeName = `${queryName}Result`\n const type = typeGenerator.generateTypeNodeTypes(typeName, queryTypes)\n\n const queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes)\n fileQueryTypes.push({\n queryName,\n query,\n typeName,\n typeNode: queryTypes,\n type: `${type.trim()}\\n`,\n unknownTypeNodesGenerated: queryTypeStats.unknownTypes,\n typeNodesGenerated: queryTypeStats.allTypes,\n emptyUnionTypeNodesGenerated: queryTypeStats.emptyUnions,\n })\n } catch (err) {\n parentPort?.postMessage({\n type: 'error',\n error: new Error(\n `Error generating types for query \"${queryName}\" in \"${result.filename}\": ${err.message}`,\n {cause: err},\n ),\n fatal: false,\n query,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n }\n\n if (fileQueryTypes.length > 0) {\n $info(`Generated types for ${fileQueryTypes.length} queries in 
\"${result.filename}\"\\n`)\n parentPort?.postMessage({\n type: 'types',\n types: fileQueryTypes,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n\n if (fileQueryTypes.length > 0) {\n allQueries.push(...fileQueryTypes)\n }\n }\n\n if (opts.overloadClientMethods && allQueries.length > 0) {\n const typeMap = `${typeGenerator.generateQueryMap(allQueries).trim()}\\n`\n parentPort?.postMessage({\n type: 'typemap',\n typeMap,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n\n parentPort?.postMessage({\n type: 'complete',\n } satisfies TypegenGenerateTypesWorkerMessage)\n}\n\nfunction walkAndCountQueryTypeNodeStats(typeNode: TypeNode): {\n allTypes: number\n unknownTypes: number\n emptyUnions: number\n} {\n switch (typeNode.type) {\n case 'unknown': {\n return {allTypes: 1, unknownTypes: 1, emptyUnions: 0}\n }\n case 'array': {\n const acc = walkAndCountQueryTypeNodeStats(typeNode.of)\n acc.allTypes += 1 // count the array type itself\n return acc\n }\n case 'object': {\n // if the rest is unknown, we count it as one unknown type\n if (typeNode.rest && typeNode.rest.type === 'unknown') {\n return {allTypes: 2, unknownTypes: 1, emptyUnions: 0} // count the object type itself as well\n }\n\n const restStats = typeNode.rest\n ? walkAndCountQueryTypeNodeStats(typeNode.rest)\n : {allTypes: 1, unknownTypes: 0, emptyUnions: 0} // count the object type itself\n\n return Object.values(typeNode.attributes).reduce((acc, attribute) => {\n const {allTypes, unknownTypes, emptyUnions} = walkAndCountQueryTypeNodeStats(\n attribute.value,\n )\n return {\n allTypes: acc.allTypes + allTypes,\n unknownTypes: acc.unknownTypes + unknownTypes,\n emptyUnions: acc.emptyUnions + emptyUnions,\n }\n }, restStats)\n }\n case 'union': {\n if (typeNode.of.length === 0) {\n return {allTypes: 1, unknownTypes: 0, emptyUnions: 1}\n }\n\n return typeNode.of.reduce(\n (acc, type) => {\n const {allTypes, unknownTypes, emptyUnions} = walkAndCountQueryTypeNodeStats(type)\n return {\n allTypes: acc.allTypes + allTypes,\n unknownTypes: acc.unknownTypes + unknownTypes,\n emptyUnions: acc.emptyUnions + emptyUnions,\n }\n },\n {allTypes: 1, unknownTypes: 0, emptyUnions: 0}, // count the union type itself\n )\n }\n default: {\n return {allTypes: 1, unknownTypes: 0, emptyUnions: 0}\n }\n }\n}\n\nvoid 
main()\n"],"names":["createDebug","isMainThread","parentPort","_workerData","registerBabel","readSchema","TypeGenerator","getResolver","findQueriesInPath","safeParseQuery","typeEvaluate"],"mappings":";;;;;;AAaA,MAAM,QAAQA,iBAAAA,QAAY,8BAA8B;AA4CxD,IAAIC,oBAAAA,gBAAgB,CAACC,oBAAAA;AACnB,QAAM,IAAI,MAAM,4CAA4C;AAG9D,MAAM,OAAOC,oBAAAA;AAEbC,QAAAA,cAAA;AAEA,eAAe,OAAO;AACpB,QAAM,SAAS,MAAMC,mBAAW,KAAK,UAAU,GAEzC,gBAAgB,IAAIC,sBAAc,MAAM,GACxC,cAAc,CAAC,cAAc,oBAAA,GAAuBA,QAAAA,cAAc,mBAAA,CAAoB,EACzF,KAAK;AAAA,CAAI,EACT,KAAA,GACG,WAAWC,oBAAA;AAEjBL,sBAAAA,YAAY,YAAY;AAAA,IACtB,MAAM;AAAA,IACN,QAAQ,GAAG,YAAY,KAAA,CAAM;AAAA;AAAA,IAC7B,UAAU;AAAA,IACV,QAAQ,OAAO;AAAA,EAAA,CAC4B;AAE7C,QAAM,UAAUM,QAAAA,kBAAkB;AAAA,IAChC,MAAM,KAAK;AAAA,IACX;AAAA,EAAA,CACD,GAEK,aAAa,CAAA;AAEnB,mBAAiB,UAAU,SAAS;AAClC,QAAI,OAAO,SAAS,SAAS;AAC3BN,0BAAAA,YAAY,YAAY;AAAA,QACtB,MAAM;AAAA,QACN,OAAO,OAAO;AAAA,QACd,OAAO;AAAA,QACP,UAAU,OAAO;AAAA,MAAA,CAC0B;AAC7C;AAAA,IACF;AACA,UAAM,cAAc,OAAO,QAAQ,MAAM,gBAAgB,OAAO,QAAQ,MAAM;AAE9E,UAAM,iBASA,CAAA;AACN,eAAW,EAAC,MAAM,WAAW,QAAQ,MAAA,KAAU,OAAO;AACpD,UAAI;AACF,cAAM,MAAMO,uBAAe,KAAK,GAC1B,aAAaC,OAAAA,aAAa,KAAK,MAAM,GAErC,WAAW,GAAG,SAAS,UACvB,OAAO,cAAc,sBAAsB,UAAU,UAAU,GAE/D,iBAAiB,+BAA+B,UAAU;AAChE,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAU;AAAA,UACV,MAAM,GAAG,KAAK,KAAA,CAAM;AAAA;AAAA,UACpB,2BAA2B,eAAe;AAAA,UAC1C,oBAAoB,eAAe;AAAA,UACnC,8BAA8B,eAAe;AAAA,QAAA,CAC9C;AAAA,MACH,SAAS,KAAK;AACZR,4BAAAA,YAAY,YAAY;AAAA,UACtB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,YACT,qCAAqC,SAAS,SAAS,OAAO,QAAQ,MAAM,IAAI,OAAO;AAAA,YACvF,EAAC,OAAO,IAAA;AAAA,UAAG;AAAA,UAEb,OAAO;AAAA,UACP;AAAA,QAAA,CAC2C;AAAA,MAC/C;AAGE,mBAAe,SAAS,MAC1B,MAAM,uBAAuB,eAAe,MAAM,gBAAgB,OAAO,QAAQ;AAAA,CAAK,GACtFA,oBAAAA,YAAY,YAAY;AAAA,MACtB,MAAM;AAAA,MACN,OAAO;AAAA,MACP,UAAU,OAAO;AAAA,IAAA,CAC0B,IAG3C,eAAe,SAAS,KAC1B,WAAW,KAAK,GAAG,cAAc;AAAA,EAErC;AAEA,MAAI,KAAK,yBAAyB,WAAW,SAAS,GAAG;AACvD,UAAM,UAAU,GAAG,cAAc,iBAAiB,UAAU,EAAE,MAAM;AAAA;AACpEA,wBAAAA,YAAY,YAAY;AAAA,MACtB,MAAM;AAAA,MACN;AAAA,IAAA,CAC2C;AAAA,EAC/C;AAEAA,sBAAAA,YAAY,YAAY;AAAA,IACtB,MAAM;AAAA,EAAA,CACqC;AAC/C;AAEA,SAAS,+BAA+B,UAItC;AACA,UAAQ,SAAS,MAAA;AAAA,IACf,KAAK;AACH,aAAO,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAA;AAAA,IAErD,KAAK,SAAS;AACZ,YAAM,MAAM,+BAA+B,SAAS,EAAE;AACtD,aAAA,IAAI,YAAY,GACT;AAAA,IACT;AAAA,IACA,KAAK,UAAU;AAEb,UAAI,SAAS,QAAQ,SAAS,KAAK,SAAS;AAC1C,eAAO,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAA;AAGrD,YAAM,YAAY,SAAS,OACvB,+BAA+B,SAAS,IAAI,IAC5C,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAA;AAEhD,aAAO,OAAO,OAAO,SAAS,UAAU,EAAE,OAAO,CAAC,KAAK,cAAc;AACnE,cAAM,EAAC,UAAU,cAAc,YAAA,IAAe;AAAA,UAC5C,UAAU;AAAA,QAAA;AAEZ,eAAO;AAAA,UACL,UAAU,IAAI,WAAW;AAAA,UACzB,cAAc,IAAI,eAAe;AAAA,UACjC,aAAa,IAAI,cAAc;AAAA,QAAA;AAAA,MAEnC,GAAG,SAAS;AAAA,IACd;AAAA,IACA,KAAK;AACH,aAAI,SAAS,GAAG,WAAW,IAClB,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAA,IAG9C,SAAS,GAAG;AAAA,QACjB,CAAC,KAAK,SAAS;AACb,gBAAM,EAAC,UAAU,cAAc,YAAA,IAAe,+BAA+B,IAAI;AACjF,iBAAO;AAAA,YACL,UAAU,IAAI,WAAW;AAAA,YACzB,cAAc,IAAI,eAAe;AAAA,YACjC,aAAa,IAAI,cAAc;AAAA,UAAA;AAAA,QAEnC;AAAA,QACA,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAA;AAAA;AAAA,MAAC;AAAA,IAGjD;AACE,aAAO,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAA;AAAA,EAAC;AAG1D;AAEK,KAAA;"}
+ {"version":3,"file":"typegenGenerate.js","sources":["../../src/workers/typegenGenerate.ts"],"sourcesContent":["import {stat} from 'node:fs/promises'\nimport path from 'node:path'\nimport {isMainThread, parentPort, workerData} from 'node:worker_threads'\n\nimport {\n findQueriesInPath,\n getResolver,\n readSchema,\n registerBabel,\n TypeGenerator,\n type TypegenWorkerChannel as CodegenTypegenWorkerChannel,\n} from '@sanity/codegen'\nimport {type WorkerChannel, WorkerChannelReporter} from '@sanity/worker-channels'\n\nexport interface TypegenGenerateTypesWorkerData {\n workDir: string\n schemaPath: string\n searchPath: string | string[]\n overloadClientMethods?: boolean\n}\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nregisterBabel()\n\nexport type TypegenWorkerChannel = WorkerChannel.Definition<\n {\n loadedSchema: WorkerChannel.Event\n typegenStarted: WorkerChannel.Event<{expectedFileCount: number}>\n typegenComplete: WorkerChannel.Event<{code: string}>\n } & CodegenTypegenWorkerChannel['__definition']\n>\n\nasync function main({\n schemaPath,\n searchPath,\n workDir,\n overloadClientMethods,\n}: TypegenGenerateTypesWorkerData) {\n const report = WorkerChannelReporter.from<TypegenWorkerChannel>(parentPort)\n\n const fullPath = path.join(workDir, schemaPath)\n\n try {\n const schemaStats = await stat(fullPath)\n if (!schemaStats.isFile()) {\n throw new Error(`Schema path is not a file: ${schemaPath}`)\n }\n } catch (err) {\n if (err.code === 'ENOENT') {\n // If the user has not provided a specific schema path (eg we're using the default), give some help\n const hint = schemaPath === './schema.json' ? ` - did you run \"sanity schema extract\"?` : ''\n throw new Error(`Schema file not found: ${fullPath}${hint}`, {cause: err})\n }\n throw err\n }\n\n const schema = await readSchema(fullPath)\n\n report.event.loadedSchema()\n\n const typeGenerator = new TypeGenerator()\n\n const {files, queries} = findQueriesInPath({\n path: searchPath,\n resolver: getResolver(workDir),\n })\n report.event.typegenStarted({expectedFileCount: files.length})\n\n const result = await typeGenerator.generateTypes({\n queries,\n schema,\n reporter: report,\n schemaPath,\n root: workDir,\n overloadClientMethods,\n })\n report.event.typegenComplete(result)\n}\n\nmain(workerData).catch((err) => {\n // worker will terminate and parent will catch the error\n throw 
err\n})\n"],"names":["isMainThread","parentPort","registerBabel","WorkerChannelReporter","path","stat","readSchema","TypeGenerator","findQueriesInPath","getResolver","workerData"],"mappings":";;;;;;AAqBA,IAAIA,oBAAAA,gBAAgB,CAACC,oBAAAA;AACnB,QAAM,IAAI,MAAM,4CAA4C;AAG9DC,QAAAA,cAAA;AAUA,eAAe,KAAK;AAAA,EAClB;AAAA,EACA;AAAA,EACA;AAAA,EACA;AACF,GAAmC;AACjC,QAAM,SAASC,qCAAsB,KAA2BF,oBAAAA,UAAU,GAEpE,WAAWG,sBAAK,KAAK,SAAS,UAAU;AAE9C,MAAI;AAEF,QAAI,EADgB,MAAMC,GAAAA,KAAK,QAAQ,GACtB,OAAA;AACf,YAAM,IAAI,MAAM,8BAA8B,UAAU,EAAE;AAAA,EAE9D,SAAS,KAAK;AACZ,QAAI,IAAI,SAAS,UAAU;AAEzB,YAAM,OAAO,eAAe,kBAAkB,4CAA4C;AAC1F,YAAM,IAAI,MAAM,0BAA0B,QAAQ,GAAG,IAAI,IAAI,EAAC,OAAO,KAAI;AAAA,IAC3E;AACA,UAAM;AAAA,EACR;AAEA,QAAM,SAAS,MAAMC,QAAAA,WAAW,QAAQ;AAExC,SAAO,MAAM,aAAA;AAEb,QAAM,gBAAgB,IAAIC,sBAAA,GAEpB,EAAC,OAAO,QAAA,IAAWC,0BAAkB;AAAA,IACzC,MAAM;AAAA,IACN,UAAUC,QAAAA,YAAY,OAAO;AAAA,EAAA,CAC9B;AACD,SAAO,MAAM,eAAe,EAAC,mBAAmB,MAAM,QAAO;AAE7D,QAAM,SAAS,MAAM,cAAc,cAAc;AAAA,IAC/C;AAAA,IACA;AAAA,IACA,UAAU;AAAA,IACV;AAAA,IACA,MAAM;AAAA,IACN;AAAA,EAAA,CACD;AACD,SAAO,MAAM,gBAAgB,MAAM;AACrC;AAEA,KAAKC,8BAAU,EAAE,MAAM,CAAC,QAAQ;AAE9B,QAAM;AACR,CAAC;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@sanity/cli",
- "version": "5.0.0-next-major.12+dad441dd1a",
+ "version": "5.0.0-next-major.6+555be27666",
  "description": "Sanity CLI tool for managing Sanity installations, managing plugins, schemas and datasets",
  "keywords": [
  "sanity",
@@ -46,26 +46,26 @@
  "@babel/parser": "^7.28.5",
  "@babel/traverse": "^7.28.5",
  "@sanity/client": "^7.13.1",
- "@sanity/runtime-cli": "^11.1.4",
+ "@sanity/runtime-cli": "^12.1.0",
  "@sanity/telemetry": "^0.8.0",
  "@sanity/template-validator": "^2.4.3",
+ "@sanity/worker-channels": "^1.1.0",
  "chalk": "^4.1.2",
  "debug": "^4.4.3",
  "esbuild": "0.27.0",
  "esbuild-register": "^3.6.0",
- "get-it": "^8.6.10",
+ "get-it": "^8.7.0",
  "get-latest-version": "^5.1.0",
- "groq-js": "^1.22.0",
  "pkg-dir": "^5.0.0",
  "prettier": "^3.7.3",
  "semver": "^7.7.2",
- "@sanity/codegen": "5.0.0-next-major.12+dad441dd1a"
+ "@sanity/codegen": "5.0.0-next-major.6+555be27666"
  },
  "devDependencies": {
  "@rexxars/gitconfiglocal": "^3.0.1",
  "@rollup/plugin-node-resolve": "^16.0.3",
- "@sanity/generate-help-url": "^3.0.0",
- "@sanity/pkg-utils": "^10.0.0",
+ "@sanity/generate-help-url": "^3.0.1",
+ "@sanity/pkg-utils": "^10.1.2",
  "@types/babel__traverse": "^7.28.0",
  "@types/configstore": "^5.0.1",
  "@types/cpx": "^1.5.5",
@@ -109,15 +109,15 @@
  "semver": "^7.7.2",
  "semver-compare": "^1.0.0",
  "tar": "^6.2.1",
- "vite": "^7.2.4",
+ "vite": "^7.2.6",
  "vitest": "^3.2.4",
  "which": "^2.0.2",
  "xdg-basedir": "^4.0.0",
- "@repo/package.config": "5.0.0-next-major.12+dad441dd1a",
- "@repo/test-config": "5.0.0-next-major.12+dad441dd1a",
- "@repo/eslint-config": "5.0.0-next-major.12+dad441dd1a",
- "@repo/tsconfig": "5.0.0-next-major.12+dad441dd1a",
- "@sanity/types": "5.0.0-next-major.12+dad441dd1a"
+ "@repo/eslint-config": "5.0.0-next-major.6+555be27666",
+ "@repo/test-config": "5.0.0-next-major.6+555be27666",
+ "@repo/tsconfig": "5.0.0-next-major.6+555be27666",
+ "@repo/package.config": "5.0.0-next-major.6+555be27666",
+ "@sanity/types": "5.0.0-next-major.6+555be27666"
  },
  "peerDependencies": {
  "babel-plugin-react-compiler": "*"