@sanity/cli 3.86.2-experimental.0 → 3.87.1-canary.4

This diff shows the contents of publicly available package versions as published to their respective public registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in the registry.
@@ -1,111 +1,43 @@
1
- import {GeneratedQueries} from '@sanity/codegen'
2
- import {GeneratedSchema} from '@sanity/codegen'
3
- import {GeneratedTypemap} from '@sanity/codegen'
4
-
5
1
  export declare interface TypegenGenerateTypesWorkerData {
6
2
  workDir: string
7
- schemas: {
8
- projectId: string | 'default'
9
- dataset: string | 'default'
10
- schemaPath: string
11
- }[]
3
+ workspaceName?: string
4
+ schemaPath: string
12
5
  searchPath: string | string[]
13
- overloadClientMethods: boolean
14
- augmentGroqModule: boolean
15
- }
16
-
17
- /** @internal */
18
- export declare type TypegenWorkerChannel = WorkerChannel<{
19
- schema: WorkerChannelEvent<GeneratedSchema>
20
- queries: WorkerChannelStream<GeneratedQueries>
21
- typemap: WorkerChannelEvent<GeneratedTypemap>
22
- }>
23
-
24
- /**
25
- * Represents the definition of a "worker channel" to report progress from the
26
- * worker to the parent. Worker channels can define named events or streams and
27
- * the worker will report events and streams while the parent will await them.
28
- * This allows the control flow of the parent to follow the control flow of the
29
- * worker 1-to-1.
30
- *
31
- * @example
32
- *
33
- * ```ts
34
- * // Define the channel interface (shared between parent and worker)
35
- * type MyWorkerChannel = WorkerChannel<{
36
- * compileStart: WorkerChannelEvent<void>
37
- * compileProgress: WorkerChannelStream<{ file: string; progress: number }>
38
- * compileEnd: WorkerChannelEvent<{ duration: number }>
39
- * }>;
40
- *
41
- * // --- In the worker file (e.g., worker.ts) ---
42
- * import { parentPort } from 'node:worker_threads';
43
- * import { createReporter } from './workerChannels';
44
- *
45
- * const report = createReporter<MyWorkerChannel>(parentPort);
46
- *
47
- * async function runCompilation() {
48
- * report.event.compileStart(); // Signal start
49
- *
50
- * const files = ['a.js', 'b.js', 'c.js'];
51
- * for (const file of files) {
52
- * // Simulate work and report progress
53
- * await new Promise(resolve => setTimeout(resolve, 100));
54
- * report.stream.compileProgress.emit({ file, progress: 100 });
55
- * }
56
- * report.stream.compileProgress.end(); // Signal end of progress stream
57
- *
58
- * report.event.compileEnd({ duration: 300 }); // Signal end with result
59
- * }
60
- *
61
- * runCompilation();
62
- *
63
- * // --- In the parent file (e.g., main.ts) ---
64
- * import { Worker } from 'node:worker_threads';
65
- * import { createReceiver } from './workerChannels';
66
- *
67
- * const worker = new Worker('./worker.js');
68
- * const receiver = createReceiver<MyWorkerChannel>(worker);
69
- *
70
- * async function monitorCompilation() {
71
- * console.log('Waiting for compilation to start...');
72
- * await receiver.event.compileStart();
73
- * console.log('Compilation started.');
74
- *
75
- * console.log('Receiving progress:');
76
- * for await (const progress of receiver.stream.compileProgress()) {
77
- * console.log(` - ${progress.file}: ${progress.progress}%`);
78
- * }
79
- *
80
- * console.log('Waiting for compilation to end...');
81
- * const { duration } = await receiver.event.compileEnd();
82
- * console.log(`Compilation finished in ${duration}ms.`);
83
- *
84
- * await receiver.dispose(); // Clean up listeners and terminate worker
85
- * }
86
- *
87
- * monitorCompilation();
88
- * ```
89
- *
90
- * @internal
91
- */
92
- declare type WorkerChannel<
93
- TWorkerChannel extends Record<
94
- string,
95
- WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>
96
- > = Record<string, WorkerChannelEvent<unknown> | WorkerChannelStream<unknown>>,
97
- > = TWorkerChannel
98
-
99
- /** @internal */
100
- declare type WorkerChannelEvent<TPayload = void> = {
101
- type: 'event'
102
- payload: TPayload
6
+ overloadClientMethods?: boolean
103
7
  }
104
8
 
105
- /** @internal */
106
- declare type WorkerChannelStream<TPayload = void> = {
107
- type: 'stream'
108
- payload: TPayload
109
- }
9
+ export declare type TypegenGenerateTypesWorkerMessage =
10
+ | {
11
+ type: 'error'
12
+ error: Error
13
+ fatal: boolean
14
+ query?: string
15
+ filename?: string
16
+ }
17
+ | {
18
+ type: 'types'
19
+ filename: string
20
+ types: {
21
+ queryName: string
22
+ query: string
23
+ type: string
24
+ unknownTypeNodesGenerated: number
25
+ typeNodesGenerated: number
26
+ emptyUnionTypeNodesGenerated: number
27
+ }[]
28
+ }
29
+ | {
30
+ type: 'schema'
31
+ filename: string
32
+ schema: string
33
+ length: number
34
+ }
35
+ | {
36
+ type: 'typemap'
37
+ typeMap: string
38
+ }
39
+ | {
40
+ type: 'complete'
41
+ }
110
42
 
111
43
  export {}
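In the new declarations the worker-channel types are gone: the typegen worker now reports progress as a plain `TypegenGenerateTypesWorkerMessage` discriminated union posted with `parentPort.postMessage`. Below is a minimal sketch of how a parent thread might drive the worker and consume those messages; the worker path, the option values, and the import specifier for the types are placeholders for illustration, not taken from the package.

```ts
import {Worker} from 'node:worker_threads'
import type {
  TypegenGenerateTypesWorkerData,
  TypegenGenerateTypesWorkerMessage,
} from './typegenGenerate' // hypothetical import path, for illustration only

// Placeholder worker data; the real CLI derives these values from its typegen config.
const workerData: TypegenGenerateTypesWorkerData = {
  workDir: process.cwd(),
  schemaPath: './schema.json',
  searchPath: './src/**/*.{ts,tsx}',
  overloadClientMethods: true,
}

// Placeholder path; the CLI resolves the compiled worker internally.
const worker = new Worker('./typegenGenerate.js', {workerData})

worker.on('message', (msg: TypegenGenerateTypesWorkerMessage) => {
  switch (msg.type) {
    case 'schema':
      // msg.schema holds the generated schema declarations, msg.length the schema type count
      break
    case 'types':
      // msg.types lists the generated result types for queries found in msg.filename
      break
    case 'typemap':
      // msg.typeMap maps queries to result types (only sent when overloadClientMethods is set)
      break
    case 'error':
      if (msg.fatal) throw msg.error
      break
    case 'complete':
      void worker.terminate()
      break
  }
})
```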
@@ -1,37 +1,124 @@
1
1
  "use strict";
2
- var node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), debugIt = require("debug"), workerChannel = require("../_chunks-cjs/workerChannel.js");
2
+ var node_worker_threads = require("node:worker_threads"), codegen = require("@sanity/codegen"), createDebug = require("debug"), groqJs = require("groq-js");
3
3
  function _interopDefaultCompat(e) {
4
4
  return e && typeof e == "object" && "default" in e ? e : { default: e };
5
5
  }
6
- var debugIt__default = /* @__PURE__ */ _interopDefaultCompat(debugIt);
7
- const $info = debugIt__default.default("sanity:codegen:generate:info");
6
+ var createDebug__default = /* @__PURE__ */ _interopDefaultCompat(createDebug);
7
+ const $info = createDebug__default.default("sanity:codegen:generate:info");
8
+ createDebug__default.default("sanity:codegen:generate:warn");
8
9
  if (node_worker_threads.isMainThread || !node_worker_threads.parentPort)
9
10
  throw new Error("This module must be run as a worker thread");
10
- const report = workerChannel.createReporter(node_worker_threads.parentPort), opts = node_worker_threads.workerData;
11
+ const opts = node_worker_threads.workerData;
12
+ codegen.registerBabel();
11
13
  async function main() {
12
- const schemas = [];
13
- for (const schemaConfig of opts.schemas) {
14
- $info(`Reading schema from ${schemaConfig.schemaPath}...`);
15
- const schema = await codegen.readSchema(schemaConfig.schemaPath);
16
- schemas.push({
17
- schema,
18
- projectId: schemaConfig.projectId,
19
- dataset: schemaConfig.dataset,
20
- filename: schemaConfig.schemaPath
14
+ const schema = await codegen.readSchema(opts.schemaPath), typeGenerator = new codegen.TypeGenerator(schema), schemaTypes = [typeGenerator.generateSchemaTypes(), codegen.TypeGenerator.generateKnownTypes()].join(`
15
+ `).trim(), resolver = codegen.getResolver();
16
+ node_worker_threads.parentPort?.postMessage({
17
+ type: "schema",
18
+ schema: `${schemaTypes.trim()}
19
+ `,
20
+ filename: "schema.json",
21
+ length: schema.length
22
+ });
23
+ const queries = codegen.findQueriesInPath({
24
+ path: opts.searchPath,
25
+ resolver
26
+ }), allQueries = [];
27
+ for await (const result of queries) {
28
+ if (result.type === "error") {
29
+ node_worker_threads.parentPort?.postMessage({
30
+ type: "error",
31
+ error: result.error,
32
+ fatal: !1,
33
+ filename: result.filename
34
+ });
35
+ continue;
36
+ }
37
+ $info(`Processing ${result.queries.length} queries in "${result.filename}"...`);
38
+ const fileQueryTypes = [];
39
+ for (const { name: queryName, result: query } of result.queries)
40
+ try {
41
+ const ast = codegen.safeParseQuery(query), queryTypes = groqJs.typeEvaluate(ast, schema), typeName = `${queryName}Result`, type = typeGenerator.generateTypeNodeTypes(typeName, queryTypes), queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes);
42
+ fileQueryTypes.push({
43
+ queryName,
44
+ query,
45
+ typeName,
46
+ typeNode: queryTypes,
47
+ type: `${type.trim()}
48
+ `,
49
+ unknownTypeNodesGenerated: queryTypeStats.unknownTypes,
50
+ typeNodesGenerated: queryTypeStats.allTypes,
51
+ emptyUnionTypeNodesGenerated: queryTypeStats.emptyUnions
52
+ });
53
+ } catch (err) {
54
+ node_worker_threads.parentPort?.postMessage({
55
+ type: "error",
56
+ error: new Error(
57
+ `Error generating types for query "${queryName}" in "${result.filename}": ${err.message}`,
58
+ { cause: err }
59
+ ),
60
+ fatal: !1,
61
+ query
62
+ });
63
+ }
64
+ fileQueryTypes.length > 0 && ($info(`Generated types for ${fileQueryTypes.length} queries in "${result.filename}"
65
+ `), node_worker_threads.parentPort?.postMessage({
66
+ type: "types",
67
+ types: fileQueryTypes,
68
+ filename: result.filename
69
+ })), fileQueryTypes.length > 0 && allQueries.push(...fileQueryTypes);
70
+ }
71
+ if (opts.overloadClientMethods && allQueries.length > 0) {
72
+ const typeMap = `${typeGenerator.generateQueryMap(allQueries).trim()}
73
+ `;
74
+ node_worker_threads.parentPort?.postMessage({
75
+ type: "typemap",
76
+ typeMap
21
77
  });
22
78
  }
23
- $info(`Read ${schemas.length} schema definition${schemas.length === 1 ? "" : "s"} successfully.`);
24
- const resolver = codegen.getResolver(), result = codegen.generateTypes({
25
- schemas,
26
- queriesByFile: codegen.findQueriesInPath({ path: opts.searchPath, resolver }),
27
- augmentGroqModule: opts.augmentGroqModule,
28
- overloadClientMethods: opts.overloadClientMethods
79
+ node_worker_threads.parentPort?.postMessage({
80
+ type: "complete"
29
81
  });
30
- report.event.schema(await result.generatedSchema());
31
- for await (const { filename, results } of result.generatedQueries())
32
- report.stream.queries.emit({ filename, results });
33
- report.stream.queries.end(), report.event.typemap(await result.generatedTypemap());
34
82
  }
35
- codegen.registerBabel();
83
+ function walkAndCountQueryTypeNodeStats(typeNode) {
84
+ switch (typeNode.type) {
85
+ case "unknown":
86
+ return { allTypes: 1, unknownTypes: 1, emptyUnions: 0 };
87
+ case "array": {
88
+ const acc = walkAndCountQueryTypeNodeStats(typeNode.of);
89
+ return acc.allTypes += 1, acc;
90
+ }
91
+ case "object": {
92
+ if (typeNode.rest && typeNode.rest.type === "unknown")
93
+ return { allTypes: 2, unknownTypes: 1, emptyUnions: 0 };
94
+ const restStats = typeNode.rest ? walkAndCountQueryTypeNodeStats(typeNode.rest) : { allTypes: 1, unknownTypes: 0, emptyUnions: 0 };
95
+ return Object.values(typeNode.attributes).reduce((acc, attribute) => {
96
+ const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(
97
+ attribute.value
98
+ );
99
+ return {
100
+ allTypes: acc.allTypes + allTypes,
101
+ unknownTypes: acc.unknownTypes + unknownTypes,
102
+ emptyUnions: acc.emptyUnions + emptyUnions
103
+ };
104
+ }, restStats);
105
+ }
106
+ case "union":
107
+ return typeNode.of.length === 0 ? { allTypes: 1, unknownTypes: 0, emptyUnions: 1 } : typeNode.of.reduce(
108
+ (acc, type) => {
109
+ const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(type);
110
+ return {
111
+ allTypes: acc.allTypes + allTypes,
112
+ unknownTypes: acc.unknownTypes + unknownTypes,
113
+ emptyUnions: acc.emptyUnions + emptyUnions
114
+ };
115
+ },
116
+ { allTypes: 1, unknownTypes: 0, emptyUnions: 0 }
117
+ // count the union type itself
118
+ );
119
+ default:
120
+ return { allTypes: 1, unknownTypes: 0, emptyUnions: 0 };
121
+ }
122
+ }
36
123
  main();
37
124
  //# sourceMappingURL=typegenGenerate.js.map
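The worker's new `walkAndCountQueryTypeNodeStats` helper recursively tallies how many type nodes an evaluated query produced, how many of them resolved to `unknown`, and how many unions came out empty; those counts feed the `types` messages above. As an illustration, the evaluated type below is a made-up query result using the same groq-js `TypeNode` shapes the walker switches on, and it would yield 4 nodes in total, 1 unknown, and 0 empty unions.

```ts
// Hypothetical evaluated type for a query whose `author` field could not be resolved:
// an array of objects with one string attribute and one unknown attribute.
const evaluated = {
  type: 'array',
  of: {
    type: 'object',
    attributes: {
      title: {type: 'objectAttribute', value: {type: 'string'}},
      author: {type: 'objectAttribute', value: {type: 'unknown'}},
    },
  },
} as const

// walkAndCountQueryTypeNodeStats(evaluated) would report:
//   allTypes: 4     -> array + object + 'string' + 'unknown'
//   unknownTypes: 1 -> the `author` attribute
//   emptyUnions: 0
```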
@@ -1 +1 @@
1
- {"version":3,"file":"typegenGenerate.js","sources":["../../src/workers/typegenGenerate.ts"],"sourcesContent":["import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n findQueriesInPath,\n type GeneratedQueries,\n type GeneratedSchema,\n type GeneratedTypemap,\n generateTypes,\n getResolver,\n readSchema,\n registerBabel,\n} from '@sanity/codegen'\nimport createDebug from 'debug'\nimport {type SchemaType} from 'groq-js'\n\nimport {\n createReporter,\n type WorkerChannel,\n type WorkerChannelEvent,\n type WorkerChannelStream,\n} from '../util/workerChannel'\n\nconst $info = createDebug('sanity:codegen:generate:info')\n\nexport interface TypegenGenerateTypesWorkerData {\n workDir: string\n schemas: {\n projectId: string | 'default'\n dataset: string | 'default'\n schemaPath: string\n }[]\n searchPath: string | string[]\n overloadClientMethods: boolean\n augmentGroqModule: boolean\n}\n\n/** @internal */\nexport type TypegenWorkerChannel = WorkerChannel<{\n schema: WorkerChannelEvent<GeneratedSchema>\n queries: WorkerChannelStream<GeneratedQueries>\n typemap: WorkerChannelEvent<GeneratedTypemap>\n}>\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst report = createReporter<TypegenWorkerChannel>(parentPort)\nconst opts = _workerData as TypegenGenerateTypesWorkerData\n\nasync function main() {\n const schemas: {\n schema: SchemaType\n projectId: string | 'default'\n dataset: string | 'default'\n filename: string\n }[] = []\n\n for (const schemaConfig of opts.schemas) {\n $info(`Reading schema from ${schemaConfig.schemaPath}...`)\n const schema = await readSchema(schemaConfig.schemaPath)\n schemas.push({\n schema,\n projectId: schemaConfig.projectId,\n dataset: schemaConfig.dataset,\n filename: schemaConfig.schemaPath,\n })\n }\n $info(`Read ${schemas.length} schema definition${schemas.length === 1 ? '' : 's'} successfully.`)\n\n const resolver = getResolver()\n\n const result = generateTypes({\n schemas,\n queriesByFile: findQueriesInPath({path: opts.searchPath, resolver}),\n augmentGroqModule: opts.augmentGroqModule,\n overloadClientMethods: opts.overloadClientMethods,\n })\n\n report.event.schema(await result.generatedSchema())\n\n for await (const {filename, results} of result.generatedQueries()) {\n report.stream.queries.emit({filename, results})\n }\n report.stream.queries.end()\n\n report.event.typemap(await result.generatedTypemap())\n}\n\nregisterBabel()\nmain()\n"],"names":["createDebug","isMainThread","parentPort","createReporter","_workerData","readSchema","getResolver","generateTypes","findQueriesInPath","registerBabel"],"mappings":";;;;;;AAsBA,MAAM,QAAQA,yBAAY,8BAA8B;AAqBxD,IAAIC,oBAAAA,gBAAgB,CAACC,oBAAA;AACb,QAAA,IAAI,MAAM,4CAA4C;AAG9D,MAAM,SAASC,cAAA,eAAqCD,8BAAU,GACxD,OAAOE,oBAAA;AAEb,eAAe,OAAO;AACpB,QAAM,UAKA,CAAC;AAEI,aAAA,gBAAgB,KAAK,SAAS;AACjC,UAAA,uBAAuB,aAAa,UAAU,KAAK;AACzD,UAAM,SAAS,MAAMC,mBAAW,aAAa,UAAU;AACvD,YAAQ,KAAK;AAAA,MACX;AAAA,MACA,WAAW,aAAa;AAAA,MACxB,SAAS,aAAa;AAAA,MACtB,UAAU,aAAa;AAAA,IAAA,CACxB;AAAA,EAAA;AAEG,QAAA,QAAQ,QAAQ,MAAM,qBAAqB,QAAQ,WAAW,IAAI,KAAK,GAAG,gBAAgB;AAEhG,QAAM,WAAWC,QAAAA,eAEX,SAASC,QAAAA,cAAc;AAAA,IAC3B;AAAA,IACA,eAAeC,QAAkB,kBAAA,EAAC,MAAM,KAAK,YAAY,UAAS;AAAA,IAClE,mBAAmB,KAAK;AAAA,IACxB,uBAAuB,KAAK;AAAA,EAAA,CAC7B;AAED,SAAO,MAAM,OAAO,MAAM,OAAO,iBAAiB;AAElD,mBAAiB,EAAC,UAAU,QAAO,KAAK,OAAO,iBAAiB;AAC9D,WAAO,OAAO,QAAQ,KAAK,EAAC,UAAU,SAAQ;AAEzC,SAAA,OAAO,QAAQ,OAEtB,OAAO,MAAM,QAAQ,MAAM,OAAO,kBAAkB;AACtD;AAEAC,QAAAA,cAAc;AACd,KAAK;"}
1
+ {"version":3,"file":"typegenGenerate.js","sources":["../../src/workers/typegenGenerate.ts"],"sourcesContent":["import {isMainThread, parentPort, workerData as _workerData} from 'node:worker_threads'\n\nimport {\n findQueriesInPath,\n getResolver,\n readSchema,\n registerBabel,\n safeParseQuery,\n TypeGenerator,\n} from '@sanity/codegen'\nimport createDebug from 'debug'\nimport {typeEvaluate, type TypeNode} from 'groq-js'\n\nconst $info = createDebug('sanity:codegen:generate:info')\nconst $warn = createDebug('sanity:codegen:generate:warn')\n\nexport interface TypegenGenerateTypesWorkerData {\n workDir: string\n workspaceName?: string\n schemaPath: string\n searchPath: string | string[]\n overloadClientMethods?: boolean\n}\n\nexport type TypegenGenerateTypesWorkerMessage =\n | {\n type: 'error'\n error: Error\n fatal: boolean\n query?: string\n filename?: string\n }\n | {\n type: 'types'\n filename: string\n types: {\n queryName: string\n query: string\n type: string\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n emptyUnionTypeNodesGenerated: number\n }[]\n }\n | {\n type: 'schema'\n filename: string\n schema: string\n length: number\n }\n | {\n type: 'typemap'\n typeMap: string\n }\n | {\n type: 'complete'\n }\n\nif (isMainThread || !parentPort) {\n throw new Error('This module must be run as a worker thread')\n}\n\nconst opts = _workerData as TypegenGenerateTypesWorkerData\n\nregisterBabel()\n\nasync function main() {\n const schema = await readSchema(opts.schemaPath)\n\n const typeGenerator = new TypeGenerator(schema)\n const schemaTypes = [typeGenerator.generateSchemaTypes(), TypeGenerator.generateKnownTypes()]\n .join('\\n')\n .trim()\n const resolver = getResolver()\n\n parentPort?.postMessage({\n type: 'schema',\n schema: `${schemaTypes.trim()}\\n`,\n filename: 'schema.json',\n length: schema.length,\n } satisfies TypegenGenerateTypesWorkerMessage)\n\n const queries = findQueriesInPath({\n path: opts.searchPath,\n resolver,\n })\n\n const allQueries = []\n\n for await (const result of queries) {\n if (result.type === 'error') {\n parentPort?.postMessage({\n type: 'error',\n error: result.error,\n fatal: false,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n continue\n }\n $info(`Processing ${result.queries.length} queries in \"${result.filename}\"...`)\n\n const fileQueryTypes: {\n queryName: string\n query: string\n type: string\n typeName: string\n typeNode: TypeNode\n unknownTypeNodesGenerated: number\n typeNodesGenerated: number\n emptyUnionTypeNodesGenerated: number\n }[] = []\n for (const {name: queryName, result: query} of result.queries) {\n try {\n const ast = safeParseQuery(query)\n const queryTypes = typeEvaluate(ast, schema)\n\n const typeName = `${queryName}Result`\n const type = typeGenerator.generateTypeNodeTypes(typeName, queryTypes)\n\n const queryTypeStats = walkAndCountQueryTypeNodeStats(queryTypes)\n fileQueryTypes.push({\n queryName,\n query,\n typeName,\n typeNode: queryTypes,\n type: `${type.trim()}\\n`,\n unknownTypeNodesGenerated: queryTypeStats.unknownTypes,\n typeNodesGenerated: queryTypeStats.allTypes,\n emptyUnionTypeNodesGenerated: queryTypeStats.emptyUnions,\n })\n } catch (err) {\n parentPort?.postMessage({\n type: 'error',\n error: new Error(\n `Error generating types for query \"${queryName}\" in \"${result.filename}\": ${err.message}`,\n {cause: err},\n ),\n fatal: false,\n query,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n }\n\n if (fileQueryTypes.length > 0) {\n $info(`Generated types 
for ${fileQueryTypes.length} queries in \"${result.filename}\"\\n`)\n parentPort?.postMessage({\n type: 'types',\n types: fileQueryTypes,\n filename: result.filename,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n\n if (fileQueryTypes.length > 0) {\n allQueries.push(...fileQueryTypes)\n }\n }\n\n if (opts.overloadClientMethods && allQueries.length > 0) {\n const typeMap = `${typeGenerator.generateQueryMap(allQueries).trim()}\\n`\n parentPort?.postMessage({\n type: 'typemap',\n typeMap,\n } satisfies TypegenGenerateTypesWorkerMessage)\n }\n\n parentPort?.postMessage({\n type: 'complete',\n } satisfies TypegenGenerateTypesWorkerMessage)\n}\n\nfunction walkAndCountQueryTypeNodeStats(typeNode: TypeNode): {\n allTypes: number\n unknownTypes: number\n emptyUnions: number\n} {\n switch (typeNode.type) {\n case 'unknown': {\n return {allTypes: 1, unknownTypes: 1, emptyUnions: 0}\n }\n case 'array': {\n const acc = walkAndCountQueryTypeNodeStats(typeNode.of)\n acc.allTypes += 1 // count the array type itself\n return acc\n }\n case 'object': {\n // if the rest is unknown, we count it as one unknown type\n if (typeNode.rest && typeNode.rest.type === 'unknown') {\n return {allTypes: 2, unknownTypes: 1, emptyUnions: 0} // count the object type itself as well\n }\n\n const restStats = typeNode.rest\n ? walkAndCountQueryTypeNodeStats(typeNode.rest)\n : {allTypes: 1, unknownTypes: 0, emptyUnions: 0} // count the object type itself\n\n return Object.values(typeNode.attributes).reduce((acc, attribute) => {\n const {allTypes, unknownTypes, emptyUnions} = walkAndCountQueryTypeNodeStats(\n attribute.value,\n )\n return {\n allTypes: acc.allTypes + allTypes,\n unknownTypes: acc.unknownTypes + unknownTypes,\n emptyUnions: acc.emptyUnions + emptyUnions,\n }\n }, restStats)\n }\n case 'union': {\n if (typeNode.of.length === 0) {\n return {allTypes: 1, unknownTypes: 0, emptyUnions: 1}\n }\n\n return typeNode.of.reduce(\n (acc, type) => {\n const {allTypes, unknownTypes, emptyUnions} = walkAndCountQueryTypeNodeStats(type)\n return {\n allTypes: acc.allTypes + allTypes,\n unknownTypes: acc.unknownTypes + unknownTypes,\n emptyUnions: acc.emptyUnions + emptyUnions,\n }\n },\n {allTypes: 1, unknownTypes: 0, emptyUnions: 0}, // count the union type itself\n )\n }\n default: {\n return {allTypes: 1, unknownTypes: 0, emptyUnions: 0}\n }\n 
}\n}\n\nmain()\n"],"names":["createDebug","isMainThread","parentPort","_workerData","registerBabel","readSchema","TypeGenerator","getResolver","findQueriesInPath","safeParseQuery","typeEvaluate"],"mappings":";;;;;;AAaA,MAAM,QAAQA,qBAAAA,QAAY,8BAA8B;AAC1CA,6BAAY,8BAA8B;AA4CxD,IAAIC,oBAAAA,gBAAgB,CAACC,oBAAA;AACb,QAAA,IAAI,MAAM,4CAA4C;AAG9D,MAAM,OAAOC,oBAAA;AAEbC,QAAAA,cAAc;AAEd,eAAe,OAAO;AACd,QAAA,SAAS,MAAMC,mBAAW,KAAK,UAAU,GAEzC,gBAAgB,IAAIC,sBAAc,MAAM,GACxC,cAAc,CAAC,cAAc,oBAAoB,GAAGA,QAAAA,cAAc,mBAAmB,CAAC,EACzF,KAAK;AAAA,CAAI,EACT,KAAA,GACG,WAAWC,oBAAY;AAE7BL,sBAAAA,YAAY,YAAY;AAAA,IACtB,MAAM;AAAA,IACN,QAAQ,GAAG,YAAY,KAAM,CAAA;AAAA;AAAA,IAC7B,UAAU;AAAA,IACV,QAAQ,OAAO;AAAA,EAAA,CAC4B;AAE7C,QAAM,UAAUM,QAAAA,kBAAkB;AAAA,IAChC,MAAM,KAAK;AAAA,IACX;AAAA,EAAA,CACD,GAEK,aAAa,CAAC;AAEpB,mBAAiB,UAAU,SAAS;AAC9B,QAAA,OAAO,SAAS,SAAS;AAC3BN,0BAAAA,YAAY,YAAY;AAAA,QACtB,MAAM;AAAA,QACN,OAAO,OAAO;AAAA,QACd,OAAO;AAAA,QACP,UAAU,OAAO;AAAA,MAAA,CAC0B;AAC7C;AAAA,IAAA;AAEF,UAAM,cAAc,OAAO,QAAQ,MAAM,gBAAgB,OAAO,QAAQ,MAAM;AAE9E,UAAM,iBASA,CAAC;AACP,eAAW,EAAC,MAAM,WAAW,QAAQ,MAAA,KAAU,OAAO;AAChD,UAAA;AACI,cAAA,MAAMO,uBAAe,KAAK,GAC1B,aAAaC,OAAa,aAAA,KAAK,MAAM,GAErC,WAAW,GAAG,SAAS,UACvB,OAAO,cAAc,sBAAsB,UAAU,UAAU,GAE/D,iBAAiB,+BAA+B,UAAU;AAChE,uBAAe,KAAK;AAAA,UAClB;AAAA,UACA;AAAA,UACA;AAAA,UACA,UAAU;AAAA,UACV,MAAM,GAAG,KAAK,KAAM,CAAA;AAAA;AAAA,UACpB,2BAA2B,eAAe;AAAA,UAC1C,oBAAoB,eAAe;AAAA,UACnC,8BAA8B,eAAe;AAAA,QAAA,CAC9C;AAAA,eACM,KAAK;AACZR,4BAAAA,YAAY,YAAY;AAAA,UACtB,MAAM;AAAA,UACN,OAAO,IAAI;AAAA,YACT,qCAAqC,SAAS,SAAS,OAAO,QAAQ,MAAM,IAAI,OAAO;AAAA,YACvF,EAAC,OAAO,IAAG;AAAA,UACb;AAAA,UACA,OAAO;AAAA,UACP;AAAA,QAAA,CAC2C;AAAA,MAAA;AAI7C,mBAAe,SAAS,MAC1B,MAAM,uBAAuB,eAAe,MAAM,gBAAgB,OAAO,QAAQ;AAAA,CAAK,GACtFA,gCAAY,YAAY;AAAA,MACtB,MAAM;AAAA,MACN,OAAO;AAAA,MACP,UAAU,OAAO;AAAA,IAAA,CAC0B,IAG3C,eAAe,SAAS,KAC1B,WAAW,KAAK,GAAG,cAAc;AAAA,EAAA;AAIrC,MAAI,KAAK,yBAAyB,WAAW,SAAS,GAAG;AACvD,UAAM,UAAU,GAAG,cAAc,iBAAiB,UAAU,EAAE,KAAM,CAAA;AAAA;AACpEA,wBAAAA,YAAY,YAAY;AAAA,MACtB,MAAM;AAAA,MACN;AAAA,IAAA,CAC2C;AAAA,EAAA;AAG/CA,sBAAAA,YAAY,YAAY;AAAA,IACtB,MAAM;AAAA,EAAA,CACqC;AAC/C;AAEA,SAAS,+BAA+B,UAItC;AACA,UAAQ,SAAS,MAAM;AAAA,IACrB,KAAK;AACH,aAAO,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAC;AAAA,IAEtD,KAAK,SAAS;AACN,YAAA,MAAM,+BAA+B,SAAS,EAAE;AACtD,aAAA,IAAI,YAAY,GACT;AAAA,IAAA;AAAA,IAET,KAAK,UAAU;AAEb,UAAI,SAAS,QAAQ,SAAS,KAAK,SAAS;AAC1C,eAAO,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAC;AAGtD,YAAM,YAAY,SAAS,OACvB,+BAA+B,SAAS,IAAI,IAC5C,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAC;AAE1C,aAAA,OAAO,OAAO,SAAS,UAAU,EAAE,OAAO,CAAC,KAAK,cAAc;AACnE,cAAM,EAAC,UAAU,cAAc,YAAe,IAAA;AAAA,UAC5C,UAAU;AAAA,QACZ;AACO,eAAA;AAAA,UACL,UAAU,IAAI,WAAW;AAAA,UACzB,cAAc,IAAI,eAAe;AAAA,UACjC,aAAa,IAAI,cAAc;AAAA,QACjC;AAAA,SACC,SAAS;AAAA,IAAA;AAAA,IAEd,KAAK;AACH,aAAI,SAAS,GAAG,WAAW,IAClB,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAC,IAG/C,SAAS,GAAG;AAAA,QACjB,CAAC,KAAK,SAAS;AACb,gBAAM,EAAC,UAAU,cAAc,YAAW,IAAI,+BAA+B,IAAI;AAC1E,iBAAA;AAAA,YACL,UAAU,IAAI,WAAW;AAAA,YACzB,cAAc,IAAI,eAAe;AAAA,YACjC,aAAa,IAAI,cAAc;AAAA,UACjC;AAAA,QACF;AAAA,QACA,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAC;AAAA;AAAA,MAC/C;AAAA,IAEF;AACE,aAAO,EAAC,UAAU,GAAG,cAAc,GAAG,aAAa,EAAC;AAAA,EAAA;AAG1D;AAEA,KAAK;"}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@sanity/cli",
3
- "version": "3.86.2-experimental.0",
3
+ "version": "3.87.1-canary.4+84c6362d84",
4
4
  "description": "Sanity CLI tool for managing Sanity installations, managing plugins, schemas and datasets",
5
5
  "keywords": [
6
6
  "sanity",
@@ -46,13 +46,24 @@
46
46
  "src",
47
47
  "templates"
48
48
  ],
49
+ "scripts": {
50
+ "build": "pkg-utils build --strict --check --clean",
51
+ "check:types": "tsc --project tsconfig.lib.json",
52
+ "clean": "rimraf lib",
53
+ "lint": "eslint --cache .",
54
+ "prepublishOnly": "turbo run build",
55
+ "test": "vitest",
56
+ "ts": "node -r esbuild-register",
57
+ "watch": "pkg-utils watch"
58
+ },
49
59
  "dependencies": {
50
60
  "@babel/traverse": "^7.23.5",
51
- "@sanity/client": "^6.29.0",
61
+ "@sanity/client": "^7.0.0",
62
+ "@sanity/codegen": "3.87.1-canary.4+84c6362d84",
52
63
  "@sanity/runtime-cli": "^3.0.0",
53
64
  "@sanity/telemetry": "^0.8.0",
54
65
  "@sanity/template-validator": "^2.4.3",
55
- "@sanity/util": "",
66
+ "@sanity/util": "3.87.1-canary.4+84c6362d84",
56
67
  "chalk": "^4.1.2",
57
68
  "debug": "^4.3.4",
58
69
  "decompress": "^4.2.0",
@@ -63,14 +74,16 @@
63
74
  "pkg-dir": "^5.0.0",
64
75
  "prettier": "^3.3.0",
65
76
  "semver": "^7.3.5",
66
- "validate-npm-package-name": "^3.0.0",
67
- "@sanity/codegen": "3.86.2-experimental.0"
77
+ "validate-npm-package-name": "^3.0.0"
68
78
  },
69
79
  "devDependencies": {
80
+ "@repo/package.config": "3.87.0",
81
+ "@repo/test-config": "3.87.0",
70
82
  "@rexxars/gitconfiglocal": "^3.0.1",
71
83
  "@rollup/plugin-node-resolve": "^15.2.3",
72
84
  "@sanity/eslint-config-studio": "^4.0.0",
73
85
  "@sanity/generate-help-url": "^3.0.0",
86
+ "@sanity/types": "3.87.1-canary.4+84c6362d84",
74
87
  "@types/babel__traverse": "^7.20.5",
75
88
  "@types/configstore": "^5.0.1",
76
89
  "@types/cpx": "^1.5.2",
@@ -117,21 +130,10 @@
117
130
  "vite": "^6.2.4",
118
131
  "vitest": "^3.1.1",
119
132
  "which": "^2.0.2",
120
- "xdg-basedir": "^4.0.0",
121
- "@repo/package.config": "0.0.0",
122
- "@sanity/types": "3.86.1",
123
- "@repo/test-config": "0.0.0"
133
+ "xdg-basedir": "^4.0.0"
124
134
  },
125
135
  "engines": {
126
136
  "node": ">=18"
127
137
  },
128
- "scripts": {
129
- "build": "pkg-utils build --strict --check --clean",
130
- "check:types": "tsc --project tsconfig.lib.json",
131
- "clean": "rimraf lib",
132
- "lint": "eslint --cache .",
133
- "test": "vitest",
134
- "ts": "node -r esbuild-register",
135
- "watch": "pkg-utils watch"
136
- }
137
- }
138
+ "gitHead": "84c6362d849ed7a45de7126d3652f9f1e1d2ba03"
139
+ }
@@ -290,6 +290,8 @@ export default async function initSanity(
290
290
  print('')
291
291
  }
292
292
 
293
+ const isNextJs = detectedFramework?.slug === 'nextjs'
294
+
293
295
  const flags = await prepareFlags()
294
296
 
295
297
  // We're authenticated, now lets select or create a project (for studios) or org (for core apps)
@@ -313,14 +315,16 @@ export default async function initSanity(
313
315
  }
314
316
 
315
317
  let initNext = false
316
- const isNextJs = detectedFramework?.slug === 'nextjs'
317
318
  if (isNextJs) {
318
- initNext = await prompt.single({
319
- type: 'confirm',
320
- message:
321
- 'Would you like to add configuration files for a Sanity project in this Next.js folder?',
322
- default: true,
323
- })
319
+ initNext =
320
+ unattended ||
321
+ (await prompt.single({
322
+ type: 'confirm',
323
+ message:
324
+ 'Would you like to add configuration files for a Sanity project in this Next.js folder?',
325
+ default: true,
326
+ }))
327
+
324
328
  trace.log({
325
329
  step: 'useDetectedFramework',
326
330
  selectedOption: initNext ? 'yes' : 'no',
@@ -682,7 +686,7 @@ export default async function initSanity(
682
686
  )
683
687
  } else {
684
688
  print(`\n${chalk.green('Success!')} Now, use these commands to continue:\n`)
685
- print(`First: ${chalk.cyan(`cd ${outputPath}`)} - to enter projects directory`)
689
+ print(`First: ${chalk.cyan(`cd ${outputPath}`)} - to enter project's directory`)
686
690
  print(
687
691
  `Then: ${chalk.cyan(devCommand)} -to run ${isAppTemplate ? 'your Sanity application' : 'Sanity Studio'}\n`,
688
692
  )
@@ -750,9 +754,15 @@ export default async function initSanity(
750
754
 
751
755
  if (isAppTemplate) {
752
756
  const client = apiClient({requireUser: true, requireProject: false})
753
- const organizations = await client.request({uri: '/organizations'})
757
+ const organizations = await client.request({
758
+ uri: '/organizations',
759
+ query: {
760
+ includeMembers: 'true',
761
+ includeImplicitMemberships: 'true',
762
+ },
763
+ })
754
764
 
755
- const appOrganizationId = await getOrganizationId(organizations)
765
+ const appOrganizationId = await getOrganizationIdForAppTemplate(organizations)
756
766
 
757
767
  return {
758
768
  projectId: '',
@@ -1223,12 +1233,15 @@ export default async function initSanity(
1223
1233
 
1224
1234
  if (unattended) {
1225
1235
  debug('Unattended mode, validating required options')
1226
- const requiredForUnattended = ['dataset', 'output-path'] as const
1227
- requiredForUnattended.forEach((flag) => {
1228
- if (!cliFlags[flag]) {
1229
- throw new Error(`\`--${flag}\` must be specified in unattended mode`)
1230
- }
1231
- })
1236
+
1237
+ if (!cliFlags['dataset' as const]) {
1238
+ throw new Error(`\`--dataset\` must be specified in unattended mode`)
1239
+ }
1240
+
1241
+ // output-path is not used in unattended mode within nextjs
1242
+ if (!isNextJs && !cliFlags['output-path' as const]) {
1243
+ throw new Error(`\`--output-path\` must be specified in unattended mode`)
1244
+ }
1232
1245
 
1233
1246
  if (!cliFlags.project && !createProjectName) {
1234
1247
  throw new Error(
@@ -1300,6 +1313,38 @@ export default async function initSanity(
1300
1313
  return organization
1301
1314
  }
1302
1315
 
1316
+ async function getOrganizationIdForAppTemplate(organizations: ProjectOrganization[]) {
1317
+ // If the user is using an app template, we don't need to check for attach access
1318
+ const organizationChoices = [
1319
+ ...organizations.map((organization) => ({
1320
+ value: organization.id,
1321
+ name: `${organization.name} [${organization.id}]`,
1322
+ })),
1323
+ new prompt.Separator(),
1324
+ {value: '-new-', name: 'Create new organization'},
1325
+ new prompt.Separator(),
1326
+ ]
1327
+
1328
+ // If the user only has a single organization, we'll default to that one.
1329
+ const defaultOrganizationId =
1330
+ organizations.length === 1
1331
+ ? organizations[0].id
1332
+ : organizations.find((org) => org.name === user?.name)?.id
1333
+
1334
+ const chosenOrg = await prompt.single({
1335
+ message: 'Select organization:',
1336
+ type: 'list',
1337
+ default: defaultOrganizationId || undefined,
1338
+ choices: organizationChoices,
1339
+ })
1340
+
1341
+ if (chosenOrg === '-new-') {
1342
+ return createOrganization().then((org) => org.id)
1343
+ }
1344
+
1345
+ return chosenOrg || undefined
1346
+ }
1347
+
1303
1348
  async function getOrganizationId(organizations: ProjectOrganization[]) {
1304
1349
  // In unattended mode, if the user hasn't specified an organization, sending null as
1305
1350
  // organization ID to the API will create a new organization for the user with their
@@ -1363,21 +1408,38 @@ export default async function initSanity(
1363
1408
  .clone()
1364
1409
  .config({apiVersion: 'v2021-06-07'})
1365
1410
 
1366
- const grants = await client.request({uri: `organizations/${orgId}/grants`})
1367
- const group: {grants: {name: string}[]}[] = grants[requiredGrantGroup] || []
1368
- return group.some(
1369
- (resource) =>
1370
- resource.grants && resource.grants.some((grant) => grant.name === requiredGrant),
1371
- )
1411
+ try {
1412
+ const grants = await client.request({uri: `organizations/${orgId}/grants`})
1413
+ const group: {grants: {name: string}[]}[] = grants[requiredGrantGroup] || []
1414
+ return group.some(
1415
+ (resource) =>
1416
+ resource.grants && resource.grants.some((grant) => grant.name === requiredGrant),
1417
+ )
1418
+ } catch (err) {
1419
+ // If we get a 401, it means we don't have access to this organization
1420
+ // probably because of implicit membership
1421
+ if (err.statusCode === 401) {
1422
+ debug('No access to organization %s (401)', orgId)
1423
+ return false
1424
+ }
1425
+ // For other errors, log them but still return false to be safe
1426
+ debug('Error checking grants for organization %s: %s', orgId, err.message)
1427
+ return false
1428
+ }
1372
1429
  }
1373
1430
 
1374
1431
  function getOrganizationsWithAttachGrantInfo(organizations: ProjectOrganization[]) {
1375
1432
  return pMap(
1376
1433
  organizations,
1377
- async (organization) => ({
1378
- hasAttachGrant: await hasProjectAttachGrant(organization.id),
1379
- organization,
1380
- }),
1434
+ async (organization) => {
1435
+ try {
1436
+ const hasAttachGrant = await hasProjectAttachGrant(organization.id)
1437
+ return {hasAttachGrant, organization}
1438
+ } catch (err) {
1439
+ debug('Error checking grants for organization %s: %s', organization.id, err.message)
1440
+ return {hasAttachGrant: false, organization}
1441
+ }
1442
+ },
1381
1443
  {concurrency: 3},
1382
1444
  )
1383
1445
  }
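The grant lookup in `initSanity` is now fault-tolerant: a 401 from `organizations/{orgId}/grants` (common for implicit memberships) or any other failure is logged and treated as "no attach grant" instead of rejecting the whole `pMap` run. Reduced to its essentials, the containment pattern looks like the sketch below; `checkGrant` is a hypothetical stand-in for `hasProjectAttachGrant`.

```ts
import pMap from 'p-map'

// Hypothetical stand-in for hasProjectAttachGrant: resolves to a boolean or
// throws, e.g. a 401 for organizations the user only belongs to implicitly.
declare function checkGrant(orgId: string): Promise<boolean>

async function annotateOrganizations(orgIds: string[]) {
  return pMap(
    orgIds,
    async (orgId) => {
      try {
        return {orgId, hasAttachGrant: await checkGrant(orgId)}
      } catch {
        // One inaccessible organization should not fail the whole listing.
        return {orgId, hasAttachGrant: false}
      }
    },
    {concurrency: 3},
  )
}
```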
@@ -1,9 +1,8 @@
1
1
  import {defineTrace} from '@sanity/telemetry'
2
2
 
3
- interface TypesGeneratedTraceAttributes {
3
+ interface TypesGeneratedTraceAttrubutes {
4
4
  outputSize: number
5
5
  queriesCount: number
6
- projectionsCount: number
7
6
  schemaTypesCount: number
8
7
  queryFilesCount: number
9
8
  filesWithErrors: number
@@ -12,11 +11,10 @@ interface TypesGeneratedTraceAttributes {
12
11
  unknownTypeNodesRatio: number
13
12
  emptyUnionTypeNodesGenerated: number
14
13
  configOverloadClientMethods: boolean
15
- configAugmentGroqModule: boolean
16
14
  }
17
15
 
18
- export const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttributes>({
16
+ export const TypesGeneratedTrace = defineTrace<TypesGeneratedTraceAttrubutes>({
19
17
  name: 'Types Generated',
20
- version: 1,
18
+ version: 0,
21
19
  description: 'Trace emitted when generating TypeScript types for queries',
22
20
  })