@sanity/codegen 5.0.0-next.0-9b570ece82-202507150640 → 5.0.0-next.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +1 -1
- package/lib/index.d.ts +158 -92
- package/lib/index.js +497 -298
- package/lib/index.js.map +1 -1
- package/package.json +30 -26
- package/lib/index.d.mts +0 -204
- package/lib/index.mjs +0 -753
- package/lib/index.mjs.map +0 -1
package/lib/index.js
CHANGED
@@ -1,59 +1,58 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-}
-var fs__default$1 = /* @__PURE__ */ _interopDefaultCompat(fs), json5__namespace = /* @__PURE__ */ _interopNamespaceCompat(json5), z__namespace = /* @__PURE__ */ _interopNamespaceCompat(z), createDebug__default = /* @__PURE__ */ _interopDefaultCompat(createDebug), glob__default = /* @__PURE__ */ _interopDefaultCompat(glob), fs__default = /* @__PURE__ */ _interopDefaultCompat(fs$1), path__default = /* @__PURE__ */ _interopDefaultCompat(path), t__namespace = /* @__PURE__ */ _interopNamespaceCompat(t), traverse__default = /* @__PURE__ */ _interopDefaultCompat(traverse), register__default = /* @__PURE__ */ _interopDefaultCompat(register);
-const configDefintion = z__namespace.object({
-path: z__namespace.string().or(z__namespace.array(z__namespace.string())).default([
+import fs$1, { readFile } from "node:fs/promises";
+import json5 from "json5";
+import * as z from "zod";
+import { parse, typeEvaluate } from "groq-js";
+import createDebug from "debug";
+import glob from "globby";
+import fs, { existsSync } from "node:fs";
+import path, { dirname, join, resolve } from "node:path";
+import { fileURLToPath } from "node:url";
+import { createRequire } from "node:module";
+import { parse as parse$1, traverse } from "@babel/core";
+import * as t from "@babel/types";
+import { Scope } from "@babel/traverse";
+import { loadConfig, createMatchPath } from "tsconfig-paths";
+import register from "@babel/register";
+import process from "node:process";
+import { createSelector } from "reselect";
+import { CodeGenerator } from "@babel/generator";
+const configDefinition = z.object({
+path: z.string().or(z.array(z.string())).default([
 "./src/**/*.{ts,tsx,js,jsx,mjs,cjs,astro}",
 "./app/**/*.{ts,tsx,js,jsx,mjs,cjs}",
 "./sanity/**/*.{ts,tsx,js,jsx,mjs,cjs}"
 ]),
-schema:
-generates:
-formatGeneratedCode:
-overloadClientMethods:
+schema: z.string().default("./schema.json"),
+generates: z.string().default("./sanity.types.ts"),
+formatGeneratedCode: z.boolean().default(!0),
+overloadClientMethods: z.boolean().default(!0)
 });
 async function readConfig(path2) {
 try {
-const content = await
-return
+const content = await readFile(path2, "utf-8"), json = json5.parse(content);
+return configDefinition.parseAsync(json);
 } catch (error) {
-if (error instanceof
-throw new Error(
+if (error instanceof z.ZodError)
+throw new Error(
+`Error in config file
 ${error.errors.map((err) => err.message).join(`
-`)}
+`)}`,
+{ cause: error }
+);
 if (typeof error == "object" && error !== null && "code" in error && error.code === "ENOENT")
-return
+return configDefinition.parse({});
 throw error;
 }
 }
 async function readSchema(path2) {
-const content = await
+const content = await readFile(path2, "utf-8");
 return JSON.parse(content);
 }
 function safeParseQuery(query) {
 const params = {};
 for (const param of extractSliceParams(query))
 params[param] = 0;
-return
+return parse(query, { params });
 }
 function* extractSliceParams(query) {
 const sliceRegex = /\[(\$(\w+)|\d)\.\.\.?(\$(\w+)|\d)\]/g, matches = query.matchAll(sliceRegex);
@@ -65,22 +64,23 @@ function* extractSliceParams(query) {
 end !== null && (yield end);
 }
 }
-
-
-
+const __dirname$1 = dirname(fileURLToPath(import.meta.url));
+function findBabelConfig(path2) {
+const configPath = join(path2, "babel.config.json");
+if (existsSync(configPath))
 return configPath;
-const parent =
-if (parent && parent !==
+const parent = resolve(join(path2, ".."));
+if (parent && parent !== path2)
 return findBabelConfig(parent);
 throw new Error("Could not find `babel.config.json` in @sanity/codegen");
 }
 function getBabelConfig(path2) {
-return { extends: findBabelConfig(__dirname) };
+return { extends: findBabelConfig(__dirname$1) };
 }
 function parseSourceFile(_source, _filename, babelOptions) {
 let source = _source, filename = _filename;
-filename.endsWith(".astro")
-const result =
+filename.endsWith(".astro") ? (filename += ".ts", source = parseAstro(source)) : filename.endsWith(".vue") && (filename += ".ts", source = parseVue(source));
+const result = parse$1(source, {
 ...babelOptions,
 filename
 });
@@ -95,7 +95,21 @@ function parseAstro(source) {
 `)).join(`
 `) : "";
 }
-
+function parseVue(source) {
+const matches = matchAllPolyfill(source, /<script(?:\s+generic=["'][^"']*["'])?[^>]*>([\s\S]*?)<\/script>/g);
+return matches.length ? matches.map((match) => match[1]).join(`
+`) : "";
+}
+function matchAllPolyfill(str, regex) {
+if (!regex.global)
+throw new Error("matchAll polyfill requires a global regex (with /g flag)");
+const matches = [];
+let match;
+for (; (match = regex.exec(str)) !== null; )
+matches.push(match);
+return matches;
+}
+const debug$2 = createDebug("sanity:codegen:findQueries:debug"), TAGGED_TEMPLATE_ALLOW_LIST = ["groq"], FUNCTION_WRAPPER_ALLOW_LIST = ["defineQuery"];
 function resolveExpression({
 node,
 file,
@@ -108,7 +122,7 @@ function resolveExpression({
 }) {
 if (debug$2(
 `Resolving node ${node.type} in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`
-),
+), t.isTaggedTemplateExpression(node) && t.isIdentifier(node.tag) && TAGGED_TEMPLATE_ALLOW_LIST.includes(node.tag.name))
 return resolveExpression({
 node: node.quasi,
 scope,
@@ -119,7 +133,7 @@ function resolveExpression({
 babelConfig,
 fnArguments
 });
-if (
+if (t.isTemplateLiteral(node)) {
 const resolvedExpressions = node.expressions.map(
 (expression) => resolveExpression({
 node: expression,
@@ -134,12 +148,12 @@ function resolveExpression({
 );
 return node.quasis.map((quasi, idx) => (quasi.value.cooked || "") + (resolvedExpressions[idx] || "")).join("");
 }
-if (
+if (t.isLiteral(node)) {
 if (node.type === "NullLiteral" || node.type === "RegExpLiteral")
 throw new Error(`Unsupported literal type: ${node.type}`);
 return node.value.toString();
 }
-if (
+if (t.isIdentifier(node))
 return resolveIdentifier({
 node,
 scope,
@@ -150,8 +164,8 @@ function resolveExpression({
 babelConfig,
 params
 });
-if (
-const init = node.init ?? (
+if (t.isVariableDeclarator(node)) {
+const init = node.init ?? (t.isAssignmentPattern(node.id) && node.id.right);
 if (!init)
 throw new Error("Unsupported variable declarator");
 return resolveExpression({
@@ -164,7 +178,7 @@ function resolveExpression({
 resolver
 });
 }
-if (
+if (t.isCallExpression(node) && t.isIdentifier(node.callee) && FUNCTION_WRAPPER_ALLOW_LIST.includes(node.callee.name))
 return resolveExpression({
 node: node.arguments[0],
 scope,
@@ -174,7 +188,7 @@ function resolveExpression({
 babelConfig,
 params
 });
-if (
+if (t.isCallExpression(node))
 return resolveCallExpression({
 node,
 scope,
@@ -184,8 +198,8 @@ function resolveExpression({
 babelConfig,
 params
 });
-if (
-const newScope = new
+if (t.isArrowFunctionExpression(node) || t.isFunctionDeclaration(node) || t.isFunctionExpression(node)) {
+const newScope = new Scope(scope.path, scope);
 return params.forEach((param, i) => {
 newScope.push({
 id: param,
@@ -202,7 +216,7 @@ function resolveExpression({
 resolver
 });
 }
-if (
+if (t.isNewExpression(node))
 return resolveExpression({
 node: node.callee,
 scope,
@@ -211,9 +225,9 @@ function resolveExpression({
 babelConfig,
 resolver
 });
-if (
+if (t.isImportDefaultSpecifier(node) || t.isImportSpecifier(node))
 return resolveImportSpecifier({ node, file, filename, fnArguments, resolver, babelConfig });
-if (
+if (t.isAssignmentPattern(node))
 return resolveExpression({
 node: node.right,
 scope,
@@ -224,6 +238,17 @@ function resolveExpression({
 babelConfig,
 fnArguments
 });
+if (t.isTSAsExpression(node))
+return resolveExpression({
+node: node.expression,
+scope,
+filename,
+file,
+resolver,
+params,
+babelConfig,
+fnArguments
+});
 throw new Error(
 `Unsupported expression type: ${node.type} in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`
 );
@@ -239,10 +264,10 @@ function resolveIdentifier({
 params
 }) {
 const paramIndex = params.findIndex(
-(param) =>
+(param) => t.isIdentifier(param) && node.name === param.name || t.isAssignmentPattern(param) && t.isIdentifier(param.left) && node.name === param.left.name
 );
 let argument = fnArguments[paramIndex];
-if (!argument && paramIndex >= 0 &&
+if (!argument && paramIndex >= 0 && t.isAssignmentPattern(params[paramIndex]) && (argument = params[paramIndex].right), argument && t.isLiteral(argument))
 return resolveExpression({
 node: argument,
 scope,
@@ -255,7 +280,7 @@ function resolveIdentifier({
 });
 const binding = scope.getBinding(node.name);
 if (binding) {
-if (
+if (t.isIdentifier(binding.path.node) && binding.path.node.name === node.name)
 throw new Error(
 `Could not resolve same identifier "${node.name}" in "${filename}:${node.loc?.start.line}:${node.loc?.start.column}"`
 );
@@ -304,11 +329,11 @@ function resolveImportSpecifier({
 babelConfig
 }) {
 let importDeclaration;
-if (
+if (traverse(file, {
 ImportDeclaration(n) {
-if (
+if (t.isImportDeclaration(n.node))
 for (const specifier of n.node.specifiers) {
-if (
+if (t.isImportDefaultSpecifier(specifier) && specifier.local.loc?.identifierName === node.local.name) {
 importDeclaration = n.node;
 break;
 }
@@ -317,9 +342,9 @@ function resolveImportSpecifier({
 }
 }), !importDeclaration)
 throw new Error(`Could not find import declaration for ${node.local.name}`);
-const importName = node.local.name, importFileName = importDeclaration.source.value, importPath = importFileName.startsWith("./") || importFileName.startsWith("../") ?
+const importName = node.local.name, importFileName = importDeclaration.source.value, importPath = importFileName.startsWith("./") || importFileName.startsWith("../") ? path.resolve(path.dirname(filename), importFileName) : importFileName, resolvedFile = resolver(importPath), source = fs.readFileSync(resolvedFile), tree = parseSourceFile(source.toString(), resolvedFile, babelConfig);
 let newScope;
-if (
+if (traverse(tree, {
 Program(p) {
 newScope = p.scope;
 }
@@ -337,7 +362,7 @@ function resolveImportSpecifier({
 resolver
 });
 let namedExport, newImportName;
-if (
+if (traverse(tree, {
 ExportDeclaration(p) {
 if (p.node.type === "ExportNamedDeclaration")
 for (const specifier of p.node.specifiers)
@@ -353,7 +378,7 @@ function resolveImportSpecifier({
 babelConfig
 });
 let result;
-if (
+if (traverse(tree, {
 ExportDeclaration(p) {
 if (p.node.type === "ExportAllDeclaration")
 try {
@@ -382,9 +407,9 @@ function resolveExportSpecifier({
 }) {
 if (!node.source)
 throw new Error(`Could not find source for export "${importName}" in ${filename}`);
-const importFileName = node.source.value, importPath =
+const importFileName = node.source.value, importPath = path.resolve(path.dirname(filename), importFileName), resolvedFile = resolver(importPath), source = fs.readFileSync(resolvedFile), tree = parseSourceFile(source.toString(), resolvedFile, babelConfig);
 let newScope;
-if (
+if (traverse(tree, {
 Program(p) {
 newScope = p.scope;
 }
@@ -396,7 +421,7 @@ function resolveExportSpecifier({
 node: binding.path.node,
 file: tree,
 scope: newScope,
-filename:
+filename: resolvedFile,
 babelConfig,
 resolver,
 fnArguments
@@ -405,36 +430,53 @@ function resolveExportSpecifier({
 cause: `noBinding:${importName}`
 });
 }
-const
-
-
-
+const isRecord = (value) => (typeof value == "object" || typeof value == "function") && !!value;
+class QueryExtractionError extends Error {
+variable;
+filename;
+constructor({ variable, cause, filename }) {
+super(
+`Error while extracting query ${variable ? `from variable '${variable.id.name}' ` : ""}in ${filename}: ${isRecord(cause) && typeof cause.message == "string" ? cause.message : "Unknown error"}`
+), this.name = "QueryExtractionError", this.cause = cause, this.variable = variable, this.filename = filename;
+}
+}
+class QueryEvaluationError extends Error {
+variable;
+filename;
+constructor({ variable, cause, filename }) {
+super(
+`Error while evaluating query ${variable ? `from variable '${variable.id.name}' ` : ""}in ${filename}: ${isRecord(cause) && typeof cause.message == "string" ? cause.message : "Unknown error"}`
+), this.name = "QueryEvaluationError", this.cause = cause, this.variable = variable, this.filename = filename;
+}
+}
+const require$2 = createRequire(import.meta.url), groqTagName = "groq", defineQueryFunctionName = "defineQuery", groqModuleName = "groq", nextSanityModuleName = "next-sanity", ignoreValue = "@sanity-typegen-ignore";
+function findQueriesInSource(source, filename, babelConfig = getBabelConfig(), resolver = require$2.resolve) {
+const queries = [], errors = [], file = parseSourceFile(source, filename, babelConfig);
+return traverse(file, {
 // Look for variable declarations, e.g. `const myQuery = groq`... and extract the query.
 // The variable name is used as the name of the query result type
 VariableDeclarator(path2) {
-const { node, scope } = path2, init = node.init, isGroqTemplateTag =
-if (
+const { node, scope } = path2, init = node.init, isGroqTemplateTag = t.isTaggedTemplateExpression(init) && t.isIdentifier(init.tag) && init.tag.name === groqTagName, isDefineQueryCall = t.isCallExpression(init) && (isImportFrom(groqModuleName, defineQueryFunctionName, scope, init.callee) || isImportFrom(nextSanityModuleName, defineQueryFunctionName, scope, init.callee));
+if (t.isIdentifier(node.id) && (isGroqTemplateTag || isDefineQueryCall)) {
 if (declarationLeadingCommentContains(path2, ignoreValue))
 return;
-const
-
-
-
-
-
-
-
-
-
-}
-
-
-
-} : {};
-queries.push({ name: queryName, result: queryResult, location });
+const { id, start, end } = node, variable = { id, ...start && { start }, ...end && { end } };
+try {
+const query = resolveExpression({
+node: init,
+file,
+scope,
+babelConfig,
+filename,
+resolver
+});
+queries.push({ variable, query, filename });
+} catch (cause) {
+errors.push(new QueryExtractionError({ filename, variable, cause }));
+}
 }
 }
-}), queries;
+}), { filename, queries, errors };
 }
 function declarationLeadingCommentContains(path2, comment) {
 const variableDeclaration = path2.find((node) => node.isVariableDeclaration());
@@ -445,205 +487,172 @@ function declarationLeadingCommentContains(path2, comment) {
 )) : !1;
 }
 function isImportFrom(moduleName, importName, scope, node) {
-if (
+if (t.isIdentifier(node)) {
 const binding = scope.getBinding(node.name);
 if (!binding)
 return !1;
 const { path: path2 } = binding;
-if (
-return path2.node.importKind === "value" && path2.parentPath &&
-if (
+if (t.isImportSpecifier(path2.node))
+return path2.node.importKind === "value" && path2.parentPath && t.isImportDeclaration(path2.parentPath.node) && path2.parentPath.node.source.value === moduleName && t.isIdentifier(path2.node.imported) && path2.node.imported.name === importName;
+if (t.isVariableDeclarator(path2.node)) {
 const { init } = path2.node;
-return
+return t.isCallExpression(init) && t.isIdentifier(init.callee) && init.callee.name === "require" && t.isStringLiteral(init.arguments[0]) && init.arguments[0].value === moduleName;
 }
 }
-if (
+if (t.isMemberExpression(node)) {
 const { object, property } = node;
-if (!
+if (!t.isIdentifier(object))
 return !1;
 const binding = scope.getBinding(object.name);
 if (!binding)
 return !1;
 const { path: path2 } = binding;
-return
+return t.isIdentifier(object) && t.isIdentifier(property) && property.name === importName && t.isImportNamespaceSpecifier(path2.node) && path2.parentPath && t.isImportDeclaration(path2.parentPath.node) && path2.parentPath.node.source.value === moduleName;
 }
 return !1;
 }
-const debug$1 =
+const require$1 = createRequire(import.meta.url), debug$1 = createDebug("sanity:codegen:moduleResolver");
 function getResolver(cwd) {
-const tsConfig =
+const tsConfig = loadConfig(cwd);
 if (tsConfig.resultType === "failed")
-return debug$1("Could not load tsconfig, using default resolver: %s", tsConfig.message), require.resolve;
-const matchPath =
+return debug$1("Could not load tsconfig, using default resolver: %s", tsConfig.message), require$1.resolve;
+const matchPath = createMatchPath(
 tsConfig.absoluteBaseUrl,
 tsConfig.paths,
 tsConfig.mainFields,
 tsConfig.addMatchAll
-),
+), resolve2 = function(request, options) {
 const found = matchPath(request);
-return found !== void 0 ? require.resolve(found, options) : require.resolve(request, options);
+return found !== void 0 ? require$1.resolve(found, options) : require$1.resolve(request, options);
 };
-return
+return resolve2.paths = (request) => require$1.resolve.paths(request), resolve2;
 }
-const debug =
-
+const debug = createDebug("sanity:codegen:findQueries:debug");
+function findQueriesInPath({
 path: path2,
 babelOptions = getBabelConfig(),
 resolver = getResolver()
 }) {
 const queryNames = /* @__PURE__ */ new Set();
 debug(`Globing ${path2}`);
-const files =
+const files = glob.sync(path2, {
 absolute: !1,
 ignore: ["**/node_modules/**"],
 // we never want to look in node_modules
 onlyFiles: !0
 }).sort();
-
-
-
-
-
-
-
-
-
-
-
+async function* getQueries() {
+for (const filename of files)
+if (typeof filename == "string") {
+debug(`Found file "${filename}"`);
+try {
+const source = await fs$1.readFile(filename, "utf8"), pluckedModuleResult = findQueriesInSource(source, filename, babelOptions, resolver);
+for (const { variable } of pluckedModuleResult.queries) {
+if (queryNames.has(variable.id.name))
+throw new Error(
+`Duplicate query name found: "${variable.id.name}". Query names must be unique across all files.`
+);
+queryNames.add(variable.id.name);
+}
+yield pluckedModuleResult;
+} catch (cause) {
+debug(`Error in file "${filename}"`, cause), yield {
+filename,
+queries: [],
+errors: [new QueryExtractionError({ cause, filename })]
+};
 }
-yield { type: "queries", filename, queries };
-} catch (error) {
-debug(`Error in file "${filename}"`, error), yield { type: "error", error, filename };
 }
-
+}
+return { files, queries: getQueries() };
 }
 function registerBabel(babelOptions) {
 const options = babelOptions || getBabelConfig();
-
+register({ ...options, extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"] });
 }
-
-
-
-
-
-
-
-
+function resultSuffix(variableName) {
+if (!variableName) return "result";
+const isUpperSnake = /^[A-Z0-9_]+$/.test(variableName), isSnake = /^[a-z0-9_]+$/.test(variableName) && variableName.includes("_");
+return /^[a-z][A-Za-z0-9]*$/.test(variableName) ? `${variableName}Result` : isUpperSnake ? `${variableName}_RESULT` : isSnake ? `${variableName}_result` : `${variableName.replace(/[^A-Za-z0-9]/g, "")}Result`;
+}
+const INTERNAL_REFERENCE_SYMBOL = t.identifier("internalGroqTypeReferenceTo"), ALL_SANITY_SCHEMA_TYPES = t.identifier("AllSanitySchemaTypes"), SANITY_QUERIES = t.identifier("SanityQueries"), RESERVED_IDENTIFIERS = /* @__PURE__ */ new Set();
+RESERVED_IDENTIFIERS.add(SANITY_QUERIES.name);
+RESERVED_IDENTIFIERS.add(ALL_SANITY_SCHEMA_TYPES.name);
+RESERVED_IDENTIFIERS.add(INTERNAL_REFERENCE_SYMBOL.name);
+function normalizePath(root, filename) {
+const resolved = path.resolve(root, filename);
+return path.relative(root, resolved);
+}
+function sanitizeIdentifier(input) {
+return `${input.replace(/^\d/, "_").replace(/[^$\w]+(.)/g, (_, char) => char.toUpperCase())}`;
+}
+function normalizeIdentifier(input) {
+const sanitized = sanitizeIdentifier(input);
+return `${sanitized.charAt(0).toUpperCase()}${sanitized.slice(1)}`;
+}
+function getUniqueIdentifierForName(name, currentIdentifiers) {
+const desiredName = normalizeIdentifier(name);
+let resultingName = desiredName, index = 2;
+for (; currentIdentifiers.has(resultingName) || RESERVED_IDENTIFIERS.has(resultingName); )
+resultingName = `${desiredName}_${index}`, index++;
+return t.identifier(resultingName);
+}
+function computeOnce(fn) {
+const ref = { current: void 0, computed: !1 };
+return function() {
+return ref.computed || (ref.current = fn(), ref.computed = !0), ref.current;
+};
+}
+function weakMapMemo(fn) {
+const cache = /* @__PURE__ */ new WeakMap();
+return function(arg) {
+if (cache.has(arg)) return cache.get(arg);
+const result = fn(arg);
+return cache.set(arg, result), result;
+};
+}
+function generateCode(node) {
+return `${new CodeGenerator(node).generate().code.trim()}
+
+`;
+}
+class SchemaTypeGenerator {
 schema;
+tsTypes = /* @__PURE__ */ new Map();
+identifiers = /* @__PURE__ */ new Map();
 constructor(schema) {
-this.schema = schema
-
-
-
-
-
-
-
-* @beta
-*/
-generateSchemaTypes() {
-const typeDeclarations = [], schemaNames = /* @__PURE__ */ new Set();
-return this.schema.forEach((schema) => {
-const typeLiteral = this.getTypeNodeType(schema), schemaName = this.typeNodeNameMap.get(schema);
-if (!schemaName)
-throw new Error(`Schema name not found for schema ${schema.name}`);
-schemaNames.add(schemaName);
-const typeAlias = t__namespace.tsTypeAliasDeclaration(t__namespace.identifier(schemaName), null, typeLiteral);
-typeDeclarations.push(t__namespace.exportNamedDeclaration(typeAlias));
-}), typeDeclarations.push(
-t__namespace.exportNamedDeclaration(
-t__namespace.tsTypeAliasDeclaration(
-t__namespace.identifier(this.getTypeName(ALL_SCHEMA_TYPES)),
-null,
-t__namespace.tsUnionType(
-[...schemaNames].map((typeName) => t__namespace.tsTypeReference(t__namespace.identifier(typeName)))
-)
-)
-)
-), typeDeclarations.map((decl) => new generator.CodeGenerator(decl).generate().code).join(`
-
-`);
-}
-/**
-* Takes a identifier and a type node and generates a type alias for the type node.
-* @param identifierName - The name of the type to generated
-* @param typeNode - The type node to generate the type for
-* @returns
-* @internal
-* @beta
-*/
-generateTypeNodeTypes(identifierName, typeNode) {
-const type = this.getTypeNodeType(typeNode), typeName = this.getTypeName(identifierName, typeNode), typeAlias = t__namespace.tsTypeAliasDeclaration(t__namespace.identifier(typeName), null, type);
-return new generator.CodeGenerator(t__namespace.exportNamedDeclaration(typeAlias)).generate().code.trim();
-}
-static generateKnownTypes() {
-const typeOperator = t__namespace.tsTypeOperator(t__namespace.tsSymbolKeyword(), "unique"), identifier = t__namespace.identifier(REFERENCE_SYMBOL_NAME);
-identifier.typeAnnotation = t__namespace.tsTypeAnnotation(typeOperator);
-const decleration = t__namespace.variableDeclaration("const", [t__namespace.variableDeclarator(identifier)]);
-return decleration.declare = !0, new generator.CodeGenerator(t__namespace.exportNamedDeclaration(decleration)).generate().code.trim();
-}
-/**
-* Takes a list of queries from the codebase and generates a type declaration
-* for SanityClient to consume.
-*
-* Note: only types that have previously been generated with `generateTypeNodeTypes`
-* will be included in the query map.
-*
-* @param queries - A list of queries to generate a type declaration for
-* @returns
-* @internal
-* @beta
-*/
-generateQueryMap(queries) {
-const typesByQuerystring = {};
-for (const query of queries) {
-const name = this.typeNodeNameMap.get(query.typeNode);
-name && (typesByQuerystring[query.query] ??= [], typesByQuerystring[query.query].push(name));
+this.schema = schema;
+const uniqueTypeNames = /* @__PURE__ */ new Set();
+for (const type of schema) {
+if (uniqueTypeNames.has(type.name))
+throw new Error(
+`Duplicate type name "${type.name}" in schema. Type names must be unique within the same schema.`
+);
+uniqueTypeNames.add(type.name);
 }
-const
-
-
-
-
-
-
-
-t__namespace.tsUnionType(types.map((type) => t__namespace.tsTypeReference(t__namespace.identifier(type))))
-)
-))
-)
-), declareModule = t__namespace.declareModule(
-t__namespace.stringLiteral("@sanity/client"),
-t__namespace.blockStatement([queryReturnInterface])
-), clientImport = t__namespace.importDeclaration([], t__namespace.stringLiteral("@sanity/client"));
-return new generator.CodeGenerator(t__namespace.program([clientImport, declareModule])).generate().code.trim();
-}
-/**
-* Since we are sanitizing identifiers we migt end up with collisions. Ie there might be a type mux.video and muxVideo, both these
-* types would be sanityized into MuxVideo. To avoid this we keep track of the generated type names and add a index to the name.
-* When we reference a type we also keep track of the original name so we can reference the correct type later.
-*/
-getTypeName(name, typeNode) {
-const desiredName = uppercaseFirstLetter(sanitizeIdentifier(name));
-let generatedName = desiredName, i = 2;
-for (; this.generatedTypeName.has(generatedName); )
-generatedName = `${desiredName}_${i++}`;
-return this.generatedTypeName.add(generatedName), this.typeNameMap.set(name, generatedName), typeNode && this.typeNodeNameMap.set(typeNode, generatedName), generatedName;
+for (const type of schema) {
+const currentIdentifierNames = new Set(
+Array.from(this.identifiers.values()).map((id) => id.name)
+), uniqueIdentifier = getUniqueIdentifierForName(type.name, currentIdentifierNames);
+this.identifiers.set(type.name, uniqueIdentifier);
+}
+for (const type of schema)
+this.tsTypes.set(type.name, this.generateTsType(type));
 }
-
+generateTsType(typeNode) {
 switch (typeNode.type) {
 case "string":
-return typeNode.value !== void 0 ?
+return typeNode.value !== void 0 ? t.tsLiteralType(t.stringLiteral(typeNode.value)) : t.tsStringKeyword();
 case "number":
-return typeNode.value !== void 0 ?
+return typeNode.value !== void 0 ? t.tsLiteralType(t.numericLiteral(typeNode.value)) : t.tsNumberKeyword();
 case "boolean":
-return typeNode.value !== void 0 ?
+return typeNode.value !== void 0 ? t.tsLiteralType(t.booleanLiteral(typeNode.value)) : t.tsBooleanKeyword();
 case "unknown":
-return
+return t.tsUnknownKeyword();
 case "document":
-return this.
+return this.generateDocumentTsType(typeNode);
 case "type":
-return this.
+return this.generateTsType(typeNode.value);
 case "array":
 return this.generateArrayTsType(typeNode);
 case "object":
@@ -653,24 +662,24 @@ class TypeGenerator {
 case "inline":
 return this.generateInlineTsType(typeNode);
 case "null":
-return
+return t.tsNullKeyword();
 default:
-throw new Error(
+throw new Error(
+`Encountered unsupported node type "${// @ts-expect-error This should never happen
+typeNode.type}" while generating schema types`
+);
 }
 }
 // Helper function used to generate TS types for array type nodes.
 generateArrayTsType(typeNode) {
-const typeNodes = this.
-return
-t__namespace.identifier("Array"),
-t__namespace.tsTypeParameterInstantiation([typeNodes])
-);
+const typeNodes = this.generateTsType(typeNode.of);
+return t.tsTypeReference(t.identifier("Array"), t.tsTypeParameterInstantiation([typeNodes]));
 }
 // Helper function used to generate TS types for object properties.
-
-const type = this.
-
-
+generateTsObjectProperty(key, attribute) {
+const type = this.generateTsType(attribute.value), propertySignature = t.tsPropertySignature(
+t.identifier(sanitizeIdentifier(key)),
+t.tsTypeAnnotation(type)
 );
 return propertySignature.optional = attribute.optional, propertySignature;
 }
@@ -678,81 +687,271 @@ class TypeGenerator {
 generateObjectTsType(typeNode) {
 const props = [];
 Object.entries(typeNode.attributes).forEach(([key, attribute]) => {
-props.push(this.
+props.push(this.generateTsObjectProperty(key, attribute));
 });
 const rest = typeNode.rest;
-if (rest
+if (rest)
 switch (rest.type) {
 case "unknown":
-return
+return t.tsUnknownKeyword();
 case "object": {
 Object.entries(rest.attributes).forEach(([key, attribute]) => {
-props.push(this.
+props.push(this.generateTsObjectProperty(key, attribute));
 });
 break;
 }
 case "inline": {
 const resolved = this.generateInlineTsType(rest);
-return
+return t.isTSUnknownKeyword(resolved) ? resolved : t.tsIntersectionType([t.tsTypeLiteral(props), resolved]);
 }
 default:
 throw new Error(`Type "${rest.type}" not found in schema`);
 }
-if (typeNode.dereferencesTo
-const derefType =
-
-
+if (typeNode.dereferencesTo) {
+const derefType = Object.assign(
+t.tsPropertySignature(
+INTERNAL_REFERENCE_SYMBOL,
+t.tsTypeAnnotation(t.tsLiteralType(t.stringLiteral(typeNode.dereferencesTo)))
+),
+{ computed: !0, optional: !0 }
 );
-
+props.push(derefType);
 }
-return
+return t.tsTypeLiteral(props);
 }
 generateInlineTsType(typeNode) {
-const
-
-
-
-
-
-
-{
-type: "CommentLine",
-value: ` Unable to locate the referenced type "${typeNode.name}" in schema`
-}
-], missing;
-}
-const generatedName = this.typeNameMap.get(referencedTypeNode.name);
-return generatedName ? t__namespace.tsTypeReference(t__namespace.identifier(generatedName)) : t__namespace.tsUnknownKeyword();
+const id = this.identifiers.get(typeNode.name);
+return id ? t.tsTypeReference(id) : t.addComment(
+t.tsUnknownKeyword(),
+"trailing",
+` Unable to locate the referenced type "${typeNode.name}" in schema`,
+!0
+);
 }
 // Helper function used to generate TS types for union type nodes.
 generateUnionTsType(typeNode) {
-
-return t__namespace.tsNeverKeyword();
-if (typeNode.of.length === 1)
-return this.getTypeNodeType(typeNode.of[0]);
-const typeNodes = typeNode.of.map((node) => this.getTypeNodeType(node));
-return t__namespace.tsUnionType(typeNodes);
+return typeNode.of.length === 0 ? t.tsNeverKeyword() : typeNode.of.length === 1 ? this.generateTsType(typeNode.of[0]) : t.tsUnionType(typeNode.of.map((node) => this.generateTsType(node)));
 }
 // Helper function used to generate TS types for document type nodes.
-
+generateDocumentTsType(document) {
 const props = Object.entries(document.attributes).map(
-([key, node]) => this.
+([key, node]) => this.generateTsObjectProperty(key, node)
 );
-return
+return t.tsTypeLiteral(props);
+}
+typeNames() {
+return this.schema.map((schemaType) => schemaType.name);
+}
+getType(typeName) {
+const tsType = this.tsTypes.get(typeName), id = this.identifiers.get(typeName);
+if (tsType && id) return { tsType, id };
+}
+hasType(typeName) {
+return this.tsTypes.has(typeName);
+}
+evaluateQuery = weakMapMemo(
+({ query }) => {
+const ast = safeParseQuery(query), typeNode = typeEvaluate(ast, this.schema), tsType = this.generateTsType(typeNode), stats = walkAndCountQueryTypeNodeStats(typeNode);
+return { tsType, stats };
+}
+);
+*[Symbol.iterator]() {
+for (const { name } of this.schema)
+yield { name, ...this.getType(name) };
 }
 }
-function
-
+function walkAndCountQueryTypeNodeStats(typeNode) {
+switch (typeNode.type) {
+case "unknown":
+return { allTypes: 1, unknownTypes: 1, emptyUnions: 0 };
+case "array": {
+const acc = walkAndCountQueryTypeNodeStats(typeNode.of);
+return acc.allTypes += 1, acc;
+}
+case "object": {
+if (typeNode.rest && typeNode.rest.type === "unknown")
+return { allTypes: 2, unknownTypes: 1, emptyUnions: 0 };
+const restStats = typeNode.rest ? walkAndCountQueryTypeNodeStats(typeNode.rest) : { allTypes: 0, unknownTypes: 0, emptyUnions: 0 };
+return restStats.allTypes += 1, Object.values(typeNode.attributes).reduce((acc, attribute) => {
+const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(
+attribute.value
+);
+return {
+allTypes: acc.allTypes + allTypes,
+unknownTypes: acc.unknownTypes + unknownTypes,
+emptyUnions: acc.emptyUnions + emptyUnions
+};
+}, restStats);
+}
+case "union":
+return typeNode.of.length === 0 ? { allTypes: 1, unknownTypes: 0, emptyUnions: 1 } : typeNode.of.reduce(
+(acc, type) => {
+const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(type);
+return {
+allTypes: acc.allTypes + allTypes,
+unknownTypes: acc.unknownTypes + unknownTypes,
+emptyUnions: acc.emptyUnions + emptyUnions
+};
+},
+{ allTypes: 1, unknownTypes: 0, emptyUnions: 0 }
+// count the union type itself
+);
+default:
+return { allTypes: 1, unknownTypes: 0, emptyUnions: 0 };
+}
 }
-
-
+class TypeGenerator {
+getInternalReferenceSymbolDeclaration = computeOnce(() => {
+const typeOperator = t.tsTypeOperator(t.tsSymbolKeyword(), "unique"), id = INTERNAL_REFERENCE_SYMBOL;
+id.typeAnnotation = t.tsTypeAnnotation(typeOperator);
+const declaration = t.variableDeclaration("const", [t.variableDeclarator(id)]);
+declaration.declare = !0;
+const ast = t.exportNamedDeclaration(declaration), code = generateCode(ast);
+return { id, code, ast };
+});
+getSchemaTypeGenerator = createSelector(
+[(options) => options.schema],
+(schema) => new SchemaTypeGenerator(schema)
+);
+getSchemaTypeDeclarations = createSelector(
+[
+(options) => options.root,
+(options) => options.schemaPath,
+this.getSchemaTypeGenerator
+],
+(root = process.cwd(), schemaPath, schema) => Array.from(schema).map(({ id, name, tsType }, index) => {
+const typeAlias = t.tsTypeAliasDeclaration(id, null, tsType);
+let ast = t.exportNamedDeclaration(typeAlias);
+index === 0 && schemaPath && (ast = t.addComments(ast, "leading", [
+{ type: "CommentLine", value: ` Source: ${normalizePath(root, schemaPath)}` }
+]));
+const code = generateCode(ast);
+return { id, code, name, tsType, ast };
+})
+);
+getAllSanitySchemaTypesDeclaration = createSelector(
+[this.getSchemaTypeDeclarations],
+(schemaTypes) => {
+const ast = t.exportNamedDeclaration(
+t.tsTypeAliasDeclaration(
+ALL_SANITY_SCHEMA_TYPES,
+null,
+schemaTypes.length ? t.tsUnionType(schemaTypes.map(({ id }) => t.tsTypeReference(id))) : t.tsNeverKeyword()
+)
+), code = generateCode(ast);
+return { id: ALL_SANITY_SCHEMA_TYPES, code, ast };
+}
+);
+static async getEvaluatedModules({
+root = process.cwd(),
+reporter: report,
+schemaTypeGenerator,
+schemaTypeDeclarations,
+queries: extractedModules
+}) {
+if (!extractedModules)
+return report?.stream.evaluatedModules.end(), [];
+const currentIdentifiers = new Set(schemaTypeDeclarations.map(({ id }) => id.name)), evaluatedModuleResults = [];
+for await (const { filename, ...extractedModule } of extractedModules) {
+const queries = [], errors = [...extractedModule.errors];
+for (const extractedQuery of extractedModule.queries) {
+const { variable } = extractedQuery;
+try {
+const { tsType, stats } = schemaTypeGenerator.evaluateQuery(extractedQuery), id = getUniqueIdentifierForName(resultSuffix(variable.id.name), currentIdentifiers), typeAlias = t.tsTypeAliasDeclaration(id, null, tsType), trimmedQuery = extractedQuery.query.replace(/(\r\n|\n|\r)/gm, "").trim(), ast = t.addComments(t.exportNamedDeclaration(typeAlias), "leading", [
+{ type: "CommentLine", value: ` Source: ${normalizePath(root, filename)}` },
+{ type: "CommentLine", value: ` Variable: ${variable.id.name}` },
+{ type: "CommentLine", value: ` Query: ${trimmedQuery}` }
+]), evaluatedQueryResult = {
+id,
+code: generateCode(ast),
+ast,
+stats,
+tsType,
+...extractedQuery
+};
+currentIdentifiers.add(id.name), queries.push(evaluatedQueryResult);
+} catch (cause) {
+errors.push(new QueryEvaluationError({ variable, cause, filename }));
+}
+}
+const evaluatedModule = {
+filename,
+queries,
+errors
+};
+report?.stream.evaluatedModules.emit(evaluatedModule), evaluatedModuleResults.push(evaluatedModule);
+}
+return report?.stream.evaluatedModules.end(), evaluatedModuleResults;
+}
+static async getQueryMapDeclaration({
+overloadClientMethods = !0,
+evaluatedModules
+}) {
+if (!overloadClientMethods) return { code: "", ast: t.program([]) };
+const queries = evaluatedModules.flatMap((module) => module.queries);
+if (!queries.length) return { code: "", ast: t.program([]) };
+const typesByQuerystring = {};
+for (const { id, query } of queries)
+typesByQuerystring[query] ??= [], typesByQuerystring[query].push(id.name);
+const queryReturnInterface = t.tsInterfaceDeclaration(
+SANITY_QUERIES,
+null,
+[],
+t.tsInterfaceBody(
+Object.entries(typesByQuerystring).map(([query, types]) => t.tsPropertySignature(
+t.stringLiteral(query),
+t.tsTypeAnnotation(
+types.length ? t.tsUnionType(types.map((type) => t.tsTypeReference(t.identifier(type)))) : t.tsNeverKeyword()
+)
+))
+)
+), declareModule = t.declareModule(
+t.stringLiteral("@sanity/client"),
+t.blockStatement([queryReturnInterface])
+), clientImport = t.addComments(
+t.importDeclaration([], t.stringLiteral("@sanity/client")),
+"leading",
+[{ type: "CommentLine", value: " Query TypeMap" }]
+), ast = t.program([clientImport, declareModule]);
+return { code: generateCode(ast), ast };
+}
+async generateTypes(options) {
+const { reporter: report } = options, internalReferenceSymbol = this.getInternalReferenceSymbolDeclaration(), schemaTypeDeclarations = this.getSchemaTypeDeclarations(options), allSanitySchemaTypesDeclaration = this.getAllSanitySchemaTypesDeclaration(options);
+report?.event.generatedSchemaTypes({
+internalReferenceSymbol,
+schemaTypeDeclarations,
+allSanitySchemaTypesDeclaration
+});
+const program = t.program([]);
+let code = "";
+for (const declaration of schemaTypeDeclarations)
+program.body.push(declaration.ast), code += declaration.code;
+program.body.push(allSanitySchemaTypesDeclaration.ast), code += allSanitySchemaTypesDeclaration.code, program.body.push(internalReferenceSymbol.ast), code += internalReferenceSymbol.code;
+const evaluatedModules = await TypeGenerator.getEvaluatedModules({
+...options,
+schemaTypeDeclarations,
+schemaTypeGenerator: this.getSchemaTypeGenerator(options)
+});
+for (const { queries } of evaluatedModules)
+for (const query of queries)
+program.body.push(query.ast), code += query.code;
+const queryMapDeclaration = await TypeGenerator.getQueryMapDeclaration({
+...options,
+evaluatedModules
+});
+return program.body.push(...queryMapDeclaration.ast.body), code += queryMapDeclaration.code, report?.event.generatedQueryTypes({ queryMapDeclaration }), { code, ast: program };
+}
 }
-
-
-
-
-
-
-
-
+export {
+QueryExtractionError,
+TypeGenerator,
+configDefinition,
+findQueriesInPath,
+findQueriesInSource,
+getResolver,
+readConfig,
+readSchema,
+registerBabel,
+safeParseQuery
+};
 //# sourceMappingURL=index.js.map