@sanity/codegen 5.7.0-next.8 → 5.7.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/run.js +31 -0
- package/dist/_exports/index.js +11 -0
- package/dist/_exports/index.js.map +1 -0
- package/dist/actions/generatedFileWarning.js +15 -0
- package/dist/actions/generatedFileWarning.js.map +1 -0
- package/dist/actions/typegenGenerate.worker.js +54 -0
- package/dist/actions/typegenGenerate.worker.js.map +1 -0
- package/dist/actions/types.js +3 -0
- package/dist/actions/types.js.map +1 -0
- package/dist/casing.js +27 -0
- package/dist/casing.js.map +1 -0
- package/dist/commands/typegen/generate.js +237 -0
- package/dist/commands/typegen/generate.js.map +1 -0
- package/dist/getBabelConfig.js +37 -0
- package/dist/getBabelConfig.js.map +1 -0
- package/dist/index.d.ts +459 -0
- package/dist/readConfig.js +38 -0
- package/dist/readConfig.js.map +1 -0
- package/dist/readSchema.js +14 -0
- package/dist/readSchema.js.map +1 -0
- package/dist/safeParseQuery.js +37 -0
- package/dist/safeParseQuery.js.map +1 -0
- package/dist/typeUtils.js +37 -0
- package/dist/typeUtils.js.map +1 -0
- package/dist/typescript/constants.js +12 -0
- package/dist/typescript/constants.js.map +1 -0
- package/dist/typescript/expressionResolvers.js +356 -0
- package/dist/typescript/expressionResolvers.js.map +1 -0
- package/dist/typescript/findQueriesInPath.js +69 -0
- package/dist/typescript/findQueriesInPath.js.map +1 -0
- package/dist/typescript/findQueriesInSource.js +175 -0
- package/dist/typescript/findQueriesInSource.js.map +1 -0
- package/dist/typescript/helpers.js +86 -0
- package/dist/typescript/helpers.js.map +1 -0
- package/dist/typescript/moduleResolver.js +33 -0
- package/dist/typescript/moduleResolver.js.map +1 -0
- package/dist/typescript/parseSource.js +75 -0
- package/dist/typescript/parseSource.js.map +1 -0
- package/dist/typescript/registerBabel.js +23 -0
- package/dist/typescript/registerBabel.js.map +1 -0
- package/dist/typescript/schemaTypeGenerator.js +323 -0
- package/dist/typescript/schemaTypeGenerator.js.map +1 -0
- package/dist/typescript/typeGenerator.js +240 -0
- package/dist/typescript/typeGenerator.js.map +1 -0
- package/dist/typescript/types.js +31 -0
- package/dist/typescript/types.js.map +1 -0
- package/dist/utils/count.js +6 -0
- package/dist/utils/count.js.map +1 -0
- package/dist/utils/formatPath.js +8 -0
- package/dist/utils/formatPath.js.map +1 -0
- package/dist/utils/getMessage.js +3 -0
- package/dist/utils/getMessage.js.map +1 -0
- package/dist/utils/percent.js +8 -0
- package/dist/utils/percent.js.map +1 -0
- package/oclif.manifest.json +39 -0
- package/package.json +49 -23
- package/lib/index.d.ts +0 -433
- package/lib/index.js +0 -1011
- package/lib/index.js.map +0 -1
package/lib/index.js
DELETED
|
@@ -1,1011 +0,0 @@
|
|
|
1
|
-
import fs$1, { readFile } from "node:fs/promises";
|
|
2
|
-
import json5 from "json5";
|
|
3
|
-
import * as z from "zod";
|
|
4
|
-
import { parse, typeEvaluate } from "groq-js";
|
|
5
|
-
import createDebug from "debug";
|
|
6
|
-
import glob from "globby";
|
|
7
|
-
import fs, { existsSync } from "node:fs";
|
|
8
|
-
import path, { dirname, join, resolve } from "node:path";
|
|
9
|
-
import { fileURLToPath } from "node:url";
|
|
10
|
-
import { createRequire } from "node:module";
|
|
11
|
-
import { parse as parse$1, traverse } from "@babel/core";
|
|
12
|
-
import * as t from "@babel/types";
|
|
13
|
-
import { Scope } from "@babel/traverse";
|
|
14
|
-
import { loadConfig, createMatchPath } from "tsconfig-paths";
|
|
15
|
-
import register from "@babel/register";
|
|
16
|
-
import process from "node:process";
|
|
17
|
-
import { createSelector } from "reselect";
|
|
18
|
-
import { CodeGenerator } from "@babel/generator";
|
|
19
|
-
// Zod schema for the typegen config file. Every field carries a default, so
// validating an empty object yields a complete, usable config.
const configDefinition = z.object({
  // Glob pattern(s) that are scanned for GROQ queries.
  path: z.string().or(z.array(z.string())).default([
    "./src/**/*.{ts,tsx,js,jsx,mjs,cjs,astro,vue,svelte}",
    "./app/**/*.{ts,tsx,js,jsx,mjs,cjs,astro,vue,svelte}",
    "./sanity/**/*.{ts,tsx,js,jsx,mjs,cjs}"
  ]),
  // Path to the extracted schema JSON file to generate types from.
  schema: z.string().default("./schema.json"),
  // Output path for the generated TypeScript declarations.
  generates: z.string().default("./sanity.types.ts"),
  formatGeneratedCode: z.boolean().default(!0),
  overloadClientMethods: z.boolean().default(!0)
});
/**
 * Reads and validates the typegen config file at `path2` (JSON5 syntax allowed).
 *
 * Error handling:
 * - Zod validation failures are re-thrown as a single readable Error listing
 *   each issue message, with the ZodError attached as `cause`.
 * - A missing file (ENOENT) is not an error: the schema defaults are returned.
 * - Anything else (e.g. a JSON5 syntax error) is re-thrown unchanged.
 */
async function readConfig(path2) {
  try {
    const content = await readFile(path2, "utf-8"), json = json5.parse(content);
    return configDefinition.parseAsync(json);
  } catch (error) {
    if (error instanceof z.ZodError)
      throw new Error(
        `Error in config file
${error.errors.map((err) => err.message).join(`
`)}`,
        { cause: error }
      );
    // Config file is optional; fall back to the defaults when it is absent.
    if (typeof error == "object" && error !== null && "code" in error && error.code === "ENOENT")
      return configDefinition.parse({});
    throw error;
  }
}
|
|
47
|
-
/**
 * Loads the extracted Sanity schema from the JSON file at `path2`.
 * Throws if the file is missing or is not valid JSON.
 */
async function readSchema(path2) {
  const raw = await readFile(path2, "utf-8");
  return JSON.parse(raw);
}
|
|
51
|
-
/**
 * Parses a GROQ query with groq-js, pre-filling a dummy value (0) for every
 * parameter that appears inside a slice expression (e.g. `[$from...$to]`),
 * since groq-js requires slice params to be defined at parse time.
 */
function safeParseQuery(query) {
  const params = {};
  for (const name of extractSliceParams(query)) {
    params[name] = 0;
  }
  return parse(query, { params });
}
|
|
57
|
-
/**
 * Yields the name of every GROQ parameter used as a slice bound, e.g.
 * `[$from...$to]` yields "from" then "to"; numeric bounds are skipped.
 *
 * Fix: the original pattern used `\d` (a single digit) for numeric bounds, so
 * slices like `[$from...10]` never matched and their params went undetected;
 * `\d+` accepts multi-digit bounds. The dead `if (matches)` guard is removed —
 * `String.prototype.matchAll` always returns an iterator.
 */
function* extractSliceParams(query) {
  const sliceRegex = /\[(\$(\w+)|\d+)\.\.\.?(\$(\w+)|\d+)\]/g;
  for (const match of query.matchAll(sliceRegex)) {
    // match[2]/match[4] are only set when the bound was a `$param`.
    const start = match[1] === `$${match[2]}` ? match[2] : null;
    if (start !== null) yield start;
    const end = match[3] === `$${match[4]}` ? match[4] : null;
    if (end !== null) yield end;
  }
}
|
|
67
|
-
const __dirname$1 = dirname(fileURLToPath(import.meta.url));
/**
 * Walks upward from `path2` looking for a `babel.config.json`, returning the
 * first match. Throws once the filesystem root is reached without a hit.
 */
function findBabelConfig(path2) {
  let current = path2;
  for (;;) {
    const candidate = join(current, "babel.config.json");
    if (existsSync(candidate)) {
      return candidate;
    }
    const parent = resolve(join(current, ".."));
    // `resolve` of the root's parent is the root itself — stop there.
    if (!parent || parent === current) {
      break;
    }
    current = parent;
  }
  throw new Error("Could not find `babel.config.json` in @sanity/codegen");
}
/**
 * Returns Babel options that extend the package's own babel.config.json.
 * The `path2` argument is currently unused (kept for interface stability).
 */
function getBabelConfig(path2) {
  return { extends: findBabelConfig(__dirname$1) };
}
|
|
80
|
-
/**
 * Parses a source file into a Babel AST. For .astro/.vue/.svelte files the
 * script content is first plucked out (see parseAstro/parseVue/parseSvelte)
 * and the filename gets a ".ts" suffix so Babel applies its TS pipeline.
 * Throws when @babel/core returns no result.
 */
function parseSourceFile(_source, _filename, babelOptions) {
  let source = _source, filename = _filename;
  filename.endsWith(".astro") ? (filename += ".ts", source = parseAstro(source)) : filename.endsWith(".vue") ? (filename += ".ts", source = parseVue(source)) : filename.endsWith(".svelte") && (filename += ".ts", source = parseSvelte(source));
  const result = parse$1(source, {
    ...babelOptions,
    filename
  });
  if (!result)
    // NOTE(review): `$(unknown)` looks like a redacted `${filename}`
    // interpolation in this published artifact — confirm against the source.
    throw new Error(`Failed to parse $(unknown)`);
  return result;
}
|
|
91
|
-
/**
 * Extracts the frontmatter script(s) from an Astro file: the content of every
 * `---` ... `---` fence, with the fence delimiter lines removed. Multiple
 * fences are joined with newlines; returns "" when no fence is present.
 */
function parseAstro(source) {
  const fences = source.match(/---\n([\s\S]*?)\n---/g);
  if (!fences) {
    return "";
  }
  const bodies = fences.map((fence) => {
    const lines = fence.split("\n");
    // Drop the opening and closing `---` lines, keep the script between them.
    return lines.slice(1, -1).join("\n");
  });
  return bodies.join("\n");
}
|
|
98
|
-
/**
 * Extracts the contents of every `<script>` block from a Vue single-file
 * component (including `<script setup>` and `generic="..."` variants),
 * joined with newlines; "" when none are found.
 */
function parseVue(source) {
  const matches = matchAllPolyfill(source, /<script(?:\s+generic=["'][^"']*["'])?[^>]*>([\s\S]*?)<\/script>/g);
  return matches.length ? matches.map((match) => match[1]).join("\n") : "";
}
/**
 * Extracts the contents of every `<script>` block from a Svelte component,
 * joined with newlines; "" when none are found.
 */
function parseSvelte(source) {
  const matches = matchAllPolyfill(source, /<script[^>]*>([\s\S]*?)<\/script>/g);
  return matches.length ? matches.map((match) => match[1]).join("\n") : "";
}
/**
 * Collects all matches of a global regex, like String.prototype.matchAll but
 * returning a plain array. Throws if the regex lacks the /g flag (a non-global
 * regex would make `exec` loop forever on the first match).
 *
 * Fix: guard against zero-length matches — without advancing `lastIndex` a
 * pattern that can match the empty string would spin forever.
 */
function matchAllPolyfill(str, regex) {
  if (!regex.global)
    throw new Error("matchAll polyfill requires a global regex (with /g flag)");
  const matches = [];
  let match;
  while ((match = regex.exec(str)) !== null) {
    matches.push(match);
    if (match.index === regex.lastIndex) {
      // Zero-length match: step forward one position to guarantee progress.
      regex.lastIndex += 1;
    }
  }
  return matches;
}
|
|
117
|
-
const debug$2 = createDebug("sanity:codegen:findQueries:debug"), TAGGED_TEMPLATE_ALLOW_LIST = ["groq"], FUNCTION_WRAPPER_ALLOW_LIST = ["defineQuery"];
/**
 * Statically resolves a Babel AST expression to the string it would evaluate
 * to at runtime — the core of query plucking. Recurses through template
 * literals, identifiers, function calls, imports, etc., threading along:
 * - `file`: the AST of the file currently being inspected
 * - `scope`: the Babel scope used for binding lookups
 * - `params`/`fnArguments`: formal params and call arguments, so identifiers
 *   inside a called function body can be substituted with argument values
 * Throws for any node kind it cannot statically evaluate.
 *
 * NOTE(review): `$(unknown)` in the messages below appears to be a redacted
 * `${filename}` interpolation in this published diff — confirm upstream.
 */
function resolveExpression({
  node,
  file,
  scope,
  filename,
  resolver,
  babelConfig,
  params = [],
  fnArguments = []
}) {
  // groq`...` tagged template: unwrap to the template literal itself.
  if (debug$2(
    `Resolving node ${node.type} in $(unknown):${node.loc?.start.line}:${node.loc?.start.column}`
  ), t.isTaggedTemplateExpression(node) && t.isIdentifier(node.tag) && TAGGED_TEMPLATE_ALLOW_LIST.includes(node.tag.name))
    return resolveExpression({
      node: node.quasi,
      scope,
      filename,
      file,
      resolver,
      params,
      babelConfig,
      fnArguments
    });
  // Template literal: resolve each `${...}` hole, then interleave with quasis.
  if (t.isTemplateLiteral(node)) {
    const resolvedExpressions = node.expressions.map(
      (expression) => resolveExpression({
        node: expression,
        scope,
        filename,
        file,
        resolver,
        params,
        babelConfig,
        fnArguments
      })
    );
    return node.quasis.map((quasi, idx) => (quasi.value.cooked || "") + (resolvedExpressions[idx] || "")).join("");
  }
  // Plain literal: stringify; null/regexp literals have no useful string form.
  if (t.isLiteral(node)) {
    if (node.type === "NullLiteral" || node.type === "RegExpLiteral")
      throw new Error(`Unsupported literal type: ${node.type}`);
    return node.value.toString();
  }
  // Identifier: follow the binding (or a matching function parameter).
  if (t.isIdentifier(node))
    return resolveIdentifier({
      node,
      scope,
      filename,
      file,
      resolver,
      fnArguments,
      babelConfig,
      params
    });
  // `const x = <init>`: resolve the initializer (or a default value pattern).
  if (t.isVariableDeclarator(node)) {
    const init = node.init ?? (t.isAssignmentPattern(node.id) && node.id.right);
    if (!init)
      throw new Error("Unsupported variable declarator");
    return resolveExpression({
      node: init,
      fnArguments,
      scope,
      filename,
      file,
      babelConfig,
      resolver
    });
  }
  // defineQuery("..."): transparent wrapper, resolve its first argument.
  if (t.isCallExpression(node) && t.isIdentifier(node.callee) && FUNCTION_WRAPPER_ALLOW_LIST.includes(node.callee.name))
    return resolveExpression({
      node: node.arguments[0],
      scope,
      filename,
      file,
      resolver,
      babelConfig,
      params
    });
  // Any other call: resolve the callee, carrying the call's arguments along.
  if (t.isCallExpression(node))
    return resolveCallExpression({
      node,
      scope,
      filename,
      file,
      resolver,
      babelConfig,
      params
    });
  // Function bodies: bind each formal param to its argument in a child scope,
  // then resolve the body with the function's own params in effect.
  if (t.isArrowFunctionExpression(node) || t.isFunctionDeclaration(node) || t.isFunctionExpression(node)) {
    const newScope = new Scope(scope.path, scope);
    return params.forEach((param, i) => {
      newScope.push({
        id: param,
        init: fnArguments[i]
      });
    }), resolveExpression({
      node: node.body,
      params: node.params,
      fnArguments,
      scope: newScope,
      filename,
      file,
      babelConfig,
      resolver
    });
  }
  // `new Foo(...)`: resolve the constructor reference.
  if (t.isNewExpression(node))
    return resolveExpression({
      node: node.callee,
      scope,
      filename,
      file,
      babelConfig,
      resolver
    });
  // Imported name: hop into the imported module and resolve there.
  if (t.isImportDefaultSpecifier(node) || t.isImportSpecifier(node))
    return resolveImportSpecifier({ node, file, filename, fnArguments, resolver, babelConfig });
  // Default-value pattern (`x = "..."`): resolve the default expression.
  if (t.isAssignmentPattern(node))
    return resolveExpression({
      node: node.right,
      scope,
      filename,
      file,
      resolver,
      params,
      babelConfig,
      fnArguments
    });
  // `expr as T`: the TS cast is transparent at runtime.
  if (t.isTSAsExpression(node))
    return resolveExpression({
      node: node.expression,
      scope,
      filename,
      file,
      resolver,
      params,
      babelConfig,
      fnArguments
    });
  throw new Error(
    `Unsupported expression type: ${node.type} in $(unknown):${node.loc?.start.line}:${node.loc?.start.column}`
  );
}
|
|
261
|
-
/**
 * Resolves an identifier to its string value. Resolution order:
 * 1. If the identifier names a formal parameter, substitute the matching call
 *    argument (or the parameter's default when no argument was passed) —
 *    but only when that substitute is a literal.
 * 2. Otherwise follow the scope binding and resolve its declaration.
 * Throws when the binding is missing or resolves back to the same identifier
 * (which would recurse forever).
 */
function resolveIdentifier({
  node,
  scope,
  filename,
  file,
  resolver,
  babelConfig,
  fnArguments,
  params
}) {
  // Match the identifier against formal params (plain or with a default).
  const paramIndex = params.findIndex(
    (param) => t.isIdentifier(param) && node.name === param.name || t.isAssignmentPattern(param) && t.isIdentifier(param.left) && node.name === param.left.name
  );
  let argument = fnArguments[paramIndex];
  // No argument supplied for a defaulted param -> use the default expression.
  if (!argument && paramIndex >= 0 && t.isAssignmentPattern(params[paramIndex]) && (argument = params[paramIndex].right), argument && t.isLiteral(argument))
    return resolveExpression({
      node: argument,
      scope,
      filename,
      file,
      resolver,
      params,
      babelConfig,
      fnArguments
    });
  const binding = scope.getBinding(node.name);
  if (binding) {
    // A binding whose node is the identifier itself cannot be resolved
    // further — bail out instead of recursing infinitely.
    if (t.isIdentifier(binding.path.node) && binding.path.node.name === node.name)
      throw new Error(
        `Could not resolve same identifier "${node.name}" in "$(unknown):${node.loc?.start.line}:${node.loc?.start.column}"`
      );
    return resolveExpression({
      node: binding.path.node,
      params,
      fnArguments,
      scope,
      filename,
      babelConfig,
      file,
      resolver
    });
  }
  throw new Error(
    `Could not find binding for node "${node.name}" in $(unknown):${node.loc?.start.line}:${node.loc?.start.column}`
  );
}
|
|
307
|
-
/**
 * Resolves a call expression by resolving its callee, passing the call's
 * arguments along as `fnArguments` so the callee's function body can
 * substitute its parameters with these values.
 */
function resolveCallExpression({
  node,
  scope,
  filename,
  file,
  resolver,
  babelConfig,
  params
}) {
  const { callee } = node;
  return resolveExpression({
    node: callee,
    scope,
    filename,
    file,
    resolver,
    babelConfig,
    params,
    fnArguments: node.arguments
  });
}
|
|
328
|
-
/**
 * Resolves an imported name across module boundaries:
 * 1. Find the import declaration in the current file that binds the name.
 * 2. Resolve the source module to a file (relative paths against the current
 *    file's directory, bare specifiers through `resolver`), read and parse it.
 * 3. Look the name up in the imported module's program scope; if bound there,
 *    resolve it in that module.
 * 4. Otherwise chase `export { name }` re-exports, then `export * from`
 *    declarations (swallowing only the sentinel "noBinding" failures so other
 *    star-exports can still be tried).
 * Throws when the name cannot be located anywhere.
 */
function resolveImportSpecifier({
  node,
  file,
  filename,
  fnArguments,
  resolver,
  babelConfig
}) {
  let importDeclaration;
  if (traverse(file, {
    ImportDeclaration(n) {
      if (t.isImportDeclaration(n.node))
        for (const specifier of n.node.specifiers) {
          if (t.isImportDefaultSpecifier(specifier) && specifier.local.loc?.identifierName === node.local.name) {
            importDeclaration = n.node;
            break;
          }
          specifier.local.name === node.local.name && (importDeclaration = n.node);
        }
    }
  }), !importDeclaration)
    throw new Error(`Could not find import declaration for ${node.local.name}`);
  // Resolve the module specifier to an actual file and parse it.
  const importName = node.local.name, importFileName = importDeclaration.source.value, importPath = importFileName.startsWith("./") || importFileName.startsWith("../") ? path.resolve(path.dirname(filename), importFileName) : importFileName, resolvedFile = resolver(importPath), source = fs.readFileSync(resolvedFile), tree = parseSourceFile(source.toString(), resolvedFile, babelConfig);
  let newScope;
  if (traverse(tree, {
    Program(p) {
      newScope = p.scope;
    }
  }), !newScope)
    // NOTE(review): `$(unknown)` appears to be a redacted interpolation in
    // this published artifact — confirm against the package source.
    throw new Error(`Could not find scope for $(unknown)`);
  // Direct binding in the imported module.
  const binding = newScope.getBinding(importName);
  if (binding)
    return resolveExpression({
      node: binding.path.node,
      file: tree,
      scope: newScope,
      fnArguments,
      babelConfig,
      filename: resolvedFile,
      resolver
    });
  // Named re-export: `export { importName } from "..."`.
  let namedExport, newImportName;
  if (traverse(tree, {
    ExportDeclaration(p) {
      if (p.node.type === "ExportNamedDeclaration")
        for (const specifier of p.node.specifiers)
          specifier.type === "ExportSpecifier" && specifier.exported.type === "Identifier" && specifier.exported.name === importName && (namedExport = p.node, newImportName = specifier.exported.name);
    }
  }), namedExport && newImportName)
    return resolveExportSpecifier({
      node: namedExport,
      importName: newImportName,
      filename: resolvedFile,
      fnArguments,
      resolver,
      babelConfig
    });
  // Star re-export: try each `export * from "..."` until one resolves.
  let result;
  if (traverse(tree, {
    ExportDeclaration(p) {
      if (p.node.type === "ExportAllDeclaration")
        try {
          result = resolveExportSpecifier({
            node: p.node,
            importName,
            filename: resolvedFile,
            fnArguments,
            resolver,
            babelConfig
          });
        } catch (e) {
          // Only swallow the "name not bound in that module" sentinel;
          // real failures propagate.
          if (e.cause !== `noBinding:${importName}`) throw e;
        }
    }
  }), result) return result;
  throw new Error(`Could not find binding for import "${importName}" in ${importFileName}`);
}
|
|
405
|
-
/**
 * Resolves `importName` through a re-export declaration (`export {...} from`
 * or `export * from`): resolves the declaration's source to a file, parses it
 * and looks the name up in that module's program scope.
 *
 * When the name is not bound there, throws with `cause` set to the sentinel
 * string `noBinding:<name>` — resolveImportSpecifier uses this to keep trying
 * other star-exports.
 */
function resolveExportSpecifier({
  node,
  importName,
  filename,
  fnArguments,
  babelConfig,
  resolver
}) {
  if (!node.source)
    throw new Error(`Could not find source for export "${importName}" in $(unknown)`);
  const importFileName = node.source.value, importPath = path.resolve(path.dirname(filename), importFileName), resolvedFile = resolver(importPath), source = fs.readFileSync(resolvedFile), tree = parseSourceFile(source.toString(), resolvedFile, babelConfig);
  let newScope;
  if (traverse(tree, {
    Program(p) {
      newScope = p.scope;
    }
  }), !newScope)
    throw new Error(`Could not find scope for $(unknown)`);
  const binding = newScope.getBinding(importName);
  if (binding)
    return resolveExpression({
      node: binding.path.node,
      file: tree,
      scope: newScope,
      filename: resolvedFile,
      babelConfig,
      resolver,
      fnArguments
    });
  // Machine-readable cause lets the caller distinguish "not here" from
  // genuine errors while chasing `export *` chains.
  throw new Error(`Could not find binding for export "${importName}" in ${importFileName}`, {
    cause: `noBinding:${importName}`
  });
}
|
|
438
|
-
// True for any non-null object or function (i.e. a value that can carry
// properties such as `.message`).
const isRecord = (value) => {
  if (!value) {
    return false;
  }
  const kind = typeof value;
  return kind === "object" || kind === "function";
};
|
|
439
|
-
/**
 * Raised when statically extracting a query string from source fails.
 * Carries the offending `variable` (when known) and the `filename`, and keeps
 * the underlying failure on `cause`.
 */
class QueryExtractionError extends Error {
  variable;
  filename;
  constructor({ variable, cause, filename }) {
    const causeMessage = isRecord(cause) && typeof cause.message == "string" ? cause.message : "Unknown error";
    const variablePrefix = variable ? `from variable '${variable.id.name}' ` : "";
    super(`Error while extracting query ${variablePrefix}in $(unknown): ${causeMessage}`);
    this.name = "QueryExtractionError";
    this.cause = cause;
    this.variable = variable;
    this.filename = filename;
  }
}
|
|
448
|
-
/**
 * Raised when type-evaluating an extracted query fails.
 * Mirrors QueryExtractionError: carries `variable` (when known), `filename`,
 * and the underlying failure on `cause`.
 */
class QueryEvaluationError extends Error {
  variable;
  filename;
  constructor({ variable, cause, filename }) {
    const causeMessage = isRecord(cause) && typeof cause.message == "string" ? cause.message : "Unknown error";
    const variablePrefix = variable ? `from variable '${variable.id.name}' ` : "";
    super(`Error while evaluating query ${variablePrefix}in $(unknown): ${causeMessage}`);
    this.name = "QueryEvaluationError";
    this.cause = cause;
    this.variable = variable;
    this.filename = filename;
  }
}
|
|
457
|
-
const require$2 = createRequire(import.meta.url), groqTagName = "groq", defineQueryFunctionName = "defineQuery", groqModuleName = "groq", nextSanityModuleName = "next-sanity", sveltekitModuleName = "@sanity/sveltekit", ignoreValue = "@sanity-typegen-ignore";
/**
 * Finds every GROQ query declared in a single source file.
 *
 * A query is a variable declarator whose initializer is either a groq`...`
 * tagged template or a `defineQuery(...)` call imported from one of the
 * recognized modules (groq, next-sanity, @sanity/sveltekit). Declarations
 * preceded by a `@sanity-typegen-ignore` comment are skipped.
 *
 * Returns `{ filename, queries, errors }` — extraction failures are collected
 * as QueryExtractionError entries rather than aborting the whole file.
 */
function findQueriesInSource(source, filename, babelConfig = getBabelConfig(), resolver = require$2.resolve) {
  const queries = [], errors = [], file = parseSourceFile(source, filename, babelConfig);
  return traverse(file, {
    // Look for variable declarations, e.g. `const myQuery = groq`... and extract the query.
    // The variable name is used as the name of the query result type
    VariableDeclarator(path2) {
      const { node, scope } = path2, init = node.init, isGroqTemplateTag = t.isTaggedTemplateExpression(init) && t.isIdentifier(init.tag) && init.tag.name === groqTagName, isDefineQueryCall = t.isCallExpression(init) && (isImportFrom(groqModuleName, defineQueryFunctionName, scope, init.callee) || isImportFrom(nextSanityModuleName, defineQueryFunctionName, scope, init.callee) || isImportFrom(sveltekitModuleName, defineQueryFunctionName, scope, init.callee));
      if (t.isIdentifier(node.id) && (isGroqTemplateTag || isDefineQueryCall)) {
        if (declarationLeadingCommentContains(path2, ignoreValue))
          return;
        // Keep the declarator's source span (when present) so callers can
        // point diagnostics at the original declaration.
        const { id, start, end } = node, variable = { id, ...start && { start }, ...end && { end } };
        try {
          const query = resolveExpression({
            node: init,
            file,
            scope,
            babelConfig,
            filename,
            resolver
          });
          queries.push({ variable, query, filename });
        } catch (cause) {
          errors.push(new QueryExtractionError({ filename, variable, cause }));
        }
      }
    }
  }), { filename, queries, errors };
}
|
|
486
|
-
/**
 * True when the variable declaration enclosing `path2` (or its parent node,
 * e.g. an `export` statement wrapping it) has a leading comment whose trimmed
 * text equals `comment`. Used to honor `@sanity-typegen-ignore`.
 */
function declarationLeadingCommentContains(path2, comment) {
  const variableDeclaration = path2.find((node) => node.isVariableDeclaration());
  return variableDeclaration ? !!(variableDeclaration.node.leadingComments?.find(
    (commentItem) => commentItem.value.trim() === comment
  ) || variableDeclaration.parent.leadingComments?.find(
    (commentItem) => commentItem.value.trim() === comment
  )) : !1;
}
|
|
494
|
-
/**
 * Checks whether `node` (a callee expression) refers to `importName` imported
 * from `moduleName`. Recognized shapes:
 * - named import:      `import { importName } from "moduleName"` (value imports only)
 * - CJS require:       `const x = require("moduleName")`
 * - namespace access:  `import * as ns from "moduleName"; ns.importName(...)`
 * Returns false for anything else.
 */
function isImportFrom(moduleName, importName, scope, node) {
  if (t.isIdentifier(node)) {
    const binding = scope.getBinding(node.name);
    if (!binding)
      return !1;
    const { path: path2 } = binding;
    // `import { importName } from "moduleName"` — type-only imports rejected.
    if (t.isImportSpecifier(path2.node))
      return path2.node.importKind === "value" && path2.parentPath && t.isImportDeclaration(path2.parentPath.node) && path2.parentPath.node.source.value === moduleName && t.isIdentifier(path2.node.imported) && path2.node.imported.name === importName;
    // `const x = require("moduleName")`.
    if (t.isVariableDeclarator(path2.node)) {
      const { init } = path2.node;
      return t.isCallExpression(init) && t.isIdentifier(init.callee) && init.callee.name === "require" && t.isStringLiteral(init.arguments[0]) && init.arguments[0].value === moduleName;
    }
  }
  // `ns.importName` where ns is a namespace import of moduleName.
  if (t.isMemberExpression(node)) {
    const { object, property } = node;
    if (!t.isIdentifier(object))
      return !1;
    const binding = scope.getBinding(object.name);
    if (!binding)
      return !1;
    const { path: path2 } = binding;
    return t.isIdentifier(object) && t.isIdentifier(property) && property.name === importName && t.isImportNamespaceSpecifier(path2.node) && path2.parentPath && t.isImportDeclaration(path2.parentPath.node) && path2.parentPath.node.source.value === moduleName;
  }
  return !1;
}
|
|
519
|
-
const require$1 = createRequire(import.meta.url), debug$1 = createDebug("sanity:codegen:moduleResolver");
/**
 * Builds a module resolver that honors tsconfig `paths` aliases.
 * Falls back to plain `require.resolve` when no tsconfig can be loaded from
 * `cwd`. The returned function matches the `require.resolve` interface
 * (including a `paths` helper).
 */
function getResolver(cwd) {
  const tsConfig = loadConfig(cwd);
  if (tsConfig.resultType === "failed")
    return debug$1("Could not load tsconfig, using default resolver: %s", tsConfig.message), require$1.resolve;
  const matchPath = createMatchPath(
    tsConfig.absoluteBaseUrl,
    tsConfig.paths,
    tsConfig.mainFields,
    tsConfig.addMatchAll
  ), resolve2 = function(request, options) {
    // Try the tsconfig path mapping first; unmapped requests resolve normally.
    const found = matchPath(request);
    return found !== void 0 ? require$1.resolve(found, options) : require$1.resolve(request, options);
  };
  return resolve2.paths = (request) => require$1.resolve.paths(request), resolve2;
}
|
|
535
|
-
const debug = createDebug("sanity:codegen:findQueries:debug");
/**
 * Globs `path2` (node_modules always excluded) and returns `{ files, queries }`
 * where `queries` is an async generator yielding one per-file result
 * (`{ filename, queries, errors }`) at a time, so large projects are
 * processed incrementally.
 *
 * Enforces globally-unique query variable names across all scanned files.
 * Per-file failures are yielded as an errors-only result instead of aborting
 * the whole scan.
 */
function findQueriesInPath({
  path: path2,
  babelOptions = getBabelConfig(),
  resolver = getResolver()
}) {
  // Tracks every query name seen so far, across files.
  const queryNames = /* @__PURE__ */ new Set();
  debug(`Globing ${path2}`);
  // Sorted for deterministic output order.
  const files = glob.sync(path2, {
    absolute: !1,
    ignore: ["**/node_modules/**"],
    // we never want to look in node_modules
    onlyFiles: !0
  }).sort();
  async function* getQueries() {
    for (const filename of files)
      if (typeof filename == "string") {
        // NOTE(review): `$(unknown)` appears to be a redacted `${filename}`
        // interpolation in this published artifact — confirm upstream.
        debug(`Found file "$(unknown)"`);
        try {
          const source = await fs$1.readFile(filename, "utf8"), pluckedModuleResult = findQueriesInSource(source, filename, babelOptions, resolver);
          for (const { variable } of pluckedModuleResult.queries) {
            if (queryNames.has(variable.id.name))
              throw new Error(
                `Duplicate query name found: "${variable.id.name}". Query names must be unique across all files.`
              );
            queryNames.add(variable.id.name);
          }
          yield pluckedModuleResult;
        } catch (cause) {
          debug(`Error in file "$(unknown)"`, cause), yield {
            filename,
            queries: [],
            errors: [new QueryExtractionError({ cause, filename })]
          };
        }
      }
  }
  return { files, queries: getQueries() };
}
|
|
574
|
-
/**
 * Installs the @babel/register require hook so TS/JSX files can be loaded
 * directly (e.g. when reading a sanity.config.ts). Uses the package's own
 * Babel config when none is supplied.
 */
function registerBabel(babelOptions) {
  const options = babelOptions || getBabelConfig();
  register({ ...options, extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"] });
}
|
|
578
|
-
/**
 * Derives the generated result-type suffix for a query variable name, keeping
 * the name's casing convention:
 *   camelCase  -> "<name>Result"
 *   UPPER_SNAKE -> "<name>_RESULT"
 *   lower_snake -> "<name>_result"
 * Anything else is stripped to alphanumerics and suffixed with "Result";
 * an empty/missing name yields plain "result".
 */
function resultSuffix(variableName) {
  if (!variableName) {
    return "result";
  }
  if (/^[a-z][A-Za-z0-9]*$/.test(variableName)) {
    return `${variableName}Result`;
  }
  if (/^[A-Z0-9_]+$/.test(variableName)) {
    return `${variableName}_RESULT`;
  }
  if (/^[a-z0-9_]+$/.test(variableName) && variableName.includes("_")) {
    return `${variableName}_result`;
  }
  return `${variableName.replace(/[^A-Za-z0-9]/g, "")}Result`;
}
|
|
583
|
-
// Well-known identifiers emitted by the type generator. Their names are
// reserved so schema-derived type names can never collide with them (see
// getUniqueIdentifierForName).
const INTERNAL_REFERENCE_SYMBOL = t.identifier("internalGroqTypeReferenceTo"), ALL_SANITY_SCHEMA_TYPES = t.identifier("AllSanitySchemaTypes"), SANITY_QUERIES = t.identifier("SanityQueries"), ARRAY_OF = t.identifier("ArrayOf"), RESERVED_IDENTIFIERS = /* @__PURE__ */ new Set();
RESERVED_IDENTIFIERS.add(SANITY_QUERIES.name);
RESERVED_IDENTIFIERS.add(ALL_SANITY_SCHEMA_TYPES.name);
RESERVED_IDENTIFIERS.add(INTERNAL_REFERENCE_SYMBOL.name);
RESERVED_IDENTIFIERS.add(ARRAY_OF.name);
|
|
588
|
-
/**
 * Normalizes `filename` (relative or absolute) to a path relative to `root`.
 */
function normalizePath(root, filename) {
  return path.relative(root, path.resolve(root, filename));
}
|
|
592
|
-
/**
 * Converts an arbitrary string into a valid JS identifier fragment:
 * a leading digit becomes "_", and any run of illegal characters is dropped
 * with the following character upper-cased (camelCasing across separators).
 */
function sanitizeIdentifier(input) {
  const noLeadingDigit = input.replace(/^\d/, "_");
  return noLeadingDigit.replace(/[^$\w]+(.)/g, (_match, nextChar) => nextChar.toUpperCase());
}
|
|
595
|
-
/**
 * True when `input` is a simple ASCII JS identifier (letters, digits, `_`,
 * `$`; not starting with a digit).
 */
function isIdentifierName(input) {
  const identifierPattern = /^[a-zA-Z_$][a-zA-Z0-9_$]*$/;
  return identifierPattern.test(input);
}
|
|
598
|
-
/**
 * Sanitizes `input` into an identifier and upper-cases its first character
 * (PascalCase-style, as used for generated type names).
 */
function normalizeIdentifier(input) {
  const sanitized = sanitizeIdentifier(input);
  return sanitized.charAt(0).toUpperCase() + sanitized.slice(1);
}
|
|
602
|
-
/**
 * Produces a Babel identifier for `name` that collides neither with any name
 * in `currentIdentifiers` nor with the generator's RESERVED_IDENTIFIERS.
 * Collisions are resolved by appending `_2`, `_3`, ... to the normalized name.
 */
function getUniqueIdentifierForName(name, currentIdentifiers) {
  const desiredName = normalizeIdentifier(name);
  let resultingName = desiredName, index = 2;
  for (; currentIdentifiers.has(resultingName) || RESERVED_IDENTIFIERS.has(resultingName); )
    resultingName = `${desiredName}_${index}`, index++;
  return t.identifier(resultingName);
}
|
|
609
|
-
/**
 * Wraps a zero-argument function so it is invoked at most once; the first
 * result is cached and returned on every subsequent call (even if it is
 * undefined — a separate flag tracks whether the computation has run).
 */
function computeOnce(fn) {
  let hasRun = false;
  let value;
  return function () {
    if (!hasRun) {
      value = fn();
      hasRun = true;
    }
    return value;
  };
}
|
|
615
|
-
/**
 * Memoizes a single-argument function keyed by object identity. Results are
 * held in a WeakMap, so cached entries are released when their key objects
 * are garbage-collected.
 */
function weakMapMemo(fn) {
  const cache = new WeakMap();
  return function (arg) {
    if (!cache.has(arg)) {
      cache.set(arg, fn(arg));
    }
    return cache.get(arg);
  };
}
|
|
623
|
-
/**
 * Renders a Babel AST node to source text via @babel/generator, trimmed and
 * followed by a blank line so consecutive declarations in the output file are
 * separated.
 * NOTE(review): the template literal's interior is partially garbled in this
 * published artifact; reconstructed as trimmed code + two newlines — confirm
 * against the package source.
 */
function generateCode(node) {
  return `${new CodeGenerator(node).generate().code.trim()}

`;
}
|
|
628
|
-
/**
 * Given an array type node, returns a copy whose union member list is
 * filtered by `predicate`. When the element type is not a union the node is
 * returned unchanged (same reference). Never mutates the input.
 */
function getFilterArrayUnionType(typeNode, predicate) {
  if (typeNode.of.type !== "union") {
    return typeNode;
  }
  const filteredMembers = typeNode.of.of.filter(predicate);
  return {
    ...typeNode,
    of: {
      ...typeNode.of,
      of: filteredMembers,
    },
  };
}
|
|
637
|
-
/**
 * Turns a Sanity schema (array of named type nodes) into Babel TypeScript AST
 * types. On construction it validates name uniqueness, assigns a unique TS
 * identifier per schema type, and eagerly generates a TS type for each one.
 * Also evaluates GROQ queries against the schema (memoized per query object).
 */
class SchemaTypeGenerator {
  // The raw schema array passed to the constructor.
  schema;
  // Map of schema type name -> generated Babel TS type node.
  tsTypes = /* @__PURE__ */ new Map();
  // Map of schema type name -> unique Babel identifier node.
  identifiers = /* @__PURE__ */ new Map();
  /**
   * @param schema - Array of schema type nodes, each with a unique `name`.
   * @throws {Error} if two schema types share the same name.
   */
  constructor(schema) {
    this.schema = schema;
    // Pass 1: reject duplicate type names up front.
    const uniqueTypeNames = /* @__PURE__ */ new Set();
    for (const type of schema) {
      if (uniqueTypeNames.has(type.name))
        throw new Error(
          `Duplicate type name "${type.name}" in schema. Type names must be unique within the same schema.`
        );
      uniqueTypeNames.add(type.name);
    }
    // Pass 2: assign each type a normalized identifier that is unique among
    // the identifiers assigned so far (collisions get a numeric suffix).
    for (const type of schema) {
      const currentIdentifierNames = new Set(
        Array.from(this.identifiers.values()).map((id) => id.name)
      ), uniqueIdentifier = getUniqueIdentifierForName(type.name, currentIdentifierNames);
      this.identifiers.set(type.name, uniqueIdentifier);
    }
    // Pass 3: generate the TS type for every schema type. Runs after pass 2 so
    // inline references can resolve identifiers for types declared later.
    for (const type of schema)
      this.tsTypes.set(type.name, this.generateTsType(type));
  }
  /**
   * Recursively converts a schema/typeEvaluator type node into a Babel TS
   * type node. Literal values (string/number/boolean with `value` set) become
   * literal types; otherwise the corresponding keyword type is used.
   * @throws {Error} for unrecognized node types.
   */
  generateTsType(typeNode) {
    switch (typeNode.type) {
      case "string":
        return typeNode.value !== void 0 ? t.tsLiteralType(t.stringLiteral(typeNode.value)) : t.tsStringKeyword();
      case "number":
        return typeNode.value !== void 0 ? t.tsLiteralType(t.numericLiteral(typeNode.value)) : t.tsNumberKeyword();
      case "boolean":
        return typeNode.value !== void 0 ? t.tsLiteralType(t.booleanLiteral(typeNode.value)) : t.tsBooleanKeyword();
      case "unknown":
        return t.tsUnknownKeyword();
      case "document":
        return this.generateDocumentTsType(typeNode);
      case "type":
        // A named "type" node wraps its actual definition in `value`.
        return this.generateTsType(typeNode.value);
      case "array":
        return this.generateArrayTsType(typeNode);
      case "object":
        return this.generateObjectTsType(typeNode);
      case "union":
        return this.generateUnionTsType(typeNode);
      case "inline":
        return this.generateInlineTsType(typeNode);
      case "null":
        return t.tsNullKeyword();
      default:
        throw new Error(
          `Encountered unsupported node type "${// @ts-expect-error This should never happen
          typeNode.type}" while generating schema types`
        );
    }
  }
  /**
   * Helper function used to generate TS types for arrays of inline types, or arrays of inline types
   * wrapped in the ArrayOf wrapper that adds _key prop
   */
  generateArrayOfTsType(typeNode) {
    const typeNodes = this.generateTsType(typeNode.of);
    // ArrayOf<T> = Array<T & { _key: string }> (declared by TypeGenerator).
    return t.tsTypeReference(ARRAY_OF, t.tsTypeParameterInstantiation([typeNodes]));
  }
  // Helper function used to generate TS types for array type nodes.
  // Inline members go through the ArrayOf wrapper (adds `_key`); plain members
  // use Array<T>; mixed unions are split into a union of the two array forms.
  generateArrayTsType(typeNode) {
    if (typeNode.of.type === "inline")
      return this.generateArrayOfTsType(typeNode);
    if (typeNode.of.type !== "union") {
      const typeNodes = this.generateTsType(typeNode.of);
      return t.tsTypeReference(t.identifier("Array"), t.tsTypeParameterInstantiation([typeNodes]));
    }
    if (typeNode.of.of.every((unionTypeNode) => unionTypeNode.type !== "inline")) {
      const typeNodes = this.generateTsType(typeNode.of);
      return t.tsTypeReference(t.identifier("Array"), t.tsTypeParameterInstantiation([typeNodes]));
    }
    if (typeNode.of.of.every((unionMember) => unionMember.type === "inline"))
      return this.generateArrayOfTsType(typeNode);
    // Mixed union: split inline vs non-inline members and recurse on each half.
    const arrayOfNonInline = getFilterArrayUnionType(typeNode, (member) => member.type !== "inline"), arrayOfInline = getFilterArrayUnionType(typeNode, (member) => member.type === "inline");
    return t.tsUnionType([
      this.generateArrayTsType(arrayOfNonInline),
      this.generateArrayTsType(arrayOfInline)
    ]);
  }
  // Helper function used to generate TS types for object properties.
  // Keys that are not valid TS identifiers are quoted as string literals.
  generateTsObjectProperty(key, attribute) {
    const type = this.generateTsType(attribute.value), keyNode = isIdentifierName(key) ? t.identifier(key) : t.stringLiteral(key), propertySignature = t.tsPropertySignature(keyNode, t.tsTypeAnnotation(type));
    return propertySignature.optional = attribute.optional, propertySignature;
  }
  // Helper function used to generate TS types for object type nodes.
  // `rest` (open-ended objects) is handled specially: an unknown rest
  // collapses the whole object to `unknown`, an object rest merges its
  // attributes, and an inline rest intersects the resolved reference.
  generateObjectTsType(typeNode) {
    const props = [];
    Object.entries(typeNode.attributes).forEach(([key, attribute]) => {
      props.push(this.generateTsObjectProperty(key, attribute));
    });
    const rest = typeNode.rest;
    if (rest)
      switch (rest.type) {
        case "unknown":
          return t.tsUnknownKeyword();
        case "object": {
          Object.entries(rest.attributes).forEach(([key, attribute]) => {
            props.push(this.generateTsObjectProperty(key, attribute));
          });
          break;
        }
        case "inline": {
          const resolved = this.generateInlineTsType(rest);
          // If the inline reference couldn't be resolved (unknown), don't
          // intersect — unknown would swallow the literal props anyway.
          return t.isTSUnknownKeyword(resolved) ? resolved : t.tsIntersectionType([t.tsTypeLiteral(props), resolved]);
        }
        default:
          throw new Error(`Type "${rest.type}" not found in schema`);
      }
    if (typeNode.dereferencesTo) {
      // Attach a computed, optional property keyed by the internal reference
      // symbol, carrying the referenced type's name as a literal type.
      const derefType = Object.assign(
        t.tsPropertySignature(
          INTERNAL_REFERENCE_SYMBOL,
          t.tsTypeAnnotation(t.tsLiteralType(t.stringLiteral(typeNode.dereferencesTo)))
        ),
        { computed: !0, optional: !0 }
      );
      props.push(derefType);
    }
    return t.tsTypeLiteral(props);
  }
  // Resolves an inline reference to its generated identifier; falls back to
  // `unknown` with an explanatory trailing comment if the name isn't in the schema.
  generateInlineTsType(typeNode) {
    const id = this.identifiers.get(typeNode.name);
    return id ? t.tsTypeReference(id) : t.addComment(
      t.tsUnknownKeyword(),
      "trailing",
      ` Unable to locate the referenced type "${typeNode.name}" in schema`,
      !0
    );
  }
  // Helper function used to generate TS types for union type nodes.
  // Empty union -> never; single member -> that member; otherwise a TS union.
  generateUnionTsType(typeNode) {
    return typeNode.of.length === 0 ? t.tsNeverKeyword() : typeNode.of.length === 1 ? this.generateTsType(typeNode.of[0]) : t.tsUnionType(typeNode.of.map((node) => this.generateTsType(node)));
  }
  // Helper function used to generate TS types for document type nodes.
  generateDocumentTsType(document) {
    const props = Object.entries(document.attributes).map(
      ([key, node]) => this.generateTsObjectProperty(key, node)
    );
    return t.tsTypeLiteral(props);
  }
  /** Names of all types in the schema, in declaration order. */
  typeNames() {
    return this.schema.map((schemaType) => schemaType.name);
  }
  /**
   * Looks up the generated TS type and identifier for a schema type name.
   * Returns `{ tsType, id }`, or undefined if the name is unknown.
   */
  getType(typeName) {
    const tsType = this.tsTypes.get(typeName), id = this.identifiers.get(typeName);
    if (tsType && id) return { tsType, id };
  }
  /** Whether a TS type was generated for the given schema type name. */
  hasType(typeName) {
    return this.tsTypes.has(typeName);
  }
  /**
   * Evaluates a GROQ query against this schema and returns its TS type plus
   * node statistics. Memoized by the identity of the query object (WeakMap),
   * so repeated evaluation of the same extracted query is free.
   */
  evaluateQuery = weakMapMemo(
    ({ query }) => {
      const ast = safeParseQuery(query), typeNode = typeEvaluate(ast, this.schema), tsType = this.generateTsType(typeNode), stats = walkAndCountQueryTypeNodeStats(typeNode);
      return { tsType, stats };
    }
  );
  // Iterates `{ name, tsType, id }` for every schema type, in schema order.
  *[Symbol.iterator]() {
    for (const { name } of this.schema)
      yield { name, ...this.getType(name) };
  }
}
|
|
801
|
-
function walkAndCountQueryTypeNodeStats(typeNode) {
|
|
802
|
-
switch (typeNode.type) {
|
|
803
|
-
case "unknown":
|
|
804
|
-
return { allTypes: 1, unknownTypes: 1, emptyUnions: 0 };
|
|
805
|
-
case "array": {
|
|
806
|
-
const acc = walkAndCountQueryTypeNodeStats(typeNode.of);
|
|
807
|
-
return acc.allTypes += 1, acc;
|
|
808
|
-
}
|
|
809
|
-
case "object": {
|
|
810
|
-
if (typeNode.rest && typeNode.rest.type === "unknown")
|
|
811
|
-
return { allTypes: 2, unknownTypes: 1, emptyUnions: 0 };
|
|
812
|
-
const restStats = typeNode.rest ? walkAndCountQueryTypeNodeStats(typeNode.rest) : { allTypes: 0, unknownTypes: 0, emptyUnions: 0 };
|
|
813
|
-
return restStats.allTypes += 1, Object.values(typeNode.attributes).reduce((acc, attribute) => {
|
|
814
|
-
const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(
|
|
815
|
-
attribute.value
|
|
816
|
-
);
|
|
817
|
-
return {
|
|
818
|
-
allTypes: acc.allTypes + allTypes,
|
|
819
|
-
unknownTypes: acc.unknownTypes + unknownTypes,
|
|
820
|
-
emptyUnions: acc.emptyUnions + emptyUnions
|
|
821
|
-
};
|
|
822
|
-
}, restStats);
|
|
823
|
-
}
|
|
824
|
-
case "union":
|
|
825
|
-
return typeNode.of.length === 0 ? { allTypes: 1, unknownTypes: 0, emptyUnions: 1 } : typeNode.of.reduce(
|
|
826
|
-
(acc, type) => {
|
|
827
|
-
const { allTypes, unknownTypes, emptyUnions } = walkAndCountQueryTypeNodeStats(type);
|
|
828
|
-
return {
|
|
829
|
-
allTypes: acc.allTypes + allTypes,
|
|
830
|
-
unknownTypes: acc.unknownTypes + unknownTypes,
|
|
831
|
-
emptyUnions: acc.emptyUnions + emptyUnions
|
|
832
|
-
};
|
|
833
|
-
},
|
|
834
|
-
{ allTypes: 1, unknownTypes: 0, emptyUnions: 0 }
|
|
835
|
-
// count the union type itself
|
|
836
|
-
);
|
|
837
|
-
default:
|
|
838
|
-
return { allTypes: 1, unknownTypes: 0, emptyUnions: 0 };
|
|
839
|
-
}
|
|
840
|
-
}
|
|
841
|
-
/**
 * Orchestrates full TypeScript code generation: schema type declarations,
 * the internal reference symbol and ArrayOf helper, per-query result types,
 * and the `@sanity/client` query map module augmentation.
 * Per-instance results are memoized via computeOnce / createSelector.
 */
class TypeGenerator {
  // Lazily builds `export declare const <symbol>: unique symbol;` used to tag
  // reference types with the name of the type they dereference to.
  getInternalReferenceSymbolDeclaration = computeOnce(() => {
    const typeOperator = t.tsTypeOperator(t.tsSymbolKeyword(), "unique"), id = INTERNAL_REFERENCE_SYMBOL;
    id.typeAnnotation = t.tsTypeAnnotation(typeOperator);
    const declaration = t.variableDeclaration("const", [t.variableDeclarator(id)]);
    declaration.declare = !0;
    const ast = t.exportNamedDeclaration(declaration), code = generateCode(ast);
    return { id, code, ast };
  });
  // Lazily builds `type ArrayOf<T> = Array<T & { _key: string }>`, the
  // wrapper used for inline array members that carry Sanity's `_key`.
  getArrayOfDeclaration = computeOnce(() => {
    const typeParam = t.tsTypeParameter(null, null, "T"), intersectionType = t.tsIntersectionType([
      t.tsTypeReference(t.identifier("T")),
      t.tsTypeLiteral([
        t.tsPropertySignature(t.identifier("_key"), t.tsTypeAnnotation(t.tsStringKeyword()))
      ])
    ]), arrayType = t.tsTypeReference(
      t.identifier("Array"),
      t.tsTypeParameterInstantiation([intersectionType])
    ), ast = t.tsTypeAliasDeclaration(
      ARRAY_OF,
      t.tsTypeParameterDeclaration([typeParam]),
      arrayType
    ), code = generateCode(ast);
    return { id: ARRAY_OF, code, ast };
  });
  // Memoized on options.schema: one SchemaTypeGenerator per schema instance.
  getSchemaTypeGenerator = createSelector(
    [(options) => options.schema],
    (schema) => new SchemaTypeGenerator(schema)
  );
  // Memoized: exported type-alias declarations for every schema type. The
  // first declaration gets a leading `// Source: <path>` comment when a
  // schemaPath is provided.
  getSchemaTypeDeclarations = createSelector(
    [
      (options) => options.root,
      (options) => options.schemaPath,
      this.getSchemaTypeGenerator
    ],
    (root = process.cwd(), schemaPath, schema) => Array.from(schema).map(({ id, name, tsType }, index) => {
      const typeAlias = t.tsTypeAliasDeclaration(id, null, tsType);
      let ast = t.exportNamedDeclaration(typeAlias);
      index === 0 && schemaPath && (ast = t.addComments(ast, "leading", [
        { type: "CommentLine", value: ` Source: ${normalizePath(root, schemaPath)}` }
      ]));
      const code = generateCode(ast);
      return { id, code, name, tsType, ast };
    })
  );
  // Memoized: `export type AllSanitySchemaTypes = A | B | ...` (or `never`
  // for an empty schema).
  getAllSanitySchemaTypesDeclaration = createSelector(
    [this.getSchemaTypeDeclarations],
    (schemaTypes) => {
      const ast = t.exportNamedDeclaration(
        t.tsTypeAliasDeclaration(
          ALL_SANITY_SCHEMA_TYPES,
          null,
          schemaTypes.length ? t.tsUnionType(schemaTypes.map(({ id }) => t.tsTypeReference(id))) : t.tsNeverKeyword()
        )
      ), code = generateCode(ast);
      return { id: ALL_SANITY_SCHEMA_TYPES, code, ast };
    }
  );
  /**
   * Evaluates every extracted query module against the schema. Each query
   * becomes an exported result-type declaration annotated with its source
   * file, variable name, and (newline-stripped) query string. Evaluation
   * failures are collected as QueryEvaluationError instead of aborting, so
   * one bad query does not sink the whole module. Progress is streamed to
   * the optional reporter; its evaluatedModules stream is always ended.
   *
   * @returns {Promise<Array>} evaluated modules: { filename, queries, errors }.
   */
  static async getEvaluatedModules({
    root = process.cwd(),
    reporter: report,
    schemaTypeGenerator,
    schemaTypeDeclarations,
    queries: extractedModules
  }) {
    if (!extractedModules)
      return report?.stream.evaluatedModules.end(), [];
    // Seed the used-name set with schema type identifiers so query result
    // type names never collide with them.
    const currentIdentifiers = new Set(schemaTypeDeclarations.map(({ id }) => id.name)), evaluatedModuleResults = [];
    for await (const { filename, ...extractedModule } of extractedModules) {
      const queries = [], errors = [...extractedModule.errors];
      for (const extractedQuery of extractedModule.queries) {
        const { variable } = extractedQuery;
        try {
          const { tsType, stats } = schemaTypeGenerator.evaluateQuery(extractedQuery), id = getUniqueIdentifierForName(resultSuffix(variable.id.name), currentIdentifiers), typeAlias = t.tsTypeAliasDeclaration(id, null, tsType), trimmedQuery = extractedQuery.query.replace(/(\r\n|\n|\r)/gm, "").trim(), ast = t.addComments(t.exportNamedDeclaration(typeAlias), "leading", [
            { type: "CommentLine", value: ` Source: ${normalizePath(root, filename)}` },
            { type: "CommentLine", value: ` Variable: ${variable.id.name}` },
            { type: "CommentLine", value: ` Query: ${trimmedQuery}` }
          ]), evaluatedQueryResult = {
            id,
            code: generateCode(ast),
            ast,
            stats,
            tsType,
            ...extractedQuery
          };
          // Reserve the name before moving on so later queries stay unique.
          currentIdentifiers.add(id.name), queries.push(evaluatedQueryResult);
        } catch (cause) {
          errors.push(new QueryEvaluationError({ variable, cause, filename }));
        }
      }
      const evaluatedModule = {
        filename,
        queries,
        errors
      };
      report?.stream.evaluatedModules.emit(evaluatedModule), evaluatedModuleResults.push(evaluatedModule);
    }
    return report?.stream.evaluatedModules.end(), evaluatedModuleResults;
  }
  /**
   * Builds the `declare module "@sanity/client"` augmentation that maps each
   * query string to its generated result type(s), enabling typed client
   * method overloads. Returns an empty program when overloading is disabled
   * or there are no queries.
   */
  static async getQueryMapDeclaration({
    overloadClientMethods = !0,
    evaluatedModules
  }) {
    if (!overloadClientMethods) return { code: "", ast: t.program([]) };
    const queries = evaluatedModules.flatMap((module) => module.queries);
    if (!queries.length) return { code: "", ast: t.program([]) };
    // Identical query strings from different files share one entry, with the
    // result types unioned together.
    const typesByQuerystring = {};
    for (const { id, query } of queries)
      typesByQuerystring[query] ??= [], typesByQuerystring[query].push(id.name);
    const queryReturnInterface = t.tsInterfaceDeclaration(
      SANITY_QUERIES,
      null,
      [],
      t.tsInterfaceBody(
        Object.entries(typesByQuerystring).map(([query, types]) => t.tsPropertySignature(
          t.stringLiteral(query),
          t.tsTypeAnnotation(
            types.length ? t.tsUnionType(types.map((type) => t.tsTypeReference(t.identifier(type)))) : t.tsNeverKeyword()
          )
        ))
      )
    ), declareModule = t.declareModule(
      t.stringLiteral("@sanity/client"),
      t.blockStatement([queryReturnInterface])
    ), clientImport = t.addComments(
      t.importDeclaration([], t.stringLiteral("@sanity/client")),
      "leading",
      [{ type: "CommentLine", value: " Query TypeMap" }]
    ), ast = t.program([clientImport, declareModule]);
    return { code: generateCode(ast), ast };
  }
  /**
   * End-to-end generation: schema type declarations, the AllSanitySchemaTypes
   * union, the internal reference symbol, the ArrayOf helper, per-query
   * result types, and finally the query map module augmentation — emitted in
   * that order into one program/code string.
   *
   * @returns {Promise<{code: string, ast}>} concatenated source and its AST.
   */
  async generateTypes(options) {
    const { reporter: report } = options, internalReferenceSymbol = this.getInternalReferenceSymbolDeclaration(), arrayOfDeclaration = this.getArrayOfDeclaration(), schemaTypeDeclarations = this.getSchemaTypeDeclarations(options), allSanitySchemaTypesDeclaration = this.getAllSanitySchemaTypesDeclaration(options);
    report?.event.generatedSchemaTypes({
      internalReferenceSymbol,
      schemaTypeDeclarations,
      allSanitySchemaTypesDeclaration
    });
    const program = t.program([]);
    let code = "";
    for (const declaration of schemaTypeDeclarations)
      program.body.push(declaration.ast), code += declaration.code;
    program.body.push(allSanitySchemaTypesDeclaration.ast), code += allSanitySchemaTypesDeclaration.code, program.body.push(internalReferenceSymbol.ast), code += internalReferenceSymbol.code, program.body.push(arrayOfDeclaration.ast), code += arrayOfDeclaration.code;
    const evaluatedModules = await TypeGenerator.getEvaluatedModules({
      ...options,
      schemaTypeDeclarations,
      schemaTypeGenerator: this.getSchemaTypeGenerator(options)
    });
    for (const { queries } of evaluatedModules)
      for (const query of queries)
        program.body.push(query.ast), code += query.code;
    const queryMapDeclaration = await TypeGenerator.getQueryMapDeclaration({
      ...options,
      evaluatedModules
    });
    return program.body.push(...queryMapDeclaration.ast.body), code += queryMapDeclaration.code, report?.event.generatedQueryTypes({ queryMapDeclaration }), { code, ast: program };
  }
}
|
|
999
|
-
// Public API of the codegen module (error types, the orchestrating
// TypeGenerator, config/schema readers, and query-extraction helpers).
export {
  QueryExtractionError,
  TypeGenerator,
  configDefinition,
  findQueriesInPath,
  findQueriesInSource,
  getResolver,
  readConfig,
  readSchema,
  registerBabel,
  safeParseQuery
};
|
|
1011
|
-
//# sourceMappingURL=index.js.map
|