@sanity/codegen 5.0.0-next.0-9b570ece82-202507150640 → 5.0.0-next.6
This diff represents the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their public registries.
- package/LICENSE +1 -1
- package/lib/index.d.ts +158 -92
- package/lib/index.js +497 -298
- package/lib/index.js.map +1 -1
- package/package.json +30 -26
- package/lib/index.d.mts +0 -204
- package/lib/index.mjs +0 -753
- package/lib/index.mjs.map +0 -1
package/lib/index.mjs
DELETED
@@ -1,753 +0,0 @@
-import fs$1, { readFile } from "node:fs/promises";
-import * as json5 from "json5";
-import * as z from "zod";
-import { parse } from "groq-js";
-import createDebug from "debug";
-import glob from "globby";
-import fs, { existsSync } from "node:fs";
-import path, { join, resolve } from "node:path";
-import { createRequire } from "node:module";
-import { parse as parse$1, traverse as traverse$1 } from "@babel/core";
-import * as t from "@babel/types";
-import traverse, { Scope } from "@babel/traverse";
-import { loadConfig, createMatchPath } from "tsconfig-paths";
-import register from "@babel/register";
-import { CodeGenerator } from "@babel/generator";
-const configDefintion = z.object({
-  path: z.string().or(z.array(z.string())).default([
-    "./src/**/*.{ts,tsx,js,jsx,mjs,cjs,astro}",
-    "./app/**/*.{ts,tsx,js,jsx,mjs,cjs}",
-    "./sanity/**/*.{ts,tsx,js,jsx,mjs,cjs}"
-  ]),
-  schema: z.string().default("./schema.json"),
-  generates: z.string().default("./sanity.types.ts"),
-  formatGeneratedCode: z.boolean().default(!0),
-  overloadClientMethods: z.boolean().default(!0)
-});
-async function readConfig(path2) {
-  try {
-    const content = await readFile(path2, "utf-8"), json = json5.parse(content);
-    return configDefintion.parseAsync(json);
-  } catch (error) {
-    if (error instanceof z.ZodError)
-      throw new Error(`Error in config file
-${error.errors.map((err) => err.message).join(`
-`)}`);
-    if (typeof error == "object" && error !== null && "code" in error && error.code === "ENOENT")
-      return configDefintion.parse({});
-    throw error;
-  }
-}
-async function readSchema(path2) {
-  const content = await readFile(path2, "utf-8");
-  return JSON.parse(content);
-}
-function safeParseQuery(query) {
-  const params = {};
-  for (const param of extractSliceParams(query))
-    params[param] = 0;
-  return parse(query, { params });
-}
-function* extractSliceParams(query) {
-  const sliceRegex = /\[(\$(\w+)|\d)\.\.\.?(\$(\w+)|\d)\]/g, matches = query.matchAll(sliceRegex);
-  if (matches)
-    for (const match of matches) {
-      const start = match[1] === `$${match[2]}` ? match[2] : null;
-      start !== null && (yield start);
-      const end = match[3] === `$${match[4]}` ? match[4] : null;
-      end !== null && (yield end);
-    }
-}
-function findBabelConfig(path2) {
-  const configPath = join(path2, "babel.config.json");
-  if (existsSync(configPath))
-    return configPath;
-  const parent = resolve(join(path2, ".."));
-  if (parent && parent !== path2)
-    return findBabelConfig(parent);
-  throw new Error("Could not find `babel.config.json` in @sanity/codegen");
-}
-function getBabelConfig(path2) {
-  return { extends: findBabelConfig(__dirname) };
-}
-function parseSourceFile(_source, _filename, babelOptions) {
-  let source = _source, filename = _filename;
-  filename.endsWith(".astro") && (filename += ".ts", source = parseAstro(source));
-  const result = parse$1(source, {
-    ...babelOptions,
-    filename
-  });
-  if (!result)
-    throw new Error(`Failed to parse ${filename}`);
-  return result;
-}
-function parseAstro(source) {
-  const codeFences = source.match(/---\n([\s\S]*?)\n---/g);
-  return codeFences ? codeFences.map((codeFence) => codeFence.split(`
-`).slice(1, -1).join(`
-`)).join(`
-`) : "";
-}
-const debug$2 = createDebug("sanity:codegen:findQueries:debug"), TAGGED_TEMPLATE_ALLOW_LIST = ["groq"], FUNCTION_WRAPPER_ALLOW_LIST = ["defineQuery"];
-function resolveExpression({
-  node,
-  file,
-  scope,
-  filename,
-  resolver,
-  babelConfig,
-  params = [],
-  fnArguments = []
-}) {
-  if (debug$2(
-    `Resolving node ${node.type} in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`
-  ), t.isTaggedTemplateExpression(node) && t.isIdentifier(node.tag) && TAGGED_TEMPLATE_ALLOW_LIST.includes(node.tag.name))
-    return resolveExpression({
-      node: node.quasi,
-      scope,
-      filename,
-      file,
-      resolver,
-      params,
-      babelConfig,
-      fnArguments
-    });
-  if (t.isTemplateLiteral(node)) {
-    const resolvedExpressions = node.expressions.map(
-      (expression) => resolveExpression({
-        node: expression,
-        scope,
-        filename,
-        file,
-        resolver,
-        params,
-        babelConfig,
-        fnArguments
-      })
-    );
-    return node.quasis.map((quasi, idx) => (quasi.value.cooked || "") + (resolvedExpressions[idx] || "")).join("");
-  }
-  if (t.isLiteral(node)) {
-    if (node.type === "NullLiteral" || node.type === "RegExpLiteral")
-      throw new Error(`Unsupported literal type: ${node.type}`);
-    return node.value.toString();
-  }
-  if (t.isIdentifier(node))
-    return resolveIdentifier({
-      node,
-      scope,
-      filename,
-      file,
-      resolver,
-      fnArguments,
-      babelConfig,
-      params
-    });
-  if (t.isVariableDeclarator(node)) {
-    const init = node.init ?? (t.isAssignmentPattern(node.id) && node.id.right);
-    if (!init)
-      throw new Error("Unsupported variable declarator");
-    return resolveExpression({
-      node: init,
-      fnArguments,
-      scope,
-      filename,
-      file,
-      babelConfig,
-      resolver
-    });
-  }
-  if (t.isCallExpression(node) && t.isIdentifier(node.callee) && FUNCTION_WRAPPER_ALLOW_LIST.includes(node.callee.name))
-    return resolveExpression({
-      node: node.arguments[0],
-      scope,
-      filename,
-      file,
-      resolver,
-      babelConfig,
-      params
-    });
-  if (t.isCallExpression(node))
-    return resolveCallExpression({
-      node,
-      scope,
-      filename,
-      file,
-      resolver,
-      babelConfig,
-      params
-    });
-  if (t.isArrowFunctionExpression(node) || t.isFunctionDeclaration(node) || t.isFunctionExpression(node)) {
-    const newScope = new Scope(scope.path, scope);
-    return params.forEach((param, i) => {
-      newScope.push({
-        id: param,
-        init: fnArguments[i]
-      });
-    }), resolveExpression({
-      node: node.body,
-      params: node.params,
-      fnArguments,
-      scope: newScope,
-      filename,
-      file,
-      babelConfig,
-      resolver
-    });
-  }
-  if (t.isNewExpression(node))
-    return resolveExpression({
-      node: node.callee,
-      scope,
-      filename,
-      file,
-      babelConfig,
-      resolver
-    });
-  if (t.isImportDefaultSpecifier(node) || t.isImportSpecifier(node))
-    return resolveImportSpecifier({ node, file, filename, fnArguments, resolver, babelConfig });
-  if (t.isAssignmentPattern(node))
-    return resolveExpression({
-      node: node.right,
-      scope,
-      filename,
-      file,
-      resolver,
-      params,
-      babelConfig,
-      fnArguments
-    });
-  throw new Error(
-    `Unsupported expression type: ${node.type} in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`
-  );
-}
-function resolveIdentifier({
-  node,
-  scope,
-  filename,
-  file,
-  resolver,
-  babelConfig,
-  fnArguments,
-  params
-}) {
-  const paramIndex = params.findIndex(
-    (param) => t.isIdentifier(param) && node.name === param.name || t.isAssignmentPattern(param) && t.isIdentifier(param.left) && node.name === param.left.name
-  );
-  let argument = fnArguments[paramIndex];
-  if (!argument && paramIndex >= 0 && t.isAssignmentPattern(params[paramIndex]) && (argument = params[paramIndex].right), argument && t.isLiteral(argument))
-    return resolveExpression({
-      node: argument,
-      scope,
-      filename,
-      file,
-      resolver,
-      params,
-      babelConfig,
-      fnArguments
-    });
-  const binding = scope.getBinding(node.name);
-  if (binding) {
-    if (t.isIdentifier(binding.path.node) && binding.path.node.name === node.name)
-      throw new Error(
-        `Could not resolve same identifier "${node.name}" in "${filename}:${node.loc?.start.line}:${node.loc?.start.column}"`
-      );
-    return resolveExpression({
-      node: binding.path.node,
-      params,
-      fnArguments,
-      scope,
-      filename,
-      babelConfig,
-      file,
-      resolver
-    });
-  }
-  throw new Error(
-    `Could not find binding for node "${node.name}" in ${filename}:${node.loc?.start.line}:${node.loc?.start.column}`
-  );
-}
-function resolveCallExpression({
-  node,
-  scope,
-  filename,
-  file,
-  resolver,
-  babelConfig,
-  params
-}) {
-  const { callee } = node;
-  return resolveExpression({
-    node: callee,
-    scope,
-    filename,
-    file,
-    resolver,
-    babelConfig,
-    params,
-    fnArguments: node.arguments
-  });
-}
-function resolveImportSpecifier({
-  node,
-  file,
-  filename,
-  fnArguments,
-  resolver,
-  babelConfig
-}) {
-  let importDeclaration;
-  if (traverse(file, {
-    ImportDeclaration(n) {
-      if (t.isImportDeclaration(n.node))
-        for (const specifier of n.node.specifiers) {
-          if (t.isImportDefaultSpecifier(specifier) && specifier.local.loc?.identifierName === node.local.name) {
-            importDeclaration = n.node;
-            break;
-          }
-          specifier.local.name === node.local.name && (importDeclaration = n.node);
-        }
-    }
-  }), !importDeclaration)
-    throw new Error(`Could not find import declaration for ${node.local.name}`);
-  const importName = node.local.name, importFileName = importDeclaration.source.value, importPath = importFileName.startsWith("./") || importFileName.startsWith("../") ? path.resolve(path.dirname(filename), importFileName) : importFileName, resolvedFile = resolver(importPath), source = fs.readFileSync(resolvedFile), tree = parseSourceFile(source.toString(), resolvedFile, babelConfig);
-  let newScope;
-  if (traverse(tree, {
-    Program(p) {
-      newScope = p.scope;
-    }
-  }), !newScope)
-    throw new Error(`Could not find scope for ${filename}`);
-  const binding = newScope.getBinding(importName);
-  if (binding)
-    return resolveExpression({
-      node: binding.path.node,
-      file: tree,
-      scope: newScope,
-      fnArguments,
-      babelConfig,
-      filename: resolvedFile,
-      resolver
-    });
-  let namedExport, newImportName;
-  if (traverse(tree, {
-    ExportDeclaration(p) {
-      if (p.node.type === "ExportNamedDeclaration")
-        for (const specifier of p.node.specifiers)
-          specifier.type === "ExportSpecifier" && specifier.exported.type === "Identifier" && specifier.exported.name === importName && (namedExport = p.node, newImportName = specifier.exported.name);
-    }
-  }), namedExport && newImportName)
-    return resolveExportSpecifier({
-      node: namedExport,
-      importName: newImportName,
-      filename: resolvedFile,
-      fnArguments,
-      resolver,
-      babelConfig
-    });
-  let result;
-  if (traverse(tree, {
-    ExportDeclaration(p) {
-      if (p.node.type === "ExportAllDeclaration")
-        try {
-          result = resolveExportSpecifier({
-            node: p.node,
-            importName,
-            filename: resolvedFile,
-            fnArguments,
-            resolver,
-            babelConfig
-          });
-        } catch (e) {
-          if (e.cause !== `noBinding:${importName}`) throw e;
-        }
-    }
-  }), result) return result;
-  throw new Error(`Could not find binding for import "${importName}" in ${importFileName}`);
-}
-function resolveExportSpecifier({
-  node,
-  importName,
-  filename,
-  fnArguments,
-  babelConfig,
-  resolver
-}) {
-  if (!node.source)
-    throw new Error(`Could not find source for export "${importName}" in ${filename}`);
-  const importFileName = node.source.value, importPath = path.resolve(path.dirname(filename), importFileName), resolvedFile = resolver(importPath), source = fs.readFileSync(resolvedFile), tree = parseSourceFile(source.toString(), resolvedFile, babelConfig);
-  let newScope;
-  if (traverse(tree, {
-    Program(p) {
-      newScope = p.scope;
-    }
-  }), !newScope)
-    throw new Error(`Could not find scope for ${filename}`);
-  const binding = newScope.getBinding(importName);
-  if (binding)
-    return resolveExpression({
-      node: binding.path.node,
-      file: tree,
-      scope: newScope,
-      filename: importFileName,
-      babelConfig,
-      resolver,
-      fnArguments
-    });
-  throw new Error(`Could not find binding for export "${importName}" in ${importFileName}`, {
-    cause: `noBinding:${importName}`
-  });
-}
-const require$1 = createRequire(__filename), groqTagName = "groq", defineQueryFunctionName = "defineQuery", groqModuleName = "groq", nextSanityModuleName = "next-sanity", ignoreValue = "@sanity-typegen-ignore";
-function findQueriesInSource(source, filename, babelConfig = getBabelConfig(), resolver = require$1.resolve) {
-  const queries = [], file = parseSourceFile(source, filename, babelConfig);
-  return traverse$1(file, {
-    // Look for variable declarations, e.g. `const myQuery = groq`... and extract the query.
-    // The variable name is used as the name of the query result type
-    VariableDeclarator(path2) {
-      const { node, scope } = path2, init = node.init, isGroqTemplateTag = t.isTaggedTemplateExpression(init) && t.isIdentifier(init.tag) && init.tag.name === groqTagName, isDefineQueryCall = t.isCallExpression(init) && (isImportFrom(groqModuleName, defineQueryFunctionName, scope, init.callee) || isImportFrom(nextSanityModuleName, defineQueryFunctionName, scope, init.callee));
-      if (t.isIdentifier(node.id) && (isGroqTemplateTag || isDefineQueryCall)) {
-        if (declarationLeadingCommentContains(path2, ignoreValue))
-          return;
-        const queryName = `${node.id.name}`, queryResult = resolveExpression({
-          node: init,
-          file,
-          scope,
-          babelConfig,
-          filename,
-          resolver
-        }), location = node.loc ? {
-          start: {
-            ...node.loc?.start
-          },
-          end: {
-            ...node.loc?.end
-          }
-        } : {};
-        queries.push({ name: queryName, result: queryResult, location });
-      }
-    }
-  }), queries;
-}
-function declarationLeadingCommentContains(path2, comment) {
-  const variableDeclaration = path2.find((node) => node.isVariableDeclaration());
-  return variableDeclaration ? !!(variableDeclaration.node.leadingComments?.find(
-    (commentItem) => commentItem.value.trim() === comment
-  ) || variableDeclaration.parent.leadingComments?.find(
-    (commentItem) => commentItem.value.trim() === comment
-  )) : !1;
-}
-function isImportFrom(moduleName, importName, scope, node) {
-  if (t.isIdentifier(node)) {
-    const binding = scope.getBinding(node.name);
-    if (!binding)
-      return !1;
-    const { path: path2 } = binding;
-    if (t.isImportSpecifier(path2.node))
-      return path2.node.importKind === "value" && path2.parentPath && t.isImportDeclaration(path2.parentPath.node) && path2.parentPath.node.source.value === moduleName && t.isIdentifier(path2.node.imported) && path2.node.imported.name === importName;
-    if (t.isVariableDeclarator(path2.node)) {
-      const { init } = path2.node;
-      return t.isCallExpression(init) && t.isIdentifier(init.callee) && init.callee.name === "require" && t.isStringLiteral(init.arguments[0]) && init.arguments[0].value === moduleName;
-    }
-  }
-  if (t.isMemberExpression(node)) {
-    const { object, property } = node;
-    if (!t.isIdentifier(object))
-      return !1;
-    const binding = scope.getBinding(object.name);
-    if (!binding)
-      return !1;
-    const { path: path2 } = binding;
-    return t.isIdentifier(object) && t.isIdentifier(property) && property.name === importName && t.isImportNamespaceSpecifier(path2.node) && path2.parentPath && t.isImportDeclaration(path2.parentPath.node) && path2.parentPath.node.source.value === moduleName;
-  }
-  return !1;
-}
-const debug$1 = createDebug("sanity:codegen:moduleResolver");
-function getResolver(cwd) {
-  const tsConfig = loadConfig(cwd);
-  if (tsConfig.resultType === "failed")
-    return debug$1("Could not load tsconfig, using default resolver: %s", tsConfig.message), require.resolve;
-  const matchPath = createMatchPath(
-    tsConfig.absoluteBaseUrl,
-    tsConfig.paths,
-    tsConfig.mainFields,
-    tsConfig.addMatchAll
-  ), resolve2 = function(request, options) {
-    const found = matchPath(request);
-    return found !== void 0 ? require.resolve(found, options) : require.resolve(request, options);
-  };
-  return resolve2.paths = (request) => require.resolve.paths(request), resolve2;
-}
-const debug = createDebug("sanity:codegen:findQueries:debug");
-async function* findQueriesInPath({
-  path: path2,
-  babelOptions = getBabelConfig(),
-  resolver = getResolver()
-}) {
-  const queryNames = /* @__PURE__ */ new Set();
-  debug(`Globing ${path2}`);
-  const files = glob.sync(path2, {
-    absolute: !1,
-    ignore: ["**/node_modules/**"],
-    // we never want to look in node_modules
-    onlyFiles: !0
-  }).sort();
-  for (const filename of files)
-    if (typeof filename == "string") {
-      debug(`Found file "${filename}"`);
-      try {
-        const source = await fs$1.readFile(filename, "utf8"), queries = findQueriesInSource(source, filename, babelOptions, resolver);
-        for (const query of queries) {
-          if (queryNames.has(query.name))
-            throw new Error(
-              `Duplicate query name found: "${query.name}". Query names must be unique across all files.`
-            );
-          queryNames.add(query.name);
-        }
-        yield { type: "queries", filename, queries };
-      } catch (error) {
-        debug(`Error in file "${filename}"`, error), yield { type: "error", error, filename };
-      }
-    }
-}
-function registerBabel(babelOptions) {
-  const options = babelOptions || getBabelConfig();
-  register({ ...options, extensions: [".ts", ".tsx", ".js", ".jsx", ".mjs", ".cjs"] });
-}
-const REFERENCE_SYMBOL_NAME = "internalGroqTypeReferenceTo", ALL_SCHEMA_TYPES = "AllSanitySchemaTypes";
-class TypeGenerator {
-  // Simple set to keep track of generated type names, to avoid conflicts
-  generatedTypeName = /* @__PURE__ */ new Set();
-  // Map between type names and their generated type names, used to resolve the correct generated type name
-  typeNameMap = /* @__PURE__ */ new Map();
-  // Map between type nodes and their generated type names, used for query mapping
-  typeNodeNameMap = /* @__PURE__ */ new Map();
-  schema;
-  constructor(schema) {
-    this.schema = schema, this.schema.forEach((s) => {
-      this.getTypeName(s.name, s);
-    });
-  }
-  /**
-   * Generate TypeScript types for the given schema
-   * @returns string
-   * @internal
-   * @beta
-   */
-  generateSchemaTypes() {
-    const typeDeclarations = [], schemaNames = /* @__PURE__ */ new Set();
-    return this.schema.forEach((schema) => {
-      const typeLiteral = this.getTypeNodeType(schema), schemaName = this.typeNodeNameMap.get(schema);
-      if (!schemaName)
-        throw new Error(`Schema name not found for schema ${schema.name}`);
-      schemaNames.add(schemaName);
-      const typeAlias = t.tsTypeAliasDeclaration(t.identifier(schemaName), null, typeLiteral);
-      typeDeclarations.push(t.exportNamedDeclaration(typeAlias));
-    }), typeDeclarations.push(
-      t.exportNamedDeclaration(
-        t.tsTypeAliasDeclaration(
-          t.identifier(this.getTypeName(ALL_SCHEMA_TYPES)),
-          null,
-          t.tsUnionType(
-            [...schemaNames].map((typeName) => t.tsTypeReference(t.identifier(typeName)))
-          )
-        )
-      )
-    ), typeDeclarations.map((decl) => new CodeGenerator(decl).generate().code).join(`
-
-`);
-  }
-  /**
-   * Takes a identifier and a type node and generates a type alias for the type node.
-   * @param identifierName - The name of the type to generated
-   * @param typeNode - The type node to generate the type for
-   * @returns
-   * @internal
-   * @beta
-   */
-  generateTypeNodeTypes(identifierName, typeNode) {
-    const type = this.getTypeNodeType(typeNode), typeName = this.getTypeName(identifierName, typeNode), typeAlias = t.tsTypeAliasDeclaration(t.identifier(typeName), null, type);
-    return new CodeGenerator(t.exportNamedDeclaration(typeAlias)).generate().code.trim();
-  }
-  static generateKnownTypes() {
-    const typeOperator = t.tsTypeOperator(t.tsSymbolKeyword(), "unique"), identifier = t.identifier(REFERENCE_SYMBOL_NAME);
-    identifier.typeAnnotation = t.tsTypeAnnotation(typeOperator);
-    const decleration = t.variableDeclaration("const", [t.variableDeclarator(identifier)]);
-    return decleration.declare = !0, new CodeGenerator(t.exportNamedDeclaration(decleration)).generate().code.trim();
-  }
-  /**
-   * Takes a list of queries from the codebase and generates a type declaration
-   * for SanityClient to consume.
-   *
-   * Note: only types that have previously been generated with `generateTypeNodeTypes`
-   * will be included in the query map.
-   *
-   * @param queries - A list of queries to generate a type declaration for
-   * @returns
-   * @internal
-   * @beta
-   */
-  generateQueryMap(queries) {
-    const typesByQuerystring = {};
-    for (const query of queries) {
-      const name = this.typeNodeNameMap.get(query.typeNode);
-      name && (typesByQuerystring[query.query] ??= [], typesByQuerystring[query.query].push(name));
-    }
-    const queryReturnInterface = t.tsInterfaceDeclaration(
-      t.identifier("SanityQueries"),
-      null,
-      [],
-      t.tsInterfaceBody(
-        Object.entries(typesByQuerystring).map(([query, types]) => t.tsPropertySignature(
-          t.stringLiteral(query),
-          t.tsTypeAnnotation(
-            t.tsUnionType(types.map((type) => t.tsTypeReference(t.identifier(type))))
-          )
-        ))
-      )
-    ), declareModule = t.declareModule(
-      t.stringLiteral("@sanity/client"),
-      t.blockStatement([queryReturnInterface])
-    ), clientImport = t.importDeclaration([], t.stringLiteral("@sanity/client"));
-    return new CodeGenerator(t.program([clientImport, declareModule])).generate().code.trim();
-  }
-  /**
-   * Since we are sanitizing identifiers we migt end up with collisions. Ie there might be a type mux.video and muxVideo, both these
-   * types would be sanityized into MuxVideo. To avoid this we keep track of the generated type names and add a index to the name.
-   * When we reference a type we also keep track of the original name so we can reference the correct type later.
-   */
-  getTypeName(name, typeNode) {
-    const desiredName = uppercaseFirstLetter(sanitizeIdentifier(name));
-    let generatedName = desiredName, i = 2;
-    for (; this.generatedTypeName.has(generatedName); )
-      generatedName = `${desiredName}_${i++}`;
-    return this.generatedTypeName.add(generatedName), this.typeNameMap.set(name, generatedName), typeNode && this.typeNodeNameMap.set(typeNode, generatedName), generatedName;
-  }
-  getTypeNodeType(typeNode) {
-    switch (typeNode.type) {
-      case "string":
-        return typeNode.value !== void 0 ? t.tsLiteralType(t.stringLiteral(typeNode.value)) : t.tsStringKeyword();
-      case "number":
-        return typeNode.value !== void 0 ? t.tsLiteralType(t.numericLiteral(typeNode.value)) : t.tsNumberKeyword();
-      case "boolean":
-        return typeNode.value !== void 0 ? t.tsLiteralType(t.booleanLiteral(typeNode.value)) : t.tsBooleanKeyword();
-      case "unknown":
-        return t.tsUnknownKeyword();
-      case "document":
-        return this.generateDocumentType(typeNode);
-      case "type":
-        return this.getTypeNodeType(typeNode.value);
-      case "array":
-        return this.generateArrayTsType(typeNode);
-      case "object":
-        return this.generateObjectTsType(typeNode);
-      case "union":
-        return this.generateUnionTsType(typeNode);
-      case "inline":
-        return this.generateInlineTsType(typeNode);
-      case "null":
-        return t.tsNullKeyword();
-      default:
-        throw new Error(`Type "${typeNode.type}" not found in schema`);
-    }
-  }
-  // Helper function used to generate TS types for array type nodes.
-  generateArrayTsType(typeNode) {
-    const typeNodes = this.getTypeNodeType(typeNode.of);
-    return t.tsTypeReference(
-      t.identifier("Array"),
-      t.tsTypeParameterInstantiation([typeNodes])
-    );
-  }
-  // Helper function used to generate TS types for object properties.
-  generateObjectProperty(key, attribute) {
-    const type = this.getTypeNodeType(attribute.value), propertySignature = t.tsPropertySignature(
-      t.identifier(sanitizeIdentifier(key)),
-      t.tsTypeAnnotation(type)
-    );
-    return propertySignature.optional = attribute.optional, propertySignature;
-  }
-  // Helper function used to generate TS types for object type nodes.
-  generateObjectTsType(typeNode) {
-    const props = [];
-    Object.entries(typeNode.attributes).forEach(([key, attribute]) => {
-      props.push(this.generateObjectProperty(key, attribute));
-    });
-    const rest = typeNode.rest;
-    if (rest !== void 0)
-      switch (rest.type) {
-        case "unknown":
-          return t.tsUnknownKeyword();
-        case "object": {
-          Object.entries(rest.attributes).forEach(([key, attribute]) => {
-            props.push(this.generateObjectProperty(key, attribute));
-          });
-          break;
-        }
-        case "inline": {
-          const resolved = this.generateInlineTsType(rest);
-          return t.isTSUnknownKeyword(resolved) ? resolved : t.tsIntersectionType([t.tsTypeLiteral(props), resolved]);
-        }
-        default:
-          throw new Error(`Type "${rest.type}" not found in schema`);
-      }
-    if (typeNode.dereferencesTo !== void 0) {
-      const derefType = t.tsPropertySignature(
-        t.identifier(REFERENCE_SYMBOL_NAME),
-        t.tsTypeAnnotation(t.tsLiteralType(t.stringLiteral(typeNode.dereferencesTo)))
-      );
-      derefType.computed = !0, derefType.optional = !0, props.push(derefType);
-    }
-    return t.tsTypeLiteral(props);
-  }
-  generateInlineTsType(typeNode) {
-    const referencedTypeNode = this.schema.find((schema) => schema.name === typeNode.name);
-    if (referencedTypeNode === void 0) {
-      const generatedName2 = this.typeNameMap.get(typeNode.name);
-      if (generatedName2)
-        return t.tsTypeReference(t.identifier(generatedName2));
-      const missing = t.tsUnknownKeyword();
-      return missing.trailingComments = [
-        {
-          type: "CommentLine",
-          value: ` Unable to locate the referenced type "${typeNode.name}" in schema`
-        }
-      ], missing;
-    }
-    const generatedName = this.typeNameMap.get(referencedTypeNode.name);
-    return generatedName ? t.tsTypeReference(t.identifier(generatedName)) : t.tsUnknownKeyword();
-  }
-  // Helper function used to generate TS types for union type nodes.
-  generateUnionTsType(typeNode) {
-    if (typeNode.of.length === 0)
-      return t.tsNeverKeyword();
-    if (typeNode.of.length === 1)
-      return this.getTypeNodeType(typeNode.of[0]);
-    const typeNodes = typeNode.of.map((node) => this.getTypeNodeType(node));
-    return t.tsUnionType(typeNodes);
-  }
-  // Helper function used to generate TS types for document type nodes.
-  generateDocumentType(document) {
-    const props = Object.entries(document.attributes).map(
-      ([key, node]) => this.generateObjectProperty(key, node)
-    );
-    return t.tsTypeLiteral(props);
-  }
-}
-function uppercaseFirstLetter(input) {
-  return input.charAt(0).toUpperCase() + input.slice(1);
-}
-function sanitizeIdentifier(input) {
-  return `${input.replace(/^\d/, "_").replace(/[^$\w]+(.)/g, (_, char) => char.toUpperCase())}`;
-}
-export {
-  TypeGenerator,
-  findQueriesInPath,
-  findQueriesInSource,
-  getResolver,
-  readConfig,
-  readSchema,
-  registerBabel,
-  safeParseQuery
-};
-//# sourceMappingURL=index.mjs.map