@soda-gql/tools 0.13.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +72 -0
- package/codegen.d.ts +2 -0
- package/codegen.js +1 -0
- package/dist/bin.cjs +15509 -0
- package/dist/bin.cjs.map +1 -0
- package/dist/bin.d.cts +839 -0
- package/dist/bin.d.cts.map +1 -0
- package/dist/chunk-BrXtsOCC.cjs +41 -0
- package/dist/codegen.cjs +4704 -0
- package/dist/codegen.cjs.map +1 -0
- package/dist/codegen.d.cts +416 -0
- package/dist/codegen.d.cts.map +1 -0
- package/dist/codegen.d.mts +416 -0
- package/dist/codegen.d.mts.map +1 -0
- package/dist/codegen.mjs +4712 -0
- package/dist/codegen.mjs.map +1 -0
- package/dist/formatter-Glj5a663.cjs +510 -0
- package/dist/formatter-Glj5a663.cjs.map +1 -0
- package/dist/formatter.cjs +509 -0
- package/dist/formatter.cjs.map +1 -0
- package/dist/formatter.d.cts +33 -0
- package/dist/formatter.d.cts.map +1 -0
- package/dist/formatter.d.mts +33 -0
- package/dist/formatter.d.mts.map +1 -0
- package/dist/formatter.mjs +507 -0
- package/dist/formatter.mjs.map +1 -0
- package/dist/index.cjs +13 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +11 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +11 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +12 -0
- package/dist/index.mjs.map +1 -0
- package/dist/typegen.cjs +864 -0
- package/dist/typegen.cjs.map +1 -0
- package/dist/typegen.d.cts +205 -0
- package/dist/typegen.d.cts.map +1 -0
- package/dist/typegen.d.mts +205 -0
- package/dist/typegen.d.mts.map +1 -0
- package/dist/typegen.mjs +859 -0
- package/dist/typegen.mjs.map +1 -0
- package/formatter.d.ts +2 -0
- package/formatter.js +1 -0
- package/index.d.ts +2 -0
- package/index.js +1 -0
- package/package.json +102 -0
- package/typegen.d.ts +2 -0
- package/typegen.js +1 -0
package/dist/codegen.cjs
ADDED
|
@@ -0,0 +1,4704 @@
|
|
|
1
|
+
const require_chunk = require('./chunk-BrXtsOCC.cjs');
|
|
2
|
+
let neverthrow = require("neverthrow");
|
|
3
|
+
let __soda_gql_core = require("@soda-gql/core");
|
|
4
|
+
let graphql = require("graphql");
|
|
5
|
+
let node_fs = require("node:fs");
|
|
6
|
+
let node_path = require("node:path");
|
|
7
|
+
let esbuild = require("esbuild");
|
|
8
|
+
let node_crypto = require("node:crypto");
|
|
9
|
+
|
|
10
|
+
//#region node_modules/picomatch/lib/constants.js
|
|
11
|
+
var require_constants = /* @__PURE__ */ require_chunk.__commonJSMin(((exports, module) => {
|
|
12
|
+
const WIN_SLASH = "\\\\/";
|
|
13
|
+
const WIN_NO_SLASH = `[^${WIN_SLASH}]`;
|
|
14
|
+
/**
|
|
15
|
+
* Posix glob regex
|
|
16
|
+
*/
|
|
17
|
+
const DOT_LITERAL = "\\.";
|
|
18
|
+
const PLUS_LITERAL = "\\+";
|
|
19
|
+
const QMARK_LITERAL = "\\?";
|
|
20
|
+
const SLASH_LITERAL = "\\/";
|
|
21
|
+
const ONE_CHAR = "(?=.)";
|
|
22
|
+
const QMARK = "[^/]";
|
|
23
|
+
const END_ANCHOR = `(?:${SLASH_LITERAL}|$)`;
|
|
24
|
+
const START_ANCHOR = `(?:^|${SLASH_LITERAL})`;
|
|
25
|
+
const DOTS_SLASH = `${DOT_LITERAL}{1,2}${END_ANCHOR}`;
|
|
26
|
+
const NO_DOT = `(?!${DOT_LITERAL})`;
|
|
27
|
+
const NO_DOTS = `(?!${START_ANCHOR}${DOTS_SLASH})`;
|
|
28
|
+
const NO_DOT_SLASH = `(?!${DOT_LITERAL}{0,1}${END_ANCHOR})`;
|
|
29
|
+
const NO_DOTS_SLASH = `(?!${DOTS_SLASH})`;
|
|
30
|
+
const QMARK_NO_DOT = `[^.${SLASH_LITERAL}]`;
|
|
31
|
+
const STAR = `${QMARK}*?`;
|
|
32
|
+
const SEP = "/";
|
|
33
|
+
const POSIX_CHARS = {
|
|
34
|
+
DOT_LITERAL,
|
|
35
|
+
PLUS_LITERAL,
|
|
36
|
+
QMARK_LITERAL,
|
|
37
|
+
SLASH_LITERAL,
|
|
38
|
+
ONE_CHAR,
|
|
39
|
+
QMARK,
|
|
40
|
+
END_ANCHOR,
|
|
41
|
+
DOTS_SLASH,
|
|
42
|
+
NO_DOT,
|
|
43
|
+
NO_DOTS,
|
|
44
|
+
NO_DOT_SLASH,
|
|
45
|
+
NO_DOTS_SLASH,
|
|
46
|
+
QMARK_NO_DOT,
|
|
47
|
+
STAR,
|
|
48
|
+
START_ANCHOR,
|
|
49
|
+
SEP
|
|
50
|
+
};
|
|
51
|
+
/**
|
|
52
|
+
* Windows glob regex
|
|
53
|
+
*/
|
|
54
|
+
const WINDOWS_CHARS = {
|
|
55
|
+
...POSIX_CHARS,
|
|
56
|
+
SLASH_LITERAL: `[${WIN_SLASH}]`,
|
|
57
|
+
QMARK: WIN_NO_SLASH,
|
|
58
|
+
STAR: `${WIN_NO_SLASH}*?`,
|
|
59
|
+
DOTS_SLASH: `${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$)`,
|
|
60
|
+
NO_DOT: `(?!${DOT_LITERAL})`,
|
|
61
|
+
NO_DOTS: `(?!(?:^|[${WIN_SLASH}])${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
|
|
62
|
+
NO_DOT_SLASH: `(?!${DOT_LITERAL}{0,1}(?:[${WIN_SLASH}]|$))`,
|
|
63
|
+
NO_DOTS_SLASH: `(?!${DOT_LITERAL}{1,2}(?:[${WIN_SLASH}]|$))`,
|
|
64
|
+
QMARK_NO_DOT: `[^.${WIN_SLASH}]`,
|
|
65
|
+
START_ANCHOR: `(?:^|[${WIN_SLASH}])`,
|
|
66
|
+
END_ANCHOR: `(?:[${WIN_SLASH}]|$)`,
|
|
67
|
+
SEP: "\\"
|
|
68
|
+
};
|
|
69
|
+
/**
|
|
70
|
+
* POSIX Bracket Regex
|
|
71
|
+
*/
|
|
72
|
+
const POSIX_REGEX_SOURCE$1 = {
|
|
73
|
+
alnum: "a-zA-Z0-9",
|
|
74
|
+
alpha: "a-zA-Z",
|
|
75
|
+
ascii: "\\x00-\\x7F",
|
|
76
|
+
blank: " \\t",
|
|
77
|
+
cntrl: "\\x00-\\x1F\\x7F",
|
|
78
|
+
digit: "0-9",
|
|
79
|
+
graph: "\\x21-\\x7E",
|
|
80
|
+
lower: "a-z",
|
|
81
|
+
print: "\\x20-\\x7E ",
|
|
82
|
+
punct: "\\-!\"#$%&'()\\*+,./:;<=>?@[\\]^_`{|}~",
|
|
83
|
+
space: " \\t\\r\\n\\v\\f",
|
|
84
|
+
upper: "A-Z",
|
|
85
|
+
word: "A-Za-z0-9_",
|
|
86
|
+
xdigit: "A-Fa-f0-9"
|
|
87
|
+
};
|
|
88
|
+
module.exports = {
|
|
89
|
+
MAX_LENGTH: 1024 * 64,
|
|
90
|
+
POSIX_REGEX_SOURCE: POSIX_REGEX_SOURCE$1,
|
|
91
|
+
REGEX_BACKSLASH: /\\(?![*+?^${}(|)[\]])/g,
|
|
92
|
+
REGEX_NON_SPECIAL_CHARS: /^[^@![\].,$*+?^{}()|\\/]+/,
|
|
93
|
+
REGEX_SPECIAL_CHARS: /[-*+?.^${}(|)[\]]/,
|
|
94
|
+
REGEX_SPECIAL_CHARS_BACKREF: /(\\?)((\W)(\3*))/g,
|
|
95
|
+
REGEX_SPECIAL_CHARS_GLOBAL: /([-*+?.^${}(|)[\]])/g,
|
|
96
|
+
REGEX_REMOVE_BACKSLASH: /(?:\[.*?[^\\]\]|\\(?=.))/g,
|
|
97
|
+
REPLACEMENTS: {
|
|
98
|
+
__proto__: null,
|
|
99
|
+
"***": "*",
|
|
100
|
+
"**/**": "**",
|
|
101
|
+
"**/**/**": "**"
|
|
102
|
+
},
|
|
103
|
+
CHAR_0: 48,
|
|
104
|
+
CHAR_9: 57,
|
|
105
|
+
CHAR_UPPERCASE_A: 65,
|
|
106
|
+
CHAR_LOWERCASE_A: 97,
|
|
107
|
+
CHAR_UPPERCASE_Z: 90,
|
|
108
|
+
CHAR_LOWERCASE_Z: 122,
|
|
109
|
+
CHAR_LEFT_PARENTHESES: 40,
|
|
110
|
+
CHAR_RIGHT_PARENTHESES: 41,
|
|
111
|
+
CHAR_ASTERISK: 42,
|
|
112
|
+
CHAR_AMPERSAND: 38,
|
|
113
|
+
CHAR_AT: 64,
|
|
114
|
+
CHAR_BACKWARD_SLASH: 92,
|
|
115
|
+
CHAR_CARRIAGE_RETURN: 13,
|
|
116
|
+
CHAR_CIRCUMFLEX_ACCENT: 94,
|
|
117
|
+
CHAR_COLON: 58,
|
|
118
|
+
CHAR_COMMA: 44,
|
|
119
|
+
CHAR_DOT: 46,
|
|
120
|
+
CHAR_DOUBLE_QUOTE: 34,
|
|
121
|
+
CHAR_EQUAL: 61,
|
|
122
|
+
CHAR_EXCLAMATION_MARK: 33,
|
|
123
|
+
CHAR_FORM_FEED: 12,
|
|
124
|
+
CHAR_FORWARD_SLASH: 47,
|
|
125
|
+
CHAR_GRAVE_ACCENT: 96,
|
|
126
|
+
CHAR_HASH: 35,
|
|
127
|
+
CHAR_HYPHEN_MINUS: 45,
|
|
128
|
+
CHAR_LEFT_ANGLE_BRACKET: 60,
|
|
129
|
+
CHAR_LEFT_CURLY_BRACE: 123,
|
|
130
|
+
CHAR_LEFT_SQUARE_BRACKET: 91,
|
|
131
|
+
CHAR_LINE_FEED: 10,
|
|
132
|
+
CHAR_NO_BREAK_SPACE: 160,
|
|
133
|
+
CHAR_PERCENT: 37,
|
|
134
|
+
CHAR_PLUS: 43,
|
|
135
|
+
CHAR_QUESTION_MARK: 63,
|
|
136
|
+
CHAR_RIGHT_ANGLE_BRACKET: 62,
|
|
137
|
+
CHAR_RIGHT_CURLY_BRACE: 125,
|
|
138
|
+
CHAR_RIGHT_SQUARE_BRACKET: 93,
|
|
139
|
+
CHAR_SEMICOLON: 59,
|
|
140
|
+
CHAR_SINGLE_QUOTE: 39,
|
|
141
|
+
CHAR_SPACE: 32,
|
|
142
|
+
CHAR_TAB: 9,
|
|
143
|
+
CHAR_UNDERSCORE: 95,
|
|
144
|
+
CHAR_VERTICAL_LINE: 124,
|
|
145
|
+
CHAR_ZERO_WIDTH_NOBREAK_SPACE: 65279,
|
|
146
|
+
extglobChars(chars) {
|
|
147
|
+
return {
|
|
148
|
+
"!": {
|
|
149
|
+
type: "negate",
|
|
150
|
+
open: "(?:(?!(?:",
|
|
151
|
+
close: `))${chars.STAR})`
|
|
152
|
+
},
|
|
153
|
+
"?": {
|
|
154
|
+
type: "qmark",
|
|
155
|
+
open: "(?:",
|
|
156
|
+
close: ")?"
|
|
157
|
+
},
|
|
158
|
+
"+": {
|
|
159
|
+
type: "plus",
|
|
160
|
+
open: "(?:",
|
|
161
|
+
close: ")+"
|
|
162
|
+
},
|
|
163
|
+
"*": {
|
|
164
|
+
type: "star",
|
|
165
|
+
open: "(?:",
|
|
166
|
+
close: ")*"
|
|
167
|
+
},
|
|
168
|
+
"@": {
|
|
169
|
+
type: "at",
|
|
170
|
+
open: "(?:",
|
|
171
|
+
close: ")"
|
|
172
|
+
}
|
|
173
|
+
};
|
|
174
|
+
},
|
|
175
|
+
globChars(win32) {
|
|
176
|
+
return win32 === true ? WINDOWS_CHARS : POSIX_CHARS;
|
|
177
|
+
}
|
|
178
|
+
};
|
|
179
|
+
}));
|
|
180
|
+
|
|
181
|
+
//#endregion
|
|
182
|
+
//#region node_modules/picomatch/lib/utils.js
|
|
183
|
+
var require_utils = /* @__PURE__ */ require_chunk.__commonJSMin(((exports) => {
|
|
184
|
+
const { REGEX_BACKSLASH, REGEX_REMOVE_BACKSLASH, REGEX_SPECIAL_CHARS, REGEX_SPECIAL_CHARS_GLOBAL } = require_constants();
|
|
185
|
+
exports.isObject = (val) => val !== null && typeof val === "object" && !Array.isArray(val);
|
|
186
|
+
exports.hasRegexChars = (str) => REGEX_SPECIAL_CHARS.test(str);
|
|
187
|
+
exports.isRegexChar = (str) => str.length === 1 && exports.hasRegexChars(str);
|
|
188
|
+
exports.escapeRegex = (str) => str.replace(REGEX_SPECIAL_CHARS_GLOBAL, "\\$1");
|
|
189
|
+
exports.toPosixSlashes = (str) => str.replace(REGEX_BACKSLASH, "/");
|
|
190
|
+
exports.isWindows = () => {
|
|
191
|
+
if (typeof navigator !== "undefined" && navigator.platform) {
|
|
192
|
+
const platform = navigator.platform.toLowerCase();
|
|
193
|
+
return platform === "win32" || platform === "windows";
|
|
194
|
+
}
|
|
195
|
+
if (typeof process !== "undefined" && process.platform) {
|
|
196
|
+
return process.platform === "win32";
|
|
197
|
+
}
|
|
198
|
+
return false;
|
|
199
|
+
};
|
|
200
|
+
exports.removeBackslashes = (str) => {
|
|
201
|
+
return str.replace(REGEX_REMOVE_BACKSLASH, (match) => {
|
|
202
|
+
return match === "\\" ? "" : match;
|
|
203
|
+
});
|
|
204
|
+
};
|
|
205
|
+
exports.escapeLast = (input, char, lastIdx) => {
|
|
206
|
+
const idx = input.lastIndexOf(char, lastIdx);
|
|
207
|
+
if (idx === -1) return input;
|
|
208
|
+
if (input[idx - 1] === "\\") return exports.escapeLast(input, char, idx - 1);
|
|
209
|
+
return `${input.slice(0, idx)}\\${input.slice(idx)}`;
|
|
210
|
+
};
|
|
211
|
+
exports.removePrefix = (input, state = {}) => {
|
|
212
|
+
let output = input;
|
|
213
|
+
if (output.startsWith("./")) {
|
|
214
|
+
output = output.slice(2);
|
|
215
|
+
state.prefix = "./";
|
|
216
|
+
}
|
|
217
|
+
return output;
|
|
218
|
+
};
|
|
219
|
+
exports.wrapOutput = (input, state = {}, options = {}) => {
|
|
220
|
+
const prepend = options.contains ? "" : "^";
|
|
221
|
+
const append = options.contains ? "" : "$";
|
|
222
|
+
let output = `${prepend}(?:${input})${append}`;
|
|
223
|
+
if (state.negated === true) {
|
|
224
|
+
output = `(?:^(?!${output}).*$)`;
|
|
225
|
+
}
|
|
226
|
+
return output;
|
|
227
|
+
};
|
|
228
|
+
exports.basename = (path, { windows } = {}) => {
|
|
229
|
+
const segs = path.split(windows ? /[\\/]/ : "/");
|
|
230
|
+
const last = segs[segs.length - 1];
|
|
231
|
+
if (last === "") {
|
|
232
|
+
return segs[segs.length - 2];
|
|
233
|
+
}
|
|
234
|
+
return last;
|
|
235
|
+
};
|
|
236
|
+
}));
|
|
237
|
+
|
|
238
|
+
//#endregion
|
|
239
|
+
//#region node_modules/picomatch/lib/scan.js
|
|
240
|
+
var require_scan = /* @__PURE__ */ require_chunk.__commonJSMin(((exports, module) => {
|
|
241
|
+
const utils$3 = require_utils();
|
|
242
|
+
const { CHAR_ASTERISK, CHAR_AT, CHAR_BACKWARD_SLASH, CHAR_COMMA, CHAR_DOT, CHAR_EXCLAMATION_MARK, CHAR_FORWARD_SLASH, CHAR_LEFT_CURLY_BRACE, CHAR_LEFT_PARENTHESES, CHAR_LEFT_SQUARE_BRACKET, CHAR_PLUS, CHAR_QUESTION_MARK, CHAR_RIGHT_CURLY_BRACE, CHAR_RIGHT_PARENTHESES, CHAR_RIGHT_SQUARE_BRACKET } = require_constants();
|
|
243
|
+
const isPathSeparator = (code) => {
|
|
244
|
+
return code === CHAR_FORWARD_SLASH || code === CHAR_BACKWARD_SLASH;
|
|
245
|
+
};
|
|
246
|
+
const depth = (token) => {
|
|
247
|
+
if (token.isPrefix !== true) {
|
|
248
|
+
token.depth = token.isGlobstar ? Infinity : 1;
|
|
249
|
+
}
|
|
250
|
+
};
|
|
251
|
+
/**
|
|
252
|
+
* Quickly scans a glob pattern and returns an object with a handful of
|
|
253
|
+
* useful properties, like `isGlob`, `path` (the leading non-glob, if it exists),
|
|
254
|
+
* `glob` (the actual pattern), `negated` (true if the path starts with `!` but not
|
|
255
|
+
* with `!(`) and `negatedExtglob` (true if the path starts with `!(`).
|
|
256
|
+
*
|
|
257
|
+
* ```js
|
|
258
|
+
* const pm = require('picomatch');
|
|
259
|
+
* console.log(pm.scan('foo/bar/*.js'));
|
|
260
|
+
* { isGlob: true, input: 'foo/bar/*.js', base: 'foo/bar', glob: '*.js' }
|
|
261
|
+
* ```
|
|
262
|
+
* @param {String} `str`
|
|
263
|
+
* @param {Object} `options`
|
|
264
|
+
* @return {Object} Returns an object with tokens and regex source string.
|
|
265
|
+
* @api public
|
|
266
|
+
*/
|
|
267
|
+
const scan$1 = (input, options) => {
|
|
268
|
+
const opts = options || {};
|
|
269
|
+
const length = input.length - 1;
|
|
270
|
+
const scanToEnd = opts.parts === true || opts.scanToEnd === true;
|
|
271
|
+
const slashes = [];
|
|
272
|
+
const tokens = [];
|
|
273
|
+
const parts = [];
|
|
274
|
+
let str = input;
|
|
275
|
+
let index = -1;
|
|
276
|
+
let start = 0;
|
|
277
|
+
let lastIndex = 0;
|
|
278
|
+
let isBrace = false;
|
|
279
|
+
let isBracket = false;
|
|
280
|
+
let isGlob = false;
|
|
281
|
+
let isExtglob = false;
|
|
282
|
+
let isGlobstar = false;
|
|
283
|
+
let braceEscaped = false;
|
|
284
|
+
let backslashes = false;
|
|
285
|
+
let negated = false;
|
|
286
|
+
let negatedExtglob = false;
|
|
287
|
+
let finished = false;
|
|
288
|
+
let braces = 0;
|
|
289
|
+
let prev;
|
|
290
|
+
let code;
|
|
291
|
+
let token = {
|
|
292
|
+
value: "",
|
|
293
|
+
depth: 0,
|
|
294
|
+
isGlob: false
|
|
295
|
+
};
|
|
296
|
+
const eos = () => index >= length;
|
|
297
|
+
const peek = () => str.charCodeAt(index + 1);
|
|
298
|
+
const advance = () => {
|
|
299
|
+
prev = code;
|
|
300
|
+
return str.charCodeAt(++index);
|
|
301
|
+
};
|
|
302
|
+
while (index < length) {
|
|
303
|
+
code = advance();
|
|
304
|
+
let next;
|
|
305
|
+
if (code === CHAR_BACKWARD_SLASH) {
|
|
306
|
+
backslashes = token.backslashes = true;
|
|
307
|
+
code = advance();
|
|
308
|
+
if (code === CHAR_LEFT_CURLY_BRACE) {
|
|
309
|
+
braceEscaped = true;
|
|
310
|
+
}
|
|
311
|
+
continue;
|
|
312
|
+
}
|
|
313
|
+
if (braceEscaped === true || code === CHAR_LEFT_CURLY_BRACE) {
|
|
314
|
+
braces++;
|
|
315
|
+
while (eos() !== true && (code = advance())) {
|
|
316
|
+
if (code === CHAR_BACKWARD_SLASH) {
|
|
317
|
+
backslashes = token.backslashes = true;
|
|
318
|
+
advance();
|
|
319
|
+
continue;
|
|
320
|
+
}
|
|
321
|
+
if (code === CHAR_LEFT_CURLY_BRACE) {
|
|
322
|
+
braces++;
|
|
323
|
+
continue;
|
|
324
|
+
}
|
|
325
|
+
if (braceEscaped !== true && code === CHAR_DOT && (code = advance()) === CHAR_DOT) {
|
|
326
|
+
isBrace = token.isBrace = true;
|
|
327
|
+
isGlob = token.isGlob = true;
|
|
328
|
+
finished = true;
|
|
329
|
+
if (scanToEnd === true) {
|
|
330
|
+
continue;
|
|
331
|
+
}
|
|
332
|
+
break;
|
|
333
|
+
}
|
|
334
|
+
if (braceEscaped !== true && code === CHAR_COMMA) {
|
|
335
|
+
isBrace = token.isBrace = true;
|
|
336
|
+
isGlob = token.isGlob = true;
|
|
337
|
+
finished = true;
|
|
338
|
+
if (scanToEnd === true) {
|
|
339
|
+
continue;
|
|
340
|
+
}
|
|
341
|
+
break;
|
|
342
|
+
}
|
|
343
|
+
if (code === CHAR_RIGHT_CURLY_BRACE) {
|
|
344
|
+
braces--;
|
|
345
|
+
if (braces === 0) {
|
|
346
|
+
braceEscaped = false;
|
|
347
|
+
isBrace = token.isBrace = true;
|
|
348
|
+
finished = true;
|
|
349
|
+
break;
|
|
350
|
+
}
|
|
351
|
+
}
|
|
352
|
+
}
|
|
353
|
+
if (scanToEnd === true) {
|
|
354
|
+
continue;
|
|
355
|
+
}
|
|
356
|
+
break;
|
|
357
|
+
}
|
|
358
|
+
if (code === CHAR_FORWARD_SLASH) {
|
|
359
|
+
slashes.push(index);
|
|
360
|
+
tokens.push(token);
|
|
361
|
+
token = {
|
|
362
|
+
value: "",
|
|
363
|
+
depth: 0,
|
|
364
|
+
isGlob: false
|
|
365
|
+
};
|
|
366
|
+
if (finished === true) continue;
|
|
367
|
+
if (prev === CHAR_DOT && index === start + 1) {
|
|
368
|
+
start += 2;
|
|
369
|
+
continue;
|
|
370
|
+
}
|
|
371
|
+
lastIndex = index + 1;
|
|
372
|
+
continue;
|
|
373
|
+
}
|
|
374
|
+
if (opts.noext !== true) {
|
|
375
|
+
const isExtglobChar = code === CHAR_PLUS || code === CHAR_AT || code === CHAR_ASTERISK || code === CHAR_QUESTION_MARK || code === CHAR_EXCLAMATION_MARK;
|
|
376
|
+
if (isExtglobChar === true && peek() === CHAR_LEFT_PARENTHESES) {
|
|
377
|
+
isGlob = token.isGlob = true;
|
|
378
|
+
isExtglob = token.isExtglob = true;
|
|
379
|
+
finished = true;
|
|
380
|
+
if (code === CHAR_EXCLAMATION_MARK && index === start) {
|
|
381
|
+
negatedExtglob = true;
|
|
382
|
+
}
|
|
383
|
+
if (scanToEnd === true) {
|
|
384
|
+
while (eos() !== true && (code = advance())) {
|
|
385
|
+
if (code === CHAR_BACKWARD_SLASH) {
|
|
386
|
+
backslashes = token.backslashes = true;
|
|
387
|
+
code = advance();
|
|
388
|
+
continue;
|
|
389
|
+
}
|
|
390
|
+
if (code === CHAR_RIGHT_PARENTHESES) {
|
|
391
|
+
isGlob = token.isGlob = true;
|
|
392
|
+
finished = true;
|
|
393
|
+
break;
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
continue;
|
|
397
|
+
}
|
|
398
|
+
break;
|
|
399
|
+
}
|
|
400
|
+
}
|
|
401
|
+
if (code === CHAR_ASTERISK) {
|
|
402
|
+
if (prev === CHAR_ASTERISK) isGlobstar = token.isGlobstar = true;
|
|
403
|
+
isGlob = token.isGlob = true;
|
|
404
|
+
finished = true;
|
|
405
|
+
if (scanToEnd === true) {
|
|
406
|
+
continue;
|
|
407
|
+
}
|
|
408
|
+
break;
|
|
409
|
+
}
|
|
410
|
+
if (code === CHAR_QUESTION_MARK) {
|
|
411
|
+
isGlob = token.isGlob = true;
|
|
412
|
+
finished = true;
|
|
413
|
+
if (scanToEnd === true) {
|
|
414
|
+
continue;
|
|
415
|
+
}
|
|
416
|
+
break;
|
|
417
|
+
}
|
|
418
|
+
if (code === CHAR_LEFT_SQUARE_BRACKET) {
|
|
419
|
+
while (eos() !== true && (next = advance())) {
|
|
420
|
+
if (next === CHAR_BACKWARD_SLASH) {
|
|
421
|
+
backslashes = token.backslashes = true;
|
|
422
|
+
advance();
|
|
423
|
+
continue;
|
|
424
|
+
}
|
|
425
|
+
if (next === CHAR_RIGHT_SQUARE_BRACKET) {
|
|
426
|
+
isBracket = token.isBracket = true;
|
|
427
|
+
isGlob = token.isGlob = true;
|
|
428
|
+
finished = true;
|
|
429
|
+
break;
|
|
430
|
+
}
|
|
431
|
+
}
|
|
432
|
+
if (scanToEnd === true) {
|
|
433
|
+
continue;
|
|
434
|
+
}
|
|
435
|
+
break;
|
|
436
|
+
}
|
|
437
|
+
if (opts.nonegate !== true && code === CHAR_EXCLAMATION_MARK && index === start) {
|
|
438
|
+
negated = token.negated = true;
|
|
439
|
+
start++;
|
|
440
|
+
continue;
|
|
441
|
+
}
|
|
442
|
+
if (opts.noparen !== true && code === CHAR_LEFT_PARENTHESES) {
|
|
443
|
+
isGlob = token.isGlob = true;
|
|
444
|
+
if (scanToEnd === true) {
|
|
445
|
+
while (eos() !== true && (code = advance())) {
|
|
446
|
+
if (code === CHAR_LEFT_PARENTHESES) {
|
|
447
|
+
backslashes = token.backslashes = true;
|
|
448
|
+
code = advance();
|
|
449
|
+
continue;
|
|
450
|
+
}
|
|
451
|
+
if (code === CHAR_RIGHT_PARENTHESES) {
|
|
452
|
+
finished = true;
|
|
453
|
+
break;
|
|
454
|
+
}
|
|
455
|
+
}
|
|
456
|
+
continue;
|
|
457
|
+
}
|
|
458
|
+
break;
|
|
459
|
+
}
|
|
460
|
+
if (isGlob === true) {
|
|
461
|
+
finished = true;
|
|
462
|
+
if (scanToEnd === true) {
|
|
463
|
+
continue;
|
|
464
|
+
}
|
|
465
|
+
break;
|
|
466
|
+
}
|
|
467
|
+
}
|
|
468
|
+
if (opts.noext === true) {
|
|
469
|
+
isExtglob = false;
|
|
470
|
+
isGlob = false;
|
|
471
|
+
}
|
|
472
|
+
let base = str;
|
|
473
|
+
let prefix = "";
|
|
474
|
+
let glob = "";
|
|
475
|
+
if (start > 0) {
|
|
476
|
+
prefix = str.slice(0, start);
|
|
477
|
+
str = str.slice(start);
|
|
478
|
+
lastIndex -= start;
|
|
479
|
+
}
|
|
480
|
+
if (base && isGlob === true && lastIndex > 0) {
|
|
481
|
+
base = str.slice(0, lastIndex);
|
|
482
|
+
glob = str.slice(lastIndex);
|
|
483
|
+
} else if (isGlob === true) {
|
|
484
|
+
base = "";
|
|
485
|
+
glob = str;
|
|
486
|
+
} else {
|
|
487
|
+
base = str;
|
|
488
|
+
}
|
|
489
|
+
if (base && base !== "" && base !== "/" && base !== str) {
|
|
490
|
+
if (isPathSeparator(base.charCodeAt(base.length - 1))) {
|
|
491
|
+
base = base.slice(0, -1);
|
|
492
|
+
}
|
|
493
|
+
}
|
|
494
|
+
if (opts.unescape === true) {
|
|
495
|
+
if (glob) glob = utils$3.removeBackslashes(glob);
|
|
496
|
+
if (base && backslashes === true) {
|
|
497
|
+
base = utils$3.removeBackslashes(base);
|
|
498
|
+
}
|
|
499
|
+
}
|
|
500
|
+
const state = {
|
|
501
|
+
prefix,
|
|
502
|
+
input,
|
|
503
|
+
start,
|
|
504
|
+
base,
|
|
505
|
+
glob,
|
|
506
|
+
isBrace,
|
|
507
|
+
isBracket,
|
|
508
|
+
isGlob,
|
|
509
|
+
isExtglob,
|
|
510
|
+
isGlobstar,
|
|
511
|
+
negated,
|
|
512
|
+
negatedExtglob
|
|
513
|
+
};
|
|
514
|
+
if (opts.tokens === true) {
|
|
515
|
+
state.maxDepth = 0;
|
|
516
|
+
if (!isPathSeparator(code)) {
|
|
517
|
+
tokens.push(token);
|
|
518
|
+
}
|
|
519
|
+
state.tokens = tokens;
|
|
520
|
+
}
|
|
521
|
+
if (opts.parts === true || opts.tokens === true) {
|
|
522
|
+
let prevIndex;
|
|
523
|
+
for (let idx = 0; idx < slashes.length; idx++) {
|
|
524
|
+
const n = prevIndex ? prevIndex + 1 : start;
|
|
525
|
+
const i = slashes[idx];
|
|
526
|
+
const value = input.slice(n, i);
|
|
527
|
+
if (opts.tokens) {
|
|
528
|
+
if (idx === 0 && start !== 0) {
|
|
529
|
+
tokens[idx].isPrefix = true;
|
|
530
|
+
tokens[idx].value = prefix;
|
|
531
|
+
} else {
|
|
532
|
+
tokens[idx].value = value;
|
|
533
|
+
}
|
|
534
|
+
depth(tokens[idx]);
|
|
535
|
+
state.maxDepth += tokens[idx].depth;
|
|
536
|
+
}
|
|
537
|
+
if (idx !== 0 || value !== "") {
|
|
538
|
+
parts.push(value);
|
|
539
|
+
}
|
|
540
|
+
prevIndex = i;
|
|
541
|
+
}
|
|
542
|
+
if (prevIndex && prevIndex + 1 < input.length) {
|
|
543
|
+
const value = input.slice(prevIndex + 1);
|
|
544
|
+
parts.push(value);
|
|
545
|
+
if (opts.tokens) {
|
|
546
|
+
tokens[tokens.length - 1].value = value;
|
|
547
|
+
depth(tokens[tokens.length - 1]);
|
|
548
|
+
state.maxDepth += tokens[tokens.length - 1].depth;
|
|
549
|
+
}
|
|
550
|
+
}
|
|
551
|
+
state.slashes = slashes;
|
|
552
|
+
state.parts = parts;
|
|
553
|
+
}
|
|
554
|
+
return state;
|
|
555
|
+
};
|
|
556
|
+
module.exports = scan$1;
|
|
557
|
+
}));
|
|
558
|
+
|
|
559
|
+
//#endregion
|
|
560
|
+
//#region node_modules/picomatch/lib/parse.js
|
|
561
|
+
var require_parse = /* @__PURE__ */ require_chunk.__commonJSMin(((exports, module) => {
|
|
562
|
+
const constants$1 = require_constants();
|
|
563
|
+
const utils$2 = require_utils();
|
|
564
|
+
/**
|
|
565
|
+
* Constants
|
|
566
|
+
*/
|
|
567
|
+
const { MAX_LENGTH, POSIX_REGEX_SOURCE, REGEX_NON_SPECIAL_CHARS, REGEX_SPECIAL_CHARS_BACKREF, REPLACEMENTS } = constants$1;
|
|
568
|
+
/**
|
|
569
|
+
* Helpers
|
|
570
|
+
*/
|
|
571
|
+
const expandRange = (args, options) => {
|
|
572
|
+
if (typeof options.expandRange === "function") {
|
|
573
|
+
return options.expandRange(...args, options);
|
|
574
|
+
}
|
|
575
|
+
args.sort();
|
|
576
|
+
const value = `[${args.join("-")}]`;
|
|
577
|
+
try {
|
|
578
|
+
new RegExp(value);
|
|
579
|
+
} catch (ex) {
|
|
580
|
+
return args.map((v) => utils$2.escapeRegex(v)).join("..");
|
|
581
|
+
}
|
|
582
|
+
return value;
|
|
583
|
+
};
|
|
584
|
+
/**
|
|
585
|
+
* Create the message for a syntax error
|
|
586
|
+
*/
|
|
587
|
+
const syntaxError = (type, char) => {
|
|
588
|
+
return `Missing ${type}: "${char}" - use "\\\\${char}" to match literal characters`;
|
|
589
|
+
};
|
|
590
|
+
/**
|
|
591
|
+
* Parse the given input string.
|
|
592
|
+
* @param {String} input
|
|
593
|
+
* @param {Object} options
|
|
594
|
+
* @return {Object}
|
|
595
|
+
*/
|
|
596
|
+
const parse$3 = (input, options) => {
|
|
597
|
+
if (typeof input !== "string") {
|
|
598
|
+
throw new TypeError("Expected a string");
|
|
599
|
+
}
|
|
600
|
+
input = REPLACEMENTS[input] || input;
|
|
601
|
+
const opts = { ...options };
|
|
602
|
+
const max = typeof opts.maxLength === "number" ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
|
|
603
|
+
let len = input.length;
|
|
604
|
+
if (len > max) {
|
|
605
|
+
throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
|
|
606
|
+
}
|
|
607
|
+
const bos = {
|
|
608
|
+
type: "bos",
|
|
609
|
+
value: "",
|
|
610
|
+
output: opts.prepend || ""
|
|
611
|
+
};
|
|
612
|
+
const tokens = [bos];
|
|
613
|
+
const capture = opts.capture ? "" : "?:";
|
|
614
|
+
const PLATFORM_CHARS = constants$1.globChars(opts.windows);
|
|
615
|
+
const EXTGLOB_CHARS = constants$1.extglobChars(PLATFORM_CHARS);
|
|
616
|
+
const { DOT_LITERAL: DOT_LITERAL$1, PLUS_LITERAL: PLUS_LITERAL$1, SLASH_LITERAL: SLASH_LITERAL$1, ONE_CHAR: ONE_CHAR$1, DOTS_SLASH: DOTS_SLASH$1, NO_DOT: NO_DOT$1, NO_DOT_SLASH: NO_DOT_SLASH$1, NO_DOTS_SLASH: NO_DOTS_SLASH$1, QMARK: QMARK$1, QMARK_NO_DOT: QMARK_NO_DOT$1, STAR: STAR$1, START_ANCHOR: START_ANCHOR$1 } = PLATFORM_CHARS;
|
|
617
|
+
const globstar = (opts$1) => {
|
|
618
|
+
return `(${capture}(?:(?!${START_ANCHOR$1}${opts$1.dot ? DOTS_SLASH$1 : DOT_LITERAL$1}).)*?)`;
|
|
619
|
+
};
|
|
620
|
+
const nodot = opts.dot ? "" : NO_DOT$1;
|
|
621
|
+
const qmarkNoDot = opts.dot ? QMARK$1 : QMARK_NO_DOT$1;
|
|
622
|
+
let star = opts.bash === true ? globstar(opts) : STAR$1;
|
|
623
|
+
if (opts.capture) {
|
|
624
|
+
star = `(${star})`;
|
|
625
|
+
}
|
|
626
|
+
if (typeof opts.noext === "boolean") {
|
|
627
|
+
opts.noextglob = opts.noext;
|
|
628
|
+
}
|
|
629
|
+
const state = {
|
|
630
|
+
input,
|
|
631
|
+
index: -1,
|
|
632
|
+
start: 0,
|
|
633
|
+
dot: opts.dot === true,
|
|
634
|
+
consumed: "",
|
|
635
|
+
output: "",
|
|
636
|
+
prefix: "",
|
|
637
|
+
backtrack: false,
|
|
638
|
+
negated: false,
|
|
639
|
+
brackets: 0,
|
|
640
|
+
braces: 0,
|
|
641
|
+
parens: 0,
|
|
642
|
+
quotes: 0,
|
|
643
|
+
globstar: false,
|
|
644
|
+
tokens
|
|
645
|
+
};
|
|
646
|
+
input = utils$2.removePrefix(input, state);
|
|
647
|
+
len = input.length;
|
|
648
|
+
const extglobs = [];
|
|
649
|
+
const braces = [];
|
|
650
|
+
const stack = [];
|
|
651
|
+
let prev = bos;
|
|
652
|
+
let value;
|
|
653
|
+
/**
|
|
654
|
+
* Tokenizing helpers
|
|
655
|
+
*/
|
|
656
|
+
const eos = () => state.index === len - 1;
|
|
657
|
+
const peek = state.peek = (n = 1) => input[state.index + n];
|
|
658
|
+
const advance = state.advance = () => input[++state.index] || "";
|
|
659
|
+
const remaining = () => input.slice(state.index + 1);
|
|
660
|
+
const consume = (value$1 = "", num = 0) => {
|
|
661
|
+
state.consumed += value$1;
|
|
662
|
+
state.index += num;
|
|
663
|
+
};
|
|
664
|
+
const append = (token) => {
|
|
665
|
+
state.output += token.output != null ? token.output : token.value;
|
|
666
|
+
consume(token.value);
|
|
667
|
+
};
|
|
668
|
+
const negate = () => {
|
|
669
|
+
let count = 1;
|
|
670
|
+
while (peek() === "!" && (peek(2) !== "(" || peek(3) === "?")) {
|
|
671
|
+
advance();
|
|
672
|
+
state.start++;
|
|
673
|
+
count++;
|
|
674
|
+
}
|
|
675
|
+
if (count % 2 === 0) {
|
|
676
|
+
return false;
|
|
677
|
+
}
|
|
678
|
+
state.negated = true;
|
|
679
|
+
state.start++;
|
|
680
|
+
return true;
|
|
681
|
+
};
|
|
682
|
+
const increment = (type) => {
|
|
683
|
+
state[type]++;
|
|
684
|
+
stack.push(type);
|
|
685
|
+
};
|
|
686
|
+
const decrement = (type) => {
|
|
687
|
+
state[type]--;
|
|
688
|
+
stack.pop();
|
|
689
|
+
};
|
|
690
|
+
/**
|
|
691
|
+
* Push tokens onto the tokens array. This helper speeds up
|
|
692
|
+
* tokenizing by 1) helping us avoid backtracking as much as possible,
|
|
693
|
+
* and 2) helping us avoid creating extra tokens when consecutive
|
|
694
|
+
* characters are plain text. This improves performance and simplifies
|
|
695
|
+
* lookbehinds.
|
|
696
|
+
*/
|
|
697
|
+
const push = (tok) => {
|
|
698
|
+
if (prev.type === "globstar") {
|
|
699
|
+
const isBrace = state.braces > 0 && (tok.type === "comma" || tok.type === "brace");
|
|
700
|
+
const isExtglob = tok.extglob === true || extglobs.length && (tok.type === "pipe" || tok.type === "paren");
|
|
701
|
+
if (tok.type !== "slash" && tok.type !== "paren" && !isBrace && !isExtglob) {
|
|
702
|
+
state.output = state.output.slice(0, -prev.output.length);
|
|
703
|
+
prev.type = "star";
|
|
704
|
+
prev.value = "*";
|
|
705
|
+
prev.output = star;
|
|
706
|
+
state.output += prev.output;
|
|
707
|
+
}
|
|
708
|
+
}
|
|
709
|
+
if (extglobs.length && tok.type !== "paren") {
|
|
710
|
+
extglobs[extglobs.length - 1].inner += tok.value;
|
|
711
|
+
}
|
|
712
|
+
if (tok.value || tok.output) append(tok);
|
|
713
|
+
if (prev && prev.type === "text" && tok.type === "text") {
|
|
714
|
+
prev.output = (prev.output || prev.value) + tok.value;
|
|
715
|
+
prev.value += tok.value;
|
|
716
|
+
return;
|
|
717
|
+
}
|
|
718
|
+
tok.prev = prev;
|
|
719
|
+
tokens.push(tok);
|
|
720
|
+
prev = tok;
|
|
721
|
+
};
|
|
722
|
+
const extglobOpen = (type, value$1) => {
|
|
723
|
+
const token = {
|
|
724
|
+
...EXTGLOB_CHARS[value$1],
|
|
725
|
+
conditions: 1,
|
|
726
|
+
inner: ""
|
|
727
|
+
};
|
|
728
|
+
token.prev = prev;
|
|
729
|
+
token.parens = state.parens;
|
|
730
|
+
token.output = state.output;
|
|
731
|
+
const output = (opts.capture ? "(" : "") + token.open;
|
|
732
|
+
increment("parens");
|
|
733
|
+
push({
|
|
734
|
+
type,
|
|
735
|
+
value: value$1,
|
|
736
|
+
output: state.output ? "" : ONE_CHAR$1
|
|
737
|
+
});
|
|
738
|
+
push({
|
|
739
|
+
type: "paren",
|
|
740
|
+
extglob: true,
|
|
741
|
+
value: advance(),
|
|
742
|
+
output
|
|
743
|
+
});
|
|
744
|
+
extglobs.push(token);
|
|
745
|
+
};
|
|
746
|
+
const extglobClose = (token) => {
|
|
747
|
+
let output = token.close + (opts.capture ? ")" : "");
|
|
748
|
+
let rest;
|
|
749
|
+
if (token.type === "negate") {
|
|
750
|
+
let extglobStar = star;
|
|
751
|
+
if (token.inner && token.inner.length > 1 && token.inner.includes("/")) {
|
|
752
|
+
extglobStar = globstar(opts);
|
|
753
|
+
}
|
|
754
|
+
if (extglobStar !== star || eos() || /^\)+$/.test(remaining())) {
|
|
755
|
+
output = token.close = `)$))${extglobStar}`;
|
|
756
|
+
}
|
|
757
|
+
if (token.inner.includes("*") && (rest = remaining()) && /^\.[^\\/.]+$/.test(rest)) {
|
|
758
|
+
const expression = parse$3(rest, {
|
|
759
|
+
...options,
|
|
760
|
+
fastpaths: false
|
|
761
|
+
}).output;
|
|
762
|
+
output = token.close = `)${expression})${extglobStar})`;
|
|
763
|
+
}
|
|
764
|
+
if (token.prev.type === "bos") {
|
|
765
|
+
state.negatedExtglob = true;
|
|
766
|
+
}
|
|
767
|
+
}
|
|
768
|
+
push({
|
|
769
|
+
type: "paren",
|
|
770
|
+
extglob: true,
|
|
771
|
+
value,
|
|
772
|
+
output
|
|
773
|
+
});
|
|
774
|
+
decrement("parens");
|
|
775
|
+
};
|
|
776
|
+
/**
|
|
777
|
+
* Fast paths
|
|
778
|
+
*/
|
|
779
|
+
if (opts.fastpaths !== false && !/(^[*!]|[/()[\]{}"])/.test(input)) {
|
|
780
|
+
let backslashes = false;
|
|
781
|
+
let output = input.replace(REGEX_SPECIAL_CHARS_BACKREF, (m, esc, chars, first, rest, index) => {
|
|
782
|
+
if (first === "\\") {
|
|
783
|
+
backslashes = true;
|
|
784
|
+
return m;
|
|
785
|
+
}
|
|
786
|
+
if (first === "?") {
|
|
787
|
+
if (esc) {
|
|
788
|
+
return esc + first + (rest ? QMARK$1.repeat(rest.length) : "");
|
|
789
|
+
}
|
|
790
|
+
if (index === 0) {
|
|
791
|
+
return qmarkNoDot + (rest ? QMARK$1.repeat(rest.length) : "");
|
|
792
|
+
}
|
|
793
|
+
return QMARK$1.repeat(chars.length);
|
|
794
|
+
}
|
|
795
|
+
if (first === ".") {
|
|
796
|
+
return DOT_LITERAL$1.repeat(chars.length);
|
|
797
|
+
}
|
|
798
|
+
if (first === "*") {
|
|
799
|
+
if (esc) {
|
|
800
|
+
return esc + first + (rest ? star : "");
|
|
801
|
+
}
|
|
802
|
+
return star;
|
|
803
|
+
}
|
|
804
|
+
return esc ? m : `\\${m}`;
|
|
805
|
+
});
|
|
806
|
+
if (backslashes === true) {
|
|
807
|
+
if (opts.unescape === true) {
|
|
808
|
+
output = output.replace(/\\/g, "");
|
|
809
|
+
} else {
|
|
810
|
+
output = output.replace(/\\+/g, (m) => {
|
|
811
|
+
return m.length % 2 === 0 ? "\\\\" : m ? "\\" : "";
|
|
812
|
+
});
|
|
813
|
+
}
|
|
814
|
+
}
|
|
815
|
+
if (output === input && opts.contains === true) {
|
|
816
|
+
state.output = input;
|
|
817
|
+
return state;
|
|
818
|
+
}
|
|
819
|
+
state.output = utils$2.wrapOutput(output, state, options);
|
|
820
|
+
return state;
|
|
821
|
+
}
|
|
822
|
+
/**
|
|
823
|
+
* Tokenize input until we reach end-of-string
|
|
824
|
+
*/
|
|
825
|
+
while (!eos()) {
|
|
826
|
+
value = advance();
|
|
827
|
+
if (value === "\0") {
|
|
828
|
+
continue;
|
|
829
|
+
}
|
|
830
|
+
/**
|
|
831
|
+
* Escaped characters
|
|
832
|
+
*/
|
|
833
|
+
if (value === "\\") {
|
|
834
|
+
const next = peek();
|
|
835
|
+
if (next === "/" && opts.bash !== true) {
|
|
836
|
+
continue;
|
|
837
|
+
}
|
|
838
|
+
if (next === "." || next === ";") {
|
|
839
|
+
continue;
|
|
840
|
+
}
|
|
841
|
+
if (!next) {
|
|
842
|
+
value += "\\";
|
|
843
|
+
push({
|
|
844
|
+
type: "text",
|
|
845
|
+
value
|
|
846
|
+
});
|
|
847
|
+
continue;
|
|
848
|
+
}
|
|
849
|
+
const match = /^\\+/.exec(remaining());
|
|
850
|
+
let slashes = 0;
|
|
851
|
+
if (match && match[0].length > 2) {
|
|
852
|
+
slashes = match[0].length;
|
|
853
|
+
state.index += slashes;
|
|
854
|
+
if (slashes % 2 !== 0) {
|
|
855
|
+
value += "\\";
|
|
856
|
+
}
|
|
857
|
+
}
|
|
858
|
+
if (opts.unescape === true) {
|
|
859
|
+
value = advance();
|
|
860
|
+
} else {
|
|
861
|
+
value += advance();
|
|
862
|
+
}
|
|
863
|
+
if (state.brackets === 0) {
|
|
864
|
+
push({
|
|
865
|
+
type: "text",
|
|
866
|
+
value
|
|
867
|
+
});
|
|
868
|
+
continue;
|
|
869
|
+
}
|
|
870
|
+
}
|
|
871
|
+
/**
|
|
872
|
+
* If we're inside a regex character class, continue
|
|
873
|
+
* until we reach the closing bracket.
|
|
874
|
+
*/
|
|
875
|
+
if (state.brackets > 0 && (value !== "]" || prev.value === "[" || prev.value === "[^")) {
|
|
876
|
+
if (opts.posix !== false && value === ":") {
|
|
877
|
+
const inner = prev.value.slice(1);
|
|
878
|
+
if (inner.includes("[")) {
|
|
879
|
+
prev.posix = true;
|
|
880
|
+
if (inner.includes(":")) {
|
|
881
|
+
const idx = prev.value.lastIndexOf("[");
|
|
882
|
+
const pre = prev.value.slice(0, idx);
|
|
883
|
+
const rest$1 = prev.value.slice(idx + 2);
|
|
884
|
+
const posix = POSIX_REGEX_SOURCE[rest$1];
|
|
885
|
+
if (posix) {
|
|
886
|
+
prev.value = pre + posix;
|
|
887
|
+
state.backtrack = true;
|
|
888
|
+
advance();
|
|
889
|
+
if (!bos.output && tokens.indexOf(prev) === 1) {
|
|
890
|
+
bos.output = ONE_CHAR$1;
|
|
891
|
+
}
|
|
892
|
+
continue;
|
|
893
|
+
}
|
|
894
|
+
}
|
|
895
|
+
}
|
|
896
|
+
}
|
|
897
|
+
if (value === "[" && peek() !== ":" || value === "-" && peek() === "]") {
|
|
898
|
+
value = `\\${value}`;
|
|
899
|
+
}
|
|
900
|
+
if (value === "]" && (prev.value === "[" || prev.value === "[^")) {
|
|
901
|
+
value = `\\${value}`;
|
|
902
|
+
}
|
|
903
|
+
if (opts.posix === true && value === "!" && prev.value === "[") {
|
|
904
|
+
value = "^";
|
|
905
|
+
}
|
|
906
|
+
prev.value += value;
|
|
907
|
+
append({ value });
|
|
908
|
+
continue;
|
|
909
|
+
}
|
|
910
|
+
/**
|
|
911
|
+
* If we're inside a quoted string, continue
|
|
912
|
+
* until we reach the closing double quote.
|
|
913
|
+
*/
|
|
914
|
+
if (state.quotes === 1 && value !== "\"") {
|
|
915
|
+
value = utils$2.escapeRegex(value);
|
|
916
|
+
prev.value += value;
|
|
917
|
+
append({ value });
|
|
918
|
+
continue;
|
|
919
|
+
}
|
|
920
|
+
/**
|
|
921
|
+
* Double quotes
|
|
922
|
+
*/
|
|
923
|
+
if (value === "\"") {
|
|
924
|
+
state.quotes = state.quotes === 1 ? 0 : 1;
|
|
925
|
+
if (opts.keepQuotes === true) {
|
|
926
|
+
push({
|
|
927
|
+
type: "text",
|
|
928
|
+
value
|
|
929
|
+
});
|
|
930
|
+
}
|
|
931
|
+
continue;
|
|
932
|
+
}
|
|
933
|
+
/**
|
|
934
|
+
* Parentheses
|
|
935
|
+
*/
|
|
936
|
+
if (value === "(") {
|
|
937
|
+
increment("parens");
|
|
938
|
+
push({
|
|
939
|
+
type: "paren",
|
|
940
|
+
value
|
|
941
|
+
});
|
|
942
|
+
continue;
|
|
943
|
+
}
|
|
944
|
+
if (value === ")") {
|
|
945
|
+
if (state.parens === 0 && opts.strictBrackets === true) {
|
|
946
|
+
throw new SyntaxError(syntaxError("opening", "("));
|
|
947
|
+
}
|
|
948
|
+
const extglob = extglobs[extglobs.length - 1];
|
|
949
|
+
if (extglob && state.parens === extglob.parens + 1) {
|
|
950
|
+
extglobClose(extglobs.pop());
|
|
951
|
+
continue;
|
|
952
|
+
}
|
|
953
|
+
push({
|
|
954
|
+
type: "paren",
|
|
955
|
+
value,
|
|
956
|
+
output: state.parens ? ")" : "\\)"
|
|
957
|
+
});
|
|
958
|
+
decrement("parens");
|
|
959
|
+
continue;
|
|
960
|
+
}
|
|
961
|
+
/**
|
|
962
|
+
* Square brackets
|
|
963
|
+
*/
|
|
964
|
+
if (value === "[") {
|
|
965
|
+
if (opts.nobracket === true || !remaining().includes("]")) {
|
|
966
|
+
if (opts.nobracket !== true && opts.strictBrackets === true) {
|
|
967
|
+
throw new SyntaxError(syntaxError("closing", "]"));
|
|
968
|
+
}
|
|
969
|
+
value = `\\${value}`;
|
|
970
|
+
} else {
|
|
971
|
+
increment("brackets");
|
|
972
|
+
}
|
|
973
|
+
push({
|
|
974
|
+
type: "bracket",
|
|
975
|
+
value
|
|
976
|
+
});
|
|
977
|
+
continue;
|
|
978
|
+
}
|
|
979
|
+
if (value === "]") {
|
|
980
|
+
if (opts.nobracket === true || prev && prev.type === "bracket" && prev.value.length === 1) {
|
|
981
|
+
push({
|
|
982
|
+
type: "text",
|
|
983
|
+
value,
|
|
984
|
+
output: `\\${value}`
|
|
985
|
+
});
|
|
986
|
+
continue;
|
|
987
|
+
}
|
|
988
|
+
if (state.brackets === 0) {
|
|
989
|
+
if (opts.strictBrackets === true) {
|
|
990
|
+
throw new SyntaxError(syntaxError("opening", "["));
|
|
991
|
+
}
|
|
992
|
+
push({
|
|
993
|
+
type: "text",
|
|
994
|
+
value,
|
|
995
|
+
output: `\\${value}`
|
|
996
|
+
});
|
|
997
|
+
continue;
|
|
998
|
+
}
|
|
999
|
+
decrement("brackets");
|
|
1000
|
+
const prevValue = prev.value.slice(1);
|
|
1001
|
+
if (prev.posix !== true && prevValue[0] === "^" && !prevValue.includes("/")) {
|
|
1002
|
+
value = `/${value}`;
|
|
1003
|
+
}
|
|
1004
|
+
prev.value += value;
|
|
1005
|
+
append({ value });
|
|
1006
|
+
if (opts.literalBrackets === false || utils$2.hasRegexChars(prevValue)) {
|
|
1007
|
+
continue;
|
|
1008
|
+
}
|
|
1009
|
+
const escaped = utils$2.escapeRegex(prev.value);
|
|
1010
|
+
state.output = state.output.slice(0, -prev.value.length);
|
|
1011
|
+
if (opts.literalBrackets === true) {
|
|
1012
|
+
state.output += escaped;
|
|
1013
|
+
prev.value = escaped;
|
|
1014
|
+
continue;
|
|
1015
|
+
}
|
|
1016
|
+
prev.value = `(${capture}${escaped}|${prev.value})`;
|
|
1017
|
+
state.output += prev.value;
|
|
1018
|
+
continue;
|
|
1019
|
+
}
|
|
1020
|
+
/**
|
|
1021
|
+
* Braces
|
|
1022
|
+
*/
|
|
1023
|
+
if (value === "{" && opts.nobrace !== true) {
|
|
1024
|
+
increment("braces");
|
|
1025
|
+
const open = {
|
|
1026
|
+
type: "brace",
|
|
1027
|
+
value,
|
|
1028
|
+
output: "(",
|
|
1029
|
+
outputIndex: state.output.length,
|
|
1030
|
+
tokensIndex: state.tokens.length
|
|
1031
|
+
};
|
|
1032
|
+
braces.push(open);
|
|
1033
|
+
push(open);
|
|
1034
|
+
continue;
|
|
1035
|
+
}
|
|
1036
|
+
if (value === "}") {
|
|
1037
|
+
const brace = braces[braces.length - 1];
|
|
1038
|
+
if (opts.nobrace === true || !brace) {
|
|
1039
|
+
push({
|
|
1040
|
+
type: "text",
|
|
1041
|
+
value,
|
|
1042
|
+
output: value
|
|
1043
|
+
});
|
|
1044
|
+
continue;
|
|
1045
|
+
}
|
|
1046
|
+
let output = ")";
|
|
1047
|
+
if (brace.dots === true) {
|
|
1048
|
+
const arr = tokens.slice();
|
|
1049
|
+
const range = [];
|
|
1050
|
+
for (let i = arr.length - 1; i >= 0; i--) {
|
|
1051
|
+
tokens.pop();
|
|
1052
|
+
if (arr[i].type === "brace") {
|
|
1053
|
+
break;
|
|
1054
|
+
}
|
|
1055
|
+
if (arr[i].type !== "dots") {
|
|
1056
|
+
range.unshift(arr[i].value);
|
|
1057
|
+
}
|
|
1058
|
+
}
|
|
1059
|
+
output = expandRange(range, opts);
|
|
1060
|
+
state.backtrack = true;
|
|
1061
|
+
}
|
|
1062
|
+
if (brace.comma !== true && brace.dots !== true) {
|
|
1063
|
+
const out = state.output.slice(0, brace.outputIndex);
|
|
1064
|
+
const toks = state.tokens.slice(brace.tokensIndex);
|
|
1065
|
+
brace.value = brace.output = "\\{";
|
|
1066
|
+
value = output = "\\}";
|
|
1067
|
+
state.output = out;
|
|
1068
|
+
for (const t of toks) {
|
|
1069
|
+
state.output += t.output || t.value;
|
|
1070
|
+
}
|
|
1071
|
+
}
|
|
1072
|
+
push({
|
|
1073
|
+
type: "brace",
|
|
1074
|
+
value,
|
|
1075
|
+
output
|
|
1076
|
+
});
|
|
1077
|
+
decrement("braces");
|
|
1078
|
+
braces.pop();
|
|
1079
|
+
continue;
|
|
1080
|
+
}
|
|
1081
|
+
/**
|
|
1082
|
+
* Pipes
|
|
1083
|
+
*/
|
|
1084
|
+
if (value === "|") {
|
|
1085
|
+
if (extglobs.length > 0) {
|
|
1086
|
+
extglobs[extglobs.length - 1].conditions++;
|
|
1087
|
+
}
|
|
1088
|
+
push({
|
|
1089
|
+
type: "text",
|
|
1090
|
+
value
|
|
1091
|
+
});
|
|
1092
|
+
continue;
|
|
1093
|
+
}
|
|
1094
|
+
/**
|
|
1095
|
+
* Commas
|
|
1096
|
+
*/
|
|
1097
|
+
if (value === ",") {
|
|
1098
|
+
let output = value;
|
|
1099
|
+
const brace = braces[braces.length - 1];
|
|
1100
|
+
if (brace && stack[stack.length - 1] === "braces") {
|
|
1101
|
+
brace.comma = true;
|
|
1102
|
+
output = "|";
|
|
1103
|
+
}
|
|
1104
|
+
push({
|
|
1105
|
+
type: "comma",
|
|
1106
|
+
value,
|
|
1107
|
+
output
|
|
1108
|
+
});
|
|
1109
|
+
continue;
|
|
1110
|
+
}
|
|
1111
|
+
/**
|
|
1112
|
+
* Slashes
|
|
1113
|
+
*/
|
|
1114
|
+
if (value === "/") {
|
|
1115
|
+
if (prev.type === "dot" && state.index === state.start + 1) {
|
|
1116
|
+
state.start = state.index + 1;
|
|
1117
|
+
state.consumed = "";
|
|
1118
|
+
state.output = "";
|
|
1119
|
+
tokens.pop();
|
|
1120
|
+
prev = bos;
|
|
1121
|
+
continue;
|
|
1122
|
+
}
|
|
1123
|
+
push({
|
|
1124
|
+
type: "slash",
|
|
1125
|
+
value,
|
|
1126
|
+
output: SLASH_LITERAL$1
|
|
1127
|
+
});
|
|
1128
|
+
continue;
|
|
1129
|
+
}
|
|
1130
|
+
/**
|
|
1131
|
+
* Dots
|
|
1132
|
+
*/
|
|
1133
|
+
if (value === ".") {
|
|
1134
|
+
if (state.braces > 0 && prev.type === "dot") {
|
|
1135
|
+
if (prev.value === ".") prev.output = DOT_LITERAL$1;
|
|
1136
|
+
const brace = braces[braces.length - 1];
|
|
1137
|
+
prev.type = "dots";
|
|
1138
|
+
prev.output += value;
|
|
1139
|
+
prev.value += value;
|
|
1140
|
+
brace.dots = true;
|
|
1141
|
+
continue;
|
|
1142
|
+
}
|
|
1143
|
+
if (state.braces + state.parens === 0 && prev.type !== "bos" && prev.type !== "slash") {
|
|
1144
|
+
push({
|
|
1145
|
+
type: "text",
|
|
1146
|
+
value,
|
|
1147
|
+
output: DOT_LITERAL$1
|
|
1148
|
+
});
|
|
1149
|
+
continue;
|
|
1150
|
+
}
|
|
1151
|
+
push({
|
|
1152
|
+
type: "dot",
|
|
1153
|
+
value,
|
|
1154
|
+
output: DOT_LITERAL$1
|
|
1155
|
+
});
|
|
1156
|
+
continue;
|
|
1157
|
+
}
|
|
1158
|
+
/**
|
|
1159
|
+
* Question marks
|
|
1160
|
+
*/
|
|
1161
|
+
if (value === "?") {
|
|
1162
|
+
const isGroup = prev && prev.value === "(";
|
|
1163
|
+
if (!isGroup && opts.noextglob !== true && peek() === "(" && peek(2) !== "?") {
|
|
1164
|
+
extglobOpen("qmark", value);
|
|
1165
|
+
continue;
|
|
1166
|
+
}
|
|
1167
|
+
if (prev && prev.type === "paren") {
|
|
1168
|
+
const next = peek();
|
|
1169
|
+
let output = value;
|
|
1170
|
+
if (prev.value === "(" && !/[!=<:]/.test(next) || next === "<" && !/<([!=]|\w+>)/.test(remaining())) {
|
|
1171
|
+
output = `\\${value}`;
|
|
1172
|
+
}
|
|
1173
|
+
push({
|
|
1174
|
+
type: "text",
|
|
1175
|
+
value,
|
|
1176
|
+
output
|
|
1177
|
+
});
|
|
1178
|
+
continue;
|
|
1179
|
+
}
|
|
1180
|
+
if (opts.dot !== true && (prev.type === "slash" || prev.type === "bos")) {
|
|
1181
|
+
push({
|
|
1182
|
+
type: "qmark",
|
|
1183
|
+
value,
|
|
1184
|
+
output: QMARK_NO_DOT$1
|
|
1185
|
+
});
|
|
1186
|
+
continue;
|
|
1187
|
+
}
|
|
1188
|
+
push({
|
|
1189
|
+
type: "qmark",
|
|
1190
|
+
value,
|
|
1191
|
+
output: QMARK$1
|
|
1192
|
+
});
|
|
1193
|
+
continue;
|
|
1194
|
+
}
|
|
1195
|
+
/**
|
|
1196
|
+
* Exclamation
|
|
1197
|
+
*/
|
|
1198
|
+
if (value === "!") {
|
|
1199
|
+
if (opts.noextglob !== true && peek() === "(") {
|
|
1200
|
+
if (peek(2) !== "?" || !/[!=<:]/.test(peek(3))) {
|
|
1201
|
+
extglobOpen("negate", value);
|
|
1202
|
+
continue;
|
|
1203
|
+
}
|
|
1204
|
+
}
|
|
1205
|
+
if (opts.nonegate !== true && state.index === 0) {
|
|
1206
|
+
negate();
|
|
1207
|
+
continue;
|
|
1208
|
+
}
|
|
1209
|
+
}
|
|
1210
|
+
/**
|
|
1211
|
+
* Plus
|
|
1212
|
+
*/
|
|
1213
|
+
if (value === "+") {
|
|
1214
|
+
if (opts.noextglob !== true && peek() === "(" && peek(2) !== "?") {
|
|
1215
|
+
extglobOpen("plus", value);
|
|
1216
|
+
continue;
|
|
1217
|
+
}
|
|
1218
|
+
if (prev && prev.value === "(" || opts.regex === false) {
|
|
1219
|
+
push({
|
|
1220
|
+
type: "plus",
|
|
1221
|
+
value,
|
|
1222
|
+
output: PLUS_LITERAL$1
|
|
1223
|
+
});
|
|
1224
|
+
continue;
|
|
1225
|
+
}
|
|
1226
|
+
if (prev && (prev.type === "bracket" || prev.type === "paren" || prev.type === "brace") || state.parens > 0) {
|
|
1227
|
+
push({
|
|
1228
|
+
type: "plus",
|
|
1229
|
+
value
|
|
1230
|
+
});
|
|
1231
|
+
continue;
|
|
1232
|
+
}
|
|
1233
|
+
push({
|
|
1234
|
+
type: "plus",
|
|
1235
|
+
value: PLUS_LITERAL$1
|
|
1236
|
+
});
|
|
1237
|
+
continue;
|
|
1238
|
+
}
|
|
1239
|
+
/**
|
|
1240
|
+
* Plain text
|
|
1241
|
+
*/
|
|
1242
|
+
if (value === "@") {
|
|
1243
|
+
if (opts.noextglob !== true && peek() === "(" && peek(2) !== "?") {
|
|
1244
|
+
push({
|
|
1245
|
+
type: "at",
|
|
1246
|
+
extglob: true,
|
|
1247
|
+
value,
|
|
1248
|
+
output: ""
|
|
1249
|
+
});
|
|
1250
|
+
continue;
|
|
1251
|
+
}
|
|
1252
|
+
push({
|
|
1253
|
+
type: "text",
|
|
1254
|
+
value
|
|
1255
|
+
});
|
|
1256
|
+
continue;
|
|
1257
|
+
}
|
|
1258
|
+
/**
|
|
1259
|
+
* Plain text
|
|
1260
|
+
*/
|
|
1261
|
+
if (value !== "*") {
|
|
1262
|
+
if (value === "$" || value === "^") {
|
|
1263
|
+
value = `\\${value}`;
|
|
1264
|
+
}
|
|
1265
|
+
const match = REGEX_NON_SPECIAL_CHARS.exec(remaining());
|
|
1266
|
+
if (match) {
|
|
1267
|
+
value += match[0];
|
|
1268
|
+
state.index += match[0].length;
|
|
1269
|
+
}
|
|
1270
|
+
push({
|
|
1271
|
+
type: "text",
|
|
1272
|
+
value
|
|
1273
|
+
});
|
|
1274
|
+
continue;
|
|
1275
|
+
}
|
|
1276
|
+
/**
|
|
1277
|
+
* Stars
|
|
1278
|
+
*/
|
|
1279
|
+
if (prev && (prev.type === "globstar" || prev.star === true)) {
|
|
1280
|
+
prev.type = "star";
|
|
1281
|
+
prev.star = true;
|
|
1282
|
+
prev.value += value;
|
|
1283
|
+
prev.output = star;
|
|
1284
|
+
state.backtrack = true;
|
|
1285
|
+
state.globstar = true;
|
|
1286
|
+
consume(value);
|
|
1287
|
+
continue;
|
|
1288
|
+
}
|
|
1289
|
+
let rest = remaining();
|
|
1290
|
+
if (opts.noextglob !== true && /^\([^?]/.test(rest)) {
|
|
1291
|
+
extglobOpen("star", value);
|
|
1292
|
+
continue;
|
|
1293
|
+
}
|
|
1294
|
+
if (prev.type === "star") {
|
|
1295
|
+
if (opts.noglobstar === true) {
|
|
1296
|
+
consume(value);
|
|
1297
|
+
continue;
|
|
1298
|
+
}
|
|
1299
|
+
const prior = prev.prev;
|
|
1300
|
+
const before = prior.prev;
|
|
1301
|
+
const isStart = prior.type === "slash" || prior.type === "bos";
|
|
1302
|
+
const afterStar = before && (before.type === "star" || before.type === "globstar");
|
|
1303
|
+
if (opts.bash === true && (!isStart || rest[0] && rest[0] !== "/")) {
|
|
1304
|
+
push({
|
|
1305
|
+
type: "star",
|
|
1306
|
+
value,
|
|
1307
|
+
output: ""
|
|
1308
|
+
});
|
|
1309
|
+
continue;
|
|
1310
|
+
}
|
|
1311
|
+
const isBrace = state.braces > 0 && (prior.type === "comma" || prior.type === "brace");
|
|
1312
|
+
const isExtglob = extglobs.length && (prior.type === "pipe" || prior.type === "paren");
|
|
1313
|
+
if (!isStart && prior.type !== "paren" && !isBrace && !isExtglob) {
|
|
1314
|
+
push({
|
|
1315
|
+
type: "star",
|
|
1316
|
+
value,
|
|
1317
|
+
output: ""
|
|
1318
|
+
});
|
|
1319
|
+
continue;
|
|
1320
|
+
}
|
|
1321
|
+
while (rest.slice(0, 3) === "/**") {
|
|
1322
|
+
const after = input[state.index + 4];
|
|
1323
|
+
if (after && after !== "/") {
|
|
1324
|
+
break;
|
|
1325
|
+
}
|
|
1326
|
+
rest = rest.slice(3);
|
|
1327
|
+
consume("/**", 3);
|
|
1328
|
+
}
|
|
1329
|
+
if (prior.type === "bos" && eos()) {
|
|
1330
|
+
prev.type = "globstar";
|
|
1331
|
+
prev.value += value;
|
|
1332
|
+
prev.output = globstar(opts);
|
|
1333
|
+
state.output = prev.output;
|
|
1334
|
+
state.globstar = true;
|
|
1335
|
+
consume(value);
|
|
1336
|
+
continue;
|
|
1337
|
+
}
|
|
1338
|
+
if (prior.type === "slash" && prior.prev.type !== "bos" && !afterStar && eos()) {
|
|
1339
|
+
state.output = state.output.slice(0, -(prior.output + prev.output).length);
|
|
1340
|
+
prior.output = `(?:${prior.output}`;
|
|
1341
|
+
prev.type = "globstar";
|
|
1342
|
+
prev.output = globstar(opts) + (opts.strictSlashes ? ")" : "|$)");
|
|
1343
|
+
prev.value += value;
|
|
1344
|
+
state.globstar = true;
|
|
1345
|
+
state.output += prior.output + prev.output;
|
|
1346
|
+
consume(value);
|
|
1347
|
+
continue;
|
|
1348
|
+
}
|
|
1349
|
+
if (prior.type === "slash" && prior.prev.type !== "bos" && rest[0] === "/") {
|
|
1350
|
+
const end = rest[1] !== void 0 ? "|$" : "";
|
|
1351
|
+
state.output = state.output.slice(0, -(prior.output + prev.output).length);
|
|
1352
|
+
prior.output = `(?:${prior.output}`;
|
|
1353
|
+
prev.type = "globstar";
|
|
1354
|
+
prev.output = `${globstar(opts)}${SLASH_LITERAL$1}|${SLASH_LITERAL$1}${end})`;
|
|
1355
|
+
prev.value += value;
|
|
1356
|
+
state.output += prior.output + prev.output;
|
|
1357
|
+
state.globstar = true;
|
|
1358
|
+
consume(value + advance());
|
|
1359
|
+
push({
|
|
1360
|
+
type: "slash",
|
|
1361
|
+
value: "/",
|
|
1362
|
+
output: ""
|
|
1363
|
+
});
|
|
1364
|
+
continue;
|
|
1365
|
+
}
|
|
1366
|
+
if (prior.type === "bos" && rest[0] === "/") {
|
|
1367
|
+
prev.type = "globstar";
|
|
1368
|
+
prev.value += value;
|
|
1369
|
+
prev.output = `(?:^|${SLASH_LITERAL$1}|${globstar(opts)}${SLASH_LITERAL$1})`;
|
|
1370
|
+
state.output = prev.output;
|
|
1371
|
+
state.globstar = true;
|
|
1372
|
+
consume(value + advance());
|
|
1373
|
+
push({
|
|
1374
|
+
type: "slash",
|
|
1375
|
+
value: "/",
|
|
1376
|
+
output: ""
|
|
1377
|
+
});
|
|
1378
|
+
continue;
|
|
1379
|
+
}
|
|
1380
|
+
state.output = state.output.slice(0, -prev.output.length);
|
|
1381
|
+
prev.type = "globstar";
|
|
1382
|
+
prev.output = globstar(opts);
|
|
1383
|
+
prev.value += value;
|
|
1384
|
+
state.output += prev.output;
|
|
1385
|
+
state.globstar = true;
|
|
1386
|
+
consume(value);
|
|
1387
|
+
continue;
|
|
1388
|
+
}
|
|
1389
|
+
const token = {
|
|
1390
|
+
type: "star",
|
|
1391
|
+
value,
|
|
1392
|
+
output: star
|
|
1393
|
+
};
|
|
1394
|
+
if (opts.bash === true) {
|
|
1395
|
+
token.output = ".*?";
|
|
1396
|
+
if (prev.type === "bos" || prev.type === "slash") {
|
|
1397
|
+
token.output = nodot + token.output;
|
|
1398
|
+
}
|
|
1399
|
+
push(token);
|
|
1400
|
+
continue;
|
|
1401
|
+
}
|
|
1402
|
+
if (prev && (prev.type === "bracket" || prev.type === "paren") && opts.regex === true) {
|
|
1403
|
+
token.output = value;
|
|
1404
|
+
push(token);
|
|
1405
|
+
continue;
|
|
1406
|
+
}
|
|
1407
|
+
if (state.index === state.start || prev.type === "slash" || prev.type === "dot") {
|
|
1408
|
+
if (prev.type === "dot") {
|
|
1409
|
+
state.output += NO_DOT_SLASH$1;
|
|
1410
|
+
prev.output += NO_DOT_SLASH$1;
|
|
1411
|
+
} else if (opts.dot === true) {
|
|
1412
|
+
state.output += NO_DOTS_SLASH$1;
|
|
1413
|
+
prev.output += NO_DOTS_SLASH$1;
|
|
1414
|
+
} else {
|
|
1415
|
+
state.output += nodot;
|
|
1416
|
+
prev.output += nodot;
|
|
1417
|
+
}
|
|
1418
|
+
if (peek() !== "*") {
|
|
1419
|
+
state.output += ONE_CHAR$1;
|
|
1420
|
+
prev.output += ONE_CHAR$1;
|
|
1421
|
+
}
|
|
1422
|
+
}
|
|
1423
|
+
push(token);
|
|
1424
|
+
}
|
|
1425
|
+
while (state.brackets > 0) {
|
|
1426
|
+
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError("closing", "]"));
|
|
1427
|
+
state.output = utils$2.escapeLast(state.output, "[");
|
|
1428
|
+
decrement("brackets");
|
|
1429
|
+
}
|
|
1430
|
+
while (state.parens > 0) {
|
|
1431
|
+
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError("closing", ")"));
|
|
1432
|
+
state.output = utils$2.escapeLast(state.output, "(");
|
|
1433
|
+
decrement("parens");
|
|
1434
|
+
}
|
|
1435
|
+
while (state.braces > 0) {
|
|
1436
|
+
if (opts.strictBrackets === true) throw new SyntaxError(syntaxError("closing", "}"));
|
|
1437
|
+
state.output = utils$2.escapeLast(state.output, "{");
|
|
1438
|
+
decrement("braces");
|
|
1439
|
+
}
|
|
1440
|
+
if (opts.strictSlashes !== true && (prev.type === "star" || prev.type === "bracket")) {
|
|
1441
|
+
push({
|
|
1442
|
+
type: "maybe_slash",
|
|
1443
|
+
value: "",
|
|
1444
|
+
output: `${SLASH_LITERAL$1}?`
|
|
1445
|
+
});
|
|
1446
|
+
}
|
|
1447
|
+
if (state.backtrack === true) {
|
|
1448
|
+
state.output = "";
|
|
1449
|
+
for (const token of state.tokens) {
|
|
1450
|
+
state.output += token.output != null ? token.output : token.value;
|
|
1451
|
+
if (token.suffix) {
|
|
1452
|
+
state.output += token.suffix;
|
|
1453
|
+
}
|
|
1454
|
+
}
|
|
1455
|
+
}
|
|
1456
|
+
return state;
|
|
1457
|
+
};
|
|
1458
|
+
/**
|
|
1459
|
+
* Fast paths for creating regular expressions for common glob patterns.
|
|
1460
|
+
* This can significantly speed up processing and has very little downside
|
|
1461
|
+
* impact when none of the fast paths match.
|
|
1462
|
+
*/
|
|
1463
|
+
/**
 * Fast paths for creating regular expressions for common glob patterns.
 * Translates a handful of very common shorthands ('*', '**', '**' + '/*',
 * '*.*', ...) directly into regex source, returning undefined when the
 * input matches none of them so the caller can fall back to the full parser.
 *
 * @param {String} input Glob pattern to convert.
 * @param {Object} options Parser options (dot, capture, bash, windows, ...).
 * @returns {String|undefined} Regex source string, or undefined when no fast path applies.
 */
parse$3.fastpaths = (input, options) => {
	const opts = { ...options };
	// Guard against pathologically long patterns (memory / ReDoS protection).
	const max = typeof opts.maxLength === "number" ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
	const len = input.length;
	if (len > max) {
		throw new SyntaxError(`Input length: ${len}, exceeds maximum allowed length: ${max}`);
	}
	// Swap in the canonical form when this exact pattern has a known replacement.
	input = REPLACEMENTS[input] || input;
	// Platform-specific regex fragments (slash/dot literals, star, anchors, ...).
	const glyphs = constants$1.globChars(opts.windows);
	// Leading-dot policy: forbid dotfiles unless opts.dot is set.
	const nodot = opts.dot ? glyphs.NO_DOTS : glyphs.NO_DOT;
	const slashDot = opts.dot ? glyphs.NO_DOTS_SLASH : glyphs.NO_DOT;
	const capture = opts.capture ? "" : "?:";
	const state = {
		negated: false,
		prefix: ""
	};
	let star = opts.bash === true ? ".*?" : glyphs.STAR;
	if (opts.capture) {
		star = `(${star})`;
	}
	// Globstar fragment: any depth, refusing dot segments unless o.dot is set.
	const globstar = (o) => {
		if (o.noglobstar === true) return star;
		return `(${capture}(?:(?!${glyphs.START_ANCHOR}${o.dot ? glyphs.DOTS_SLASH : glyphs.DOT_LITERAL}).)*?)`;
	};
	// Map one recognized shorthand to its regex source; undefined otherwise.
	const create = (str) => {
		if (str === "*") return `${nodot}${glyphs.ONE_CHAR}${star}`;
		if (str === ".*") return `${glyphs.DOT_LITERAL}${glyphs.ONE_CHAR}${star}`;
		if (str === "*.*") return `${nodot}${star}${glyphs.DOT_LITERAL}${glyphs.ONE_CHAR}${star}`;
		if (str === "*/*") return `${nodot}${star}${glyphs.SLASH_LITERAL}${glyphs.ONE_CHAR}${slashDot}${star}`;
		if (str === "**") return nodot + globstar(opts);
		if (str === "**/*") return `(?:${nodot}${globstar(opts)}${glyphs.SLASH_LITERAL})?${slashDot}${glyphs.ONE_CHAR}${star}`;
		if (str === "**/*.*") return `(?:${nodot}${globstar(opts)}${glyphs.SLASH_LITERAL})?${slashDot}${star}${glyphs.DOT_LITERAL}${glyphs.ONE_CHAR}${star}`;
		if (str === "**/.*") return `(?:${nodot}${globstar(opts)}${glyphs.SLASH_LITERAL})?${glyphs.DOT_LITERAL}${glyphs.ONE_CHAR}${star}`;
		// `<base>.<ext>`: recurse on the base, then append the literal extension.
		const match = /^(.*?)\.(\w+)$/.exec(str);
		if (!match) return;
		const base = create(match[1]);
		if (!base) return;
		return base + glyphs.DOT_LITERAL + match[2];
	};
	// Strip any pattern prefix before lookup (recorded on `state`; presumably a
	// leading './' — behavior defined by utils.removePrefix).
	const stripped = utils$2.removePrefix(input, state);
	let source = create(stripped);
	if (source && opts.strictSlashes !== true) {
		// Tolerate an optional trailing slash unless strict slashes are requested.
		source += `${glyphs.SLASH_LITERAL}?`;
	}
	return source;
};
|
|
1513
|
+
module.exports = parse$3;
|
|
1514
|
+
}));
|
|
1515
|
+
|
|
1516
|
+
//#endregion
|
|
1517
|
+
//#region node_modules/picomatch/lib/picomatch.js
|
|
1518
|
+
var require_picomatch$1 = /* @__PURE__ */ require_chunk.__commonJSMin(((exports, module) => {
|
|
1519
|
+
const scan = require_scan();
|
|
1520
|
+
const parse$2 = require_parse();
|
|
1521
|
+
const utils$1 = require_utils();
|
|
1522
|
+
const constants = require_constants();
|
|
1523
|
+
// True for plain (non-array) objects; falsy inputs pass through unchanged,
// matching `val && typeof val === "object" && !Array.isArray(val)`.
const isObject = (val) => {
	if (!val) return val;
	if (typeof val !== "object") return false;
	return !Array.isArray(val);
};
|
|
1524
|
+
/**
|
|
1525
|
+
* Creates a matcher function from one or more glob patterns. The
|
|
1526
|
+
* returned function takes a string to match as its first argument,
|
|
1527
|
+
* and returns true if the string is a match. The returned matcher
|
|
1528
|
+
* function also takes a boolean as the second argument that, when true,
|
|
1529
|
+
* returns an object with additional information.
|
|
1530
|
+
*
|
|
1531
|
+
* ```js
|
|
1532
|
+
* const picomatch = require('picomatch');
|
|
1533
|
+
* // picomatch(glob[, options]);
|
|
1534
|
+
*
|
|
1535
|
+
* const isMatch = picomatch('*.!(*a)');
|
|
1536
|
+
* console.log(isMatch('a.a')); //=> false
|
|
1537
|
+
* console.log(isMatch('a.b')); //=> true
|
|
1538
|
+
* ```
|
|
1539
|
+
* @name picomatch
|
|
1540
|
+
* @param {String|Array} `globs` One or more glob patterns.
|
|
1541
|
+
* @param {Object=} `options`
|
|
1542
|
+
* @return {Function=} Returns a matcher function.
|
|
1543
|
+
* @api public
|
|
1544
|
+
*/
|
|
1545
|
+
const picomatch$2 = (glob, options, returnState = false) => {
	// An array of globs becomes an OR-matcher: the first pattern to match wins.
	if (Array.isArray(glob)) {
		const fns = glob.map((input) => picomatch$2(input, options, returnState));
		const arrayMatcher = (str) => {
			for (const isMatch of fns) {
				// A truthy result (true, or a result object when returnState) short-circuits.
				const state$1 = isMatch(str);
				if (state$1) return state$1;
			}
			return false;
		};
		return arrayMatcher;
	}
	// `glob` may also be a pre-parsed state object (duck-typed: has .tokens and .input).
	const isState = isObject(glob) && glob.tokens && glob.input;
	if (glob === "" || typeof glob !== "string" && !isState) {
		throw new TypeError("Expected pattern to be a non-empty string");
	}
	const opts = options || {};
	// `posix` is taken from opts.windows; test() uses it to decide whether to
	// normalize input paths to posix slashes before matching.
	const posix = opts.windows;
	// Compile the regex; makeRe is asked (4th arg) to attach its parser state,
	// which is then moved off the regex and kept locally.
	const regex = isState ? picomatch$2.compileRe(glob, options) : picomatch$2.makeRe(glob, options, false, true);
	const state = regex.state;
	delete regex.state;
	// Optional secondary matcher for opts.ignore patterns. Its own ignore and
	// callback options are nulled so they don't fire recursively.
	let isIgnored = () => false;
	if (opts.ignore) {
		const ignoreOpts = {
			...options,
			ignore: null,
			onMatch: null,
			onResult: null
		};
		isIgnored = picomatch$2(opts.ignore, ignoreOpts, returnState);
	}
	// The returned matcher: tests one input string; with returnObject=true it
	// returns the full result object instead of a boolean.
	const matcher = (input, returnObject = false) => {
		const { isMatch, match, output } = picomatch$2.test(input, regex, options, {
			glob,
			posix
		});
		const result = {
			glob,
			state,
			regex,
			posix,
			input,
			output,
			match,
			isMatch
		};
		// onResult fires for every tested string, match or not.
		if (typeof opts.onResult === "function") {
			opts.onResult(result);
		}
		if (isMatch === false) {
			result.isMatch = false;
			return returnObject ? result : false;
		}
		// A match that is also ignored reports as a non-match (after onIgnore).
		if (isIgnored(input)) {
			if (typeof opts.onIgnore === "function") {
				opts.onIgnore(result);
			}
			result.isMatch = false;
			return returnObject ? result : false;
		}
		// onMatch fires only for non-ignored matches.
		if (typeof opts.onMatch === "function") {
			opts.onMatch(result);
		}
		return returnObject ? result : true;
	};
	// When requested, expose the parser state on the matcher function itself.
	if (returnState) {
		matcher.state = state;
	}
	return matcher;
};
|
|
1615
|
+
/**
|
|
1616
|
+
* Test `input` with the given `regex`. This is used by the main
|
|
1617
|
+
* `picomatch()` function to test the input string.
|
|
1618
|
+
*
|
|
1619
|
+
* ```js
|
|
1620
|
+
* const picomatch = require('picomatch');
|
|
1621
|
+
* // picomatch.test(input, regex[, options]);
|
|
1622
|
+
*
|
|
1623
|
+
* console.log(picomatch.test('foo/bar', /^(?:([^/]*?)\/([^/]*?))$/));
|
|
1624
|
+
* // { isMatch: true, match: [ 'foo/', 'foo', 'bar' ], output: 'foo/bar' }
|
|
1625
|
+
* ```
|
|
1626
|
+
* @param {String} `input` String to test.
|
|
1627
|
+
* @param {RegExp} `regex`
|
|
1628
|
+
* @return {Object} Returns an object with matching info.
|
|
1629
|
+
* @api public
|
|
1630
|
+
*/
|
|
1631
|
+
picomatch$2.test = (input, regex, options, { glob, posix } = {}) => {
	// Input must be a string; an empty string never matches anything.
	if (typeof input !== "string") {
		throw new TypeError("Expected input to be a string");
	}
	if (input === "") {
		return { isMatch: false, output: "" };
	}
	const opts = options || {};
	// Optional normalizer: user-supplied `format`, else posix slash conversion.
	const formatFn = opts.format || (posix ? utils$1.toPosixSlashes : null);
	// Cheap path 1: the raw input is literally the glob string.
	let matched = input === glob;
	let candidate = matched && formatFn ? formatFn(input) : input;
	if (matched === false) {
		// Cheap path 2: the formatted input equals the glob string.
		candidate = formatFn ? formatFn(input) : input;
		matched = candidate === glob;
	}
	// Fall back to the regex; also re-run it when capture groups are wanted.
	if (matched === false || opts.capture === true) {
		if (opts.matchBase === true || opts.basename === true) {
			matched = picomatch$2.matchBase(input, regex, options, posix);
		} else {
			matched = regex.exec(candidate);
		}
	}
	return {
		isMatch: Boolean(matched),
		match: matched,
		output: candidate
	};
};
|
|
1662
|
+
/**
|
|
1663
|
+
* Match the basename of a filepath.
|
|
1664
|
+
*
|
|
1665
|
+
* ```js
|
|
1666
|
+
* const picomatch = require('picomatch');
|
|
1667
|
+
* // picomatch.matchBase(input, glob[, options]);
|
|
1668
|
+
* console.log(picomatch.matchBase('foo/bar.js', '*.js')); // true
|
|
1669
|
+
* ```
|
|
1670
|
+
* @param {String} `input` String to test.
|
|
1671
|
+
* @param {RegExp|String} `glob` Glob pattern or regex created by [.makeRe](#makeRe).
|
|
1672
|
+
* @return {Boolean}
|
|
1673
|
+
* @api public
|
|
1674
|
+
*/
|
|
1675
|
+
picomatch$2.matchBase = (input, glob, options) => {
	// Accept either a precompiled RegExp or a glob string to compile on the fly.
	let regex;
	if (glob instanceof RegExp) {
		regex = glob;
	} else {
		regex = picomatch$2.makeRe(glob, options);
	}
	// Only the basename of the path is tested, never the directory part.
	return regex.test(utils$1.basename(input));
};
|
|
1679
|
+
/**
|
|
1680
|
+
* Returns true if **any** of the given glob `patterns` match the specified `string`.
|
|
1681
|
+
*
|
|
1682
|
+
* ```js
|
|
1683
|
+
* const picomatch = require('picomatch');
|
|
1684
|
+
* // picomatch.isMatch(string, patterns[, options]);
|
|
1685
|
+
*
|
|
1686
|
+
* console.log(picomatch.isMatch('a.a', ['b.*', '*.a'])); //=> true
|
|
1687
|
+
* console.log(picomatch.isMatch('a.a', 'b.*')); //=> false
|
|
1688
|
+
* ```
|
|
1689
|
+
* @param {String|Array} str The string to test.
|
|
1690
|
+
* @param {String|Array} patterns One or more glob patterns to use for matching.
|
|
1691
|
+
* @param {Object} [options] See available [options](#options).
|
|
1692
|
+
* @return {Boolean} Returns true if any patterns match `str`
|
|
1693
|
+
* @api public
|
|
1694
|
+
*/
|
|
1695
|
+
// Convenience wrapper: build a matcher for the pattern(s) and apply it at once.
picomatch$2.isMatch = (str, patterns, options) => {
	const isMatch = picomatch$2(patterns, options);
	return isMatch(str);
};
|
|
1696
|
+
/**
|
|
1697
|
+
* Parse a glob pattern to create the source string for a regular
|
|
1698
|
+
* expression.
|
|
1699
|
+
*
|
|
1700
|
+
* ```js
|
|
1701
|
+
* const picomatch = require('picomatch');
|
|
1702
|
+
* const result = picomatch.parse(pattern[, options]);
|
|
1703
|
+
* ```
|
|
1704
|
+
* @param {String} `pattern`
|
|
1705
|
+
* @param {Object} `options`
|
|
1706
|
+
* @return {Object} Returns an object with useful properties and output to be used as a regex source string.
|
|
1707
|
+
* @api public
|
|
1708
|
+
*/
|
|
1709
|
+
picomatch$2.parse = (pattern, options) => {
	// Arrays are parsed element-by-element.
	if (Array.isArray(pattern)) {
		return pattern.map((p) => picomatch$2.parse(p, options));
	}
	// Fast paths are disabled so callers always receive a full token state.
	const parseOpts = { ...options, fastpaths: false };
	return parse$2(pattern, parseOpts);
};
|
|
1716
|
+
/**
|
|
1717
|
+
* Scan a glob pattern to separate the pattern into segments.
|
|
1718
|
+
*
|
|
1719
|
+
* ```js
|
|
1720
|
+
* const picomatch = require('picomatch');
|
|
1721
|
+
* // picomatch.scan(input[, options]);
|
|
1722
|
+
*
|
|
1723
|
+
* const result = picomatch.scan('!./foo/*.js');
|
|
1724
|
+
* console.log(result);
|
|
1725
|
+
* { prefix: '!./',
|
|
1726
|
+
* input: '!./foo/*.js',
|
|
1727
|
+
* start: 3,
|
|
1728
|
+
* base: 'foo',
|
|
1729
|
+
* glob: '*.js',
|
|
1730
|
+
* isBrace: false,
|
|
1731
|
+
* isBracket: false,
|
|
1732
|
+
* isGlob: true,
|
|
1733
|
+
* isExtglob: false,
|
|
1734
|
+
* isGlobstar: false,
|
|
1735
|
+
* negated: true }
|
|
1736
|
+
* ```
|
|
1737
|
+
* @param {String} `input` Glob pattern to scan.
|
|
1738
|
+
* @param {Object} `options`
|
|
1739
|
+
* @return {Object} Returns an object with
|
|
1740
|
+
* @api public
|
|
1741
|
+
*/
|
|
1742
|
+
// Thin delegation to the standalone scanner module.
picomatch$2.scan = (input, options) => {
	return scan(input, options);
};
|
|
1743
|
+
/**
|
|
1744
|
+
* Compile a regular expression from the `state` object returned by the
|
|
1745
|
+
* [parse()](#parse) method.
|
|
1746
|
+
*
|
|
1747
|
+
* @param {Object} `state`
|
|
1748
|
+
* @param {Object} `options`
|
|
1749
|
+
* @param {Boolean} `returnOutput` Intended for implementors, this argument allows you to return the raw output from the parser.
|
|
1750
|
+
* @param {Boolean} `returnState` Adds the state to a `state` property on the returned regex. Useful for implementors and debugging.
|
|
1751
|
+
* @return {RegExp}
|
|
1752
|
+
* @api public
|
|
1753
|
+
*/
|
|
1754
|
+
picomatch$2.compileRe = (state, options, returnOutput = false, returnState = false) => {
	// Implementor escape hatch: hand back the raw parser output string.
	if (returnOutput === true) {
		return state.output;
	}
	const opts = options || {};
	// With `contains`, the pattern may match anywhere, so skip the anchors.
	const head = opts.contains ? "" : "^";
	const tail = opts.contains ? "" : "$";
	let source = `${head}(?:${state.output})${tail}`;
	// A negated pattern matches any string the positive pattern does NOT match.
	if (state && state.negated === true) {
		source = `^(?!${source}).*$`;
	}
	const regex = picomatch$2.toRegex(source, options);
	// Optionally attach the parser state for implementors / debugging.
	if (returnState === true) {
		regex.state = state;
	}
	return regex;
};
|
|
1771
|
+
/**
 * Create a regular expression from a glob pattern string.
 *
 * ```js
 * const picomatch = require('picomatch');
 * console.log(picomatch.makeRe('*.js'));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `input` Glob pattern.
 * @param {Object} `options`
 * @param {Boolean} `returnOutput` Implementors may use this to get the compiled output instead of a regex.
 * @param {Boolean} `returnState` Implementors may use this to also receive the parser state.
 * @return {RegExp} Returns a regex created from the given pattern.
 * @api public
 */
picomatch$2.makeRe = (input, options = {}, returnOutput = false, returnState = false) => {
	if (!input || typeof input !== "string") {
		throw new TypeError("Expected a non-empty string");
	}
	let parsed = { negated: false, fastpaths: true };
	// Fast paths only apply to patterns starting with "." or "*".
	const tryFastpath = options.fastpaths !== false && (input[0] === "." || input[0] === "*");
	if (tryFastpath) {
		parsed.output = parse$2.fastpaths(input, options);
	}
	// Fall back to the full parser when no fast path produced output.
	if (!parsed.output) {
		parsed = parse$2(input, options);
	}
	return picomatch$2.compileRe(parsed, options, returnOutput, returnState);
};
|
|
1805
|
+
/**
 * Create a regular expression from the given regex source string.
 *
 * ```js
 * const picomatch = require('picomatch');
 * const { output } = picomatch.parse('*.js');
 * console.log(picomatch.toRegex(output));
 * //=> /^(?:(?!\.)(?=.)[^/]*?\.js)$/
 * ```
 * @param {String} `source` Regular expression source string.
 * @param {Object} `options`
 * @return {RegExp}
 * @api public
 */
picomatch$2.toRegex = (source, options) => {
	const opts = options || {};
	try {
		return new RegExp(source, opts.flags || (opts.nocase ? "i" : ""));
	} catch (err) {
		// In debug mode surface the error; otherwise return a never-matching regex.
		if (options && options.debug === true) throw err;
		return /$^/;
	}
};
|
|
1830
|
+
/**
 * Picomatch constants.
 * `constants` is bound earlier in this bundled module — presumably the
 * vendored picomatch constants table; confirm against the bundle header.
 * @return {Object}
 */
picomatch$2.constants = constants;
/**
 * Expose "picomatch" as this CommonJS module's export.
 */
module.exports = picomatch$2;
|
|
1839
|
+
}));
|
|
1840
|
+
|
|
1841
|
+
//#endregion
|
|
1842
|
+
//#region node_modules/picomatch/index.js
|
|
1843
|
+
var require_picomatch = /* @__PURE__ */ require_chunk.__commonJSMin(((exports, module) => {
	const pico = require_picomatch$1();
	const utils = require_utils();
	/**
	 * Platform-aware wrapper around the core picomatch matcher: fills in
	 * `options.windows` (via utils.isWindows()) when the caller did not set it.
	 */
	function picomatch$1(glob, options, returnState = false) {
		// `== null` covers both null and undefined, matching the original checks.
		if (options && options.windows == null) {
			options = { ...options, windows: utils.isWindows() };
		}
		return pico(glob, options, returnState);
	}
	// Re-expose the static helpers (scan, makeRe, …) on the wrapper.
	Object.assign(picomatch$1, pico);
	module.exports = picomatch$1;
}));
|
|
1858
|
+
|
|
1859
|
+
//#endregion
|
|
1860
|
+
//#region packages/tools/src/codegen/type-filter.ts
|
|
1861
|
+
var import_picomatch = /* @__PURE__ */ require_chunk.__toESM(require_picomatch(), 1);
|
|
1862
|
+
/**
 * Compiles one exclusion rule into a predicate over { name, category }.
 * The predicate returns true when the type should be KEPT (rule does not
 * exclude it) and false when the rule's glob matches the type name.
 */
const compileRule = (rule) => {
	const matchesPattern = (0, import_picomatch.default)(rule.pattern);
	let categories = null;
	if (rule.category) {
		categories = Array.isArray(rule.category) ? rule.category : [rule.category];
	}
	return (context) => {
		// Rules limited to specific categories never exclude other categories.
		const applies = !categories || categories.includes(context.category);
		if (!applies) return true;
		return !matchesPattern(context.name);
	};
};
|
|
1872
|
+
/**
 * Builds a type-filter predicate from the user's config.
 * - missing config: keep everything
 * - function config: used as-is
 * - object config: a type is kept only when every compiled exclude rule keeps it
 */
const compileTypeFilter = (config) => {
	if (!config) return () => true;
	if (typeof config === "function") return config;
	const compiled = config.exclude.map(compileRule);
	return (context) => compiled.every((rule) => rule(context));
};
|
|
1882
|
+
/**
 * Runs the filter over every (category, name) pair and collects the names
 * the filter rejects into a Set of excluded type names.
 *
 * @param filter predicate({ name, category }) — false means "exclude"
 * @param typeNames iterable of [category, string[]] pairs
 */
const buildExclusionSet = (filter, typeNames) => {
	const excluded = new Set();
	for (const [category, names] of typeNames) {
		for (const name of names) {
			const keep = filter({ name, category });
			if (!keep) excluded.add(name);
		}
	}
	return excluded;
};
|
|
1896
|
+
|
|
1897
|
+
//#endregion
|
|
1898
|
+
//#region packages/tools/src/codegen/generator.ts
|
|
1899
|
+
// Built-in GraphQL scalars mapped to their TypeScript input/output types.
// For every built-in scalar the input and output type are the same, so the
// table is kept compact and expanded into { input, output } records.
const builtinScalarTypes$1 = new Map([
	["ID", "string"],
	["String", "string"],
	["Int", "number"],
	["Float", "number"],
	["Boolean", "boolean"]
].map(([name, tsType]) => [name, { input: tsType, output: tsType }]));
|
|
1921
|
+
/**
 * Walks a GraphQL AST type node and flattens its wrappers into `levels`
 * (outermost first), returning the innermost named type plus the levels.
 * A NON_NULL wrapper marks the next level (list or named) as nonNull.
 */
const collectTypeLevels = (type, nonNull = false, levels = []) => {
	let node = type;
	let pendingNonNull = nonNull;
	for (;;) {
		if (node.kind === graphql.Kind.NON_NULL_TYPE) {
			pendingNonNull = true;
			node = node.type;
			continue;
		}
		if (node.kind === graphql.Kind.LIST_TYPE) {
			levels.push({ kind: "list", nonNull: pendingNonNull });
			pendingNonNull = false;
			node = node.type;
			continue;
		}
		// Named type: innermost level, recursion bottom of the original version.
		levels.push({ kind: "named", nonNull: pendingNonNull });
		return { name: node.name.value, levels };
	}
};
|
|
1941
|
+
/**
 * Folds the flattened type levels (innermost-last) into a compact modifier
 * string: "!"/"?" for the named type, with "[]!"/"[]?" appended per list
 * wrapper, e.g. [Foo!]! -> "![]!".
 */
const buildTypeModifier = (levels) => {
	let modifier = "?";
	// Walk from the innermost (named) level outwards.
	for (let i = levels.length - 1; i >= 0; i -= 1) {
		const level = levels[i];
		if (level.kind === "named") {
			modifier = level.nonNull ? "!" : "?";
		} else {
			modifier = `${modifier}${level.nonNull ? "[]!" : "[]?"}`;
		}
	}
	return modifier;
};
|
|
1953
|
+
/**
 * Resolves a GraphQL AST type node to its named type plus a modifier string
 * describing nullability and list nesting.
 */
const parseTypeReference = (type) => {
	const parsed = collectTypeLevels(type);
	return { name: parsed.name, modifier: buildTypeModifier(parsed.levels) };
};
|
|
1960
|
+
// Category predicates over the schema index. A scalar name counts when it is
// either a built-in scalar or declared in the schema's scalar map.
function isScalarName$1(schema, name) {
	return builtinScalarTypes$1.has(name) || schema.scalars.has(name);
}
function isEnumName$1(schema, name) {
	return schema.enums.has(name);
}
// Unused locally (underscore prefix) but kept for interface stability.
function _isInputName(schema, name) {
	return schema.inputs.has(name);
}
function isUnionName(schema, name) {
	return schema.unions.has(name);
}
function isObjectName(schema, name) {
	return schema.objects.has(name);
}
|
|
1965
|
+
/**
 * Maps an input type kind to its deferred specifier prefix character.
 * Unknown kinds yield undefined, matching the original switch fall-through.
 */
const inputKindToChar = (kind) => {
	const chars = { scalar: "s", enum: "e", input: "i", excluded: "x" };
	return Object.hasOwn(chars, kind) ? chars[kind] : undefined;
};
|
|
1976
|
+
/**
 * Renders an input value definition as a quoted deferred specifier string:
 * "{kindChar}|{name}|{modifier}" with "|D" appended when a default exists.
 * Excluded types are emitted with the "x" prefix.
 */
const renderInputRef = (schema, definition, excluded) => {
	const { name, modifier } = parseTypeReference(definition.type);
	const defaultSuffix = definition.defaultValue ? "|D" : "";
	if (excluded.has(name)) {
		return `"x|${name}|${modifier}${defaultSuffix}"`;
	}
	// Anything that is neither a scalar nor an enum is treated as an input type.
	const kind = isScalarName$1(schema, name) ? "scalar" : isEnumName$1(schema, name) ? "enum" : "input";
	return `"${inputKindToChar(kind)}|${name}|${modifier}${defaultSuffix}"`;
};
|
|
1995
|
+
/**
 * Maps an output type kind to its deferred specifier prefix character.
 * Unknown kinds yield undefined, matching the original switch fall-through.
 */
const outputKindToChar = (kind) => {
	const chars = { scalar: "s", enum: "e", object: "o", union: "u", excluded: "x" };
	return Object.hasOwn(chars, kind) ? chars[kind] : undefined;
};
|
|
2007
|
+
/**
 * Render arguments as object-format entries for DeferredOutputFieldWithArgs.
 * Returns an array of `argName: "spec"` strings, sorted by argument name;
 * arguments whose type is excluded are omitted.
 */
const renderArgumentsObjectEntries = (schema, args, excluded) => {
	const sorted = [...args].sort((a, b) => a.name.value.localeCompare(b.name.value));
	const entries = [];
	for (const arg of sorted) {
		const { name, modifier } = parseTypeReference(arg.type);
		if (excluded.has(name)) continue;
		const kind = isScalarName$1(schema, name) ? "scalar" : isEnumName$1(schema, name) ? "enum" : "input";
		const defaultSuffix = arg.defaultValue ? "|D" : "";
		entries.push(`${arg.name.value}: "${inputKindToChar(kind)}|${name}|${modifier}${defaultSuffix}"`);
	}
	return entries;
};
|
|
2030
|
+
/**
 * Renders an argument map literal (sorted by name) for an excluded output
 * type; `args` may be null/undefined.
 */
const renderArgumentMap = (schema, args, excluded) => {
	const sorted = [...(args ?? [])].sort((a, b) => a.name.value.localeCompare(b.name.value));
	const entries = sorted.map((arg) => `${arg.name.value}: ${renderInputRef(schema, arg, excluded)}`);
	return renderPropertyLines({ entries, indentSize: 8 });
};
|
|
2037
|
+
/**
 * Renders an output field reference as `{ spec, arguments }`.
 * Excluded types use the "x" prefix and a full argument map; otherwise the
 * kind is resolved against the schema index (defaulting to scalar) and
 * arguments are rendered inline only when at least one survives exclusion.
 */
const renderOutputRef = (schema, type, args, excluded) => {
	const { name, modifier } = parseTypeReference(type);
	if (excluded.has(name)) {
		const argumentMap = renderArgumentMap(schema, args, excluded);
		return `{ spec: "x|${name}|${modifier}", arguments: ${argumentMap} }`;
	}
	const resolveKind = () => {
		if (isScalarName$1(schema, name)) return "scalar";
		if (isEnumName$1(schema, name)) return "enum";
		if (isUnionName(schema, name)) return "union";
		if (isObjectName(schema, name)) return "object";
		// Unknown names fall back to scalar, as in the original chain.
		return "scalar";
	};
	const spec = `${outputKindToChar(resolveKind())}|${name}|${modifier}`;
	if (args && args.length > 0) {
		const argEntries = renderArgumentsObjectEntries(schema, args, excluded);
		if (argEntries.length > 0) {
			return `{ spec: "${spec}", arguments: { ${argEntries.join(", ")} } }`;
		}
	}
	return `{ spec: "${spec}", arguments: {} }`;
};
|
|
2065
|
+
/**
 * Renders entries as a multi-line object literal with trailing commas.
 * `indentSize` indents each entry; the closing brace is indented two fewer
 * spaces. Empty input renders as "{}".
 */
const renderPropertyLines = ({ entries, indentSize }) => {
	if (entries.length === 0) return "{}";
	const indent = " ".repeat(indentSize);
	const closeIndent = " ".repeat(indentSize - 2);
	const body = entries.map((entry) => `${indent}${entry}`).join(",\n");
	return `{\n${body},\n${closeIndent}}`;
};
|
|
2077
|
+
/**
 * Renders an object type's fields (sorted by field name) as a property-lines
 * object literal of output references.
 */
const renderObjectFields = (schema, fields, excluded) => {
	const entries = [...fields.values()]
		.sort((a, b) => a.name.value.localeCompare(b.name.value))
		.map((field) => `${field.name.value}: ${renderOutputRef(schema, field.type, field.arguments, excluded)}`);
	return renderPropertyLines({ entries, indentSize: 6 });
};
|
|
2084
|
+
/**
 * Renders an input type's fields (sorted by field name) as a property-lines
 * object literal of input references.
 */
const renderInputFields = (schema, fields, excluded) => {
	const entries = [...fields.values()]
		.sort((a, b) => a.name.value.localeCompare(b.name.value))
		.map((field) => `${field.name.value}: ${renderInputRef(schema, field, excluded)}`);
	return renderPropertyLines({ entries, indentSize: 6 });
};
|
|
2091
|
+
/**
 * Renders a `const scalar_{schema}_{name} = …` declaration string.
 * Unknown (custom) scalars default to string for both input and output.
 */
const renderScalarVar = (schemaName, record) => {
	const { input, output } = builtinScalarTypes$1.get(record.name) ?? { input: "string", output: "string" };
	const typeShape = `{ input: ${input}; output: ${output}; inputProfile: { kind: "scalar"; name: "${record.name}"; value: ${input} }; outputProfile: { kind: "scalar"; name: "${record.name}"; value: ${output} } }`;
	return `const scalar_${schemaName}_${record.name} = { name: "${record.name}", $type: {} as ${typeShape} } as const;`;
};
|
|
2098
|
+
/**
 * Renders a `const enum_{schema}_{name} = defineEnum<…>(…)` declaration,
 * with value names sorted; empty enums render as `never` / `{}`.
 */
const renderEnumVar = (schemaName, record) => {
	const names = [...record.values.values()]
		.sort((a, b) => a.name.value.localeCompare(b.name.value))
		.map((value) => value.name.value);
	const valuesObj = names.length ? `{ ${names.map((v) => `${v}: true`).join(", ")} }` : "{}";
	const valueUnion = names.length ? names.map((v) => `"${v}"`).join(" | ") : "never";
	return `const enum_${schemaName}_${record.name} = defineEnum<"${record.name}", ${valueUnion}>("${record.name}", ${valuesObj});`;
};
|
|
2104
|
+
/**
 * Renders a `const input_{schema}_{name} = …` declaration string.
 */
const renderInputVar = (schemaName, schema, record, excluded) =>
	`const input_${schemaName}_${record.name} = { name: "${record.name}", fields: ${renderInputFields(schema, record.fields, excluded)} } as const;`;
|
|
2108
|
+
/**
 * Renders a `const object_{schema}_{name} = …` declaration string.
 */
const renderObjectVar = (schemaName, schema, record, excluded) =>
	`const object_${schemaName}_${record.name} = { name: "${record.name}", fields: ${renderObjectFields(schema, record.fields, excluded)} } as const;`;
|
|
2112
|
+
/**
 * Renders a `const union_{schema}_{name} = …` declaration, keeping only
 * members that are not excluded, sorted by member name.
 */
const renderUnionVar = (schemaName, record, excluded) => {
	const memberNames = [...record.members.values()]
		.filter((member) => !excluded.has(member.name.value))
		.sort((a, b) => a.name.value.localeCompare(b.name.value))
		.map((member) => member.name.value);
	const typesObj = memberNames.length ? `{ ${memberNames.map((m) => `${m}: true`).join(", ")} }` : "{}";
	return `const union_${schemaName}_${record.name} = { name: "${record.name}", types: ${typesObj} } as const;`;
};
|
|
2117
|
+
// Sorted, introspection-free ("__"-prefixed names dropped) name collectors
// for each schema category. Directive names are not filtered, matching the
// original behavior.
function collectObjectTypeNames(schema) {
	return [...schema.objects.keys()].filter((name) => !name.startsWith("__")).sort((a, b) => a.localeCompare(b));
}
function collectInputTypeNames(schema) {
	return [...schema.inputs.keys()].filter((name) => !name.startsWith("__")).sort((a, b) => a.localeCompare(b));
}
function collectEnumTypeNames(schema) {
	return [...schema.enums.keys()].filter((name) => !name.startsWith("__")).sort((a, b) => a.localeCompare(b));
}
function collectUnionTypeNames(schema) {
	return [...schema.unions.keys()].filter((name) => !name.startsWith("__")).sort((a, b) => a.localeCompare(b));
}
function collectScalarNames(schema) {
	return [...schema.scalars.keys()].filter((name) => !name.startsWith("__")).sort((a, b) => a.localeCompare(b));
}
function collectDirectiveNames(schema) {
	return [...schema.directives.keys()].sort((a, b) => a.localeCompare(b));
}
|
|
2123
|
+
// Renders a single `TypeName: factory("kind", "TypeName")` method entry.
const renderInputTypeMethod = (factoryVar, kind, typeName) => `${typeName}: ${factoryVar}("${kind}", "${typeName}")`;
/**
 * Renders the full input-type method object: built-in scalars first, then
 * custom scalars, enums and inputs — excluded names dropped — merged and
 * sorted by the method name (the part before the first ":").
 */
const renderInputTypeMethods = (schema, factoryVar, excluded) => {
	const visible = (names) => names.filter((name) => !excluded.has(name));
	const customScalars = collectScalarNames(schema).filter((name) => !builtinScalarTypes$1.has(name));
	const scalarNames = [...builtinScalarTypes$1.keys(), ...customScalars];
	const methodsFor = (kind, names) => visible(names).map((name) => renderInputTypeMethod(factoryVar, kind, name));
	const allMethods = [
		...methodsFor("scalar", scalarNames),
		...methodsFor("enum", collectEnumTypeNames(schema)),
		...methodsFor("input", collectInputTypeNames(schema))
	].sort((left, right) => {
		const leftName = left.split(":")[0] ?? "";
		const rightName = right.split(":")[0] ?? "";
		return leftName.localeCompare(rightName);
	});
	return renderPropertyLines({ entries: allMethods, indentSize: 2 });
};
|
|
2142
|
+
/**
 * Renders an input reference as a deferred string for directive arguments.
 * Format: "{kindChar}|{name}|{modifier}". Excluded types yield null so the
 * caller can drop the argument entirely.
 */
const renderDeferredDirectiveArgRef = (schema, definition, excluded) => {
	const { name, modifier } = parseTypeReference(definition.type);
	if (excluded.has(name)) return null;
	const kind = isScalarName$1(schema, name) ? "scalar" : isEnumName$1(schema, name) ? "enum" : "input";
	return `"${inputKindToChar(kind)}|${name}|${modifier}"`;
};
|
|
2162
|
+
/**
 * Renders argument specifiers for a directive, sorted by argument name.
 * Returns null when the directive has no arguments or when every argument's
 * type is excluded. Uses the deferred string format for consistency with the
 * other type specifiers.
 */
const renderDirectiveArgsSpec = (schema, args, excluded) => {
	if (args.size === 0) return null;
	const sorted = [...args.values()].sort((a, b) => a.name.value.localeCompare(b.name.value));
	const entries = [];
	for (const arg of sorted) {
		const ref = renderDeferredDirectiveArgRef(schema, arg, excluded);
		if (ref) entries.push(`${arg.name.value}: ${ref}`);
	}
	if (entries.length === 0) return null;
	return renderPropertyLines({ entries, indentSize: 4 });
};
|
|
2179
|
+
/**
 * Renders one directive method entry: the untyped factory when the directive
 * has no (surviving) arguments, otherwise the typed factory with an args spec.
 */
const renderDirectiveMethod = (schema, record, excluded) => {
	const locationsJson = JSON.stringify(record.locations);
	const argsSpec = renderDirectiveArgsSpec(schema, record.args, excluded);
	return argsSpec === null
		? `${record.name}: createDirectiveMethod("${record.name}", ${locationsJson} as const)`
		: `${record.name}: createTypedDirectiveMethod("${record.name}", ${locationsJson} as const, ${argsSpec})`;
};
|
|
2187
|
+
/**
 * Renders the object literal containing all directive methods (sorted by
 * directive name); "{}" when the schema declares none.
 */
const renderDirectiveMethods = (schema, excluded) => {
	const directiveNames = collectDirectiveNames(schema);
	if (directiveNames.length === 0) return "{}";
	const methods = [];
	for (const name of directiveNames) {
		const record = schema.directives.get(name);
		if (record) methods.push(renderDirectiveMethod(schema, record, excluded));
	}
	return renderPropertyLines({ entries: methods, indentSize: 2 });
};
|
|
2201
|
+
/**
 * Generates the _internal-injects.ts module code.
 * This file contains only adapter imports (scalar, adapter) to keep it
 * lightweight; the heavy schema types remain in _internal.ts.
 */
const generateInjectsCode = (injection) => {
	const imports = [];
	const valueExports = [];
	const typeExports = [];
	const importsByPath = new Map();
	// Get-or-create the specifier list for an import path, so imports from
	// the same module are merged into one statement.
	const specifiersFor = (path) => {
		let specifiers = importsByPath.get(path);
		if (specifiers === undefined) {
			specifiers = [];
			importsByPath.set(path, specifiers);
		}
		return specifiers;
	};
	for (const [schemaName, config] of injection) {
		const scalarAlias = `scalar_${schemaName}`;
		specifiersFor(config.scalarImportPath).push(`scalar as ${scalarAlias}`);
		valueExports.push(`export { ${scalarAlias} };`);
		typeExports.push(`export type Scalar_${schemaName} = typeof ${scalarAlias};`);
		if (config.adapterImportPath) {
			const adapterAlias = `adapter_${schemaName}`;
			specifiersFor(config.adapterImportPath).push(`adapter as ${adapterAlias}`);
			valueExports.push(`export { ${adapterAlias} };`);
			typeExports.push(`export type Adapter_${schemaName} = typeof ${adapterAlias} & { _?: never };`);
		}
	}
	for (const [path, specifiers] of importsByPath) {
		imports.push(specifiers.length === 1
			? `import { ${specifiers[0]} } from "${path}";`
			: `import {\n ${specifiers.join(",\n ")},\n} from "${path}";`);
	}
	return `\
/**
 * Adapter injections for schema.
 * Separated to allow lightweight imports for prebuilt module.
 * @generated by @soda-gql/tools/codegen
 */

${imports.join("\n")}

// Value exports
${valueExports.join("\n")}

// Type exports
${typeExports.join("\n")}
`;
};
|
|
2254
|
+
/**
 * Renders the generated runtime module for a multi-schema configuration.
 * For each schema it assembles (as source text): category imports from the
 * split files, the schema object, the var-method factory, input-type method
 * table, directive table, and the `__gql_{name}` composer exports.
 *
 * `$$` carries `injection` (mode + per-schema adapter/scalar config),
 * `splitting.importPaths`, and `schemas` (per-schema render config).
 * Returns the complete TypeScript module as a single string.
 *
 * NOTE(review): literal indentation inside the emitted templates could not be
 * fully recovered from this view — confirm against the original formatting.
 */
const multiRuntimeTemplate = ($$) => {
	const imports = [];
	const scalarAliases = new Map();
	const adapterAliases = new Map();
	// In "inject" mode, scalars (and optionally adapters) are imported from
	// the generated injects module instead of being declared inline.
	if ($$.injection.mode === "inject") {
		const injectsImports = [];
		for (const [schemaName, injection] of $$.injection.perSchema) {
			const scalarAlias = `scalar_${schemaName}`;
			scalarAliases.set(schemaName, scalarAlias);
			injectsImports.push(scalarAlias);
			if (injection.adapterImportPath) {
				const adapterAlias = `adapter_${schemaName}`;
				adapterAliases.set(schemaName, adapterAlias);
				injectsImports.push(adapterAlias);
			}
		}
		imports.push(`import { ${injectsImports.join(", ")} } from "${$$.injection.injectsModulePath}";`);
	}
	// Category imports (enums/inputs/objects/unions) come from the split files.
	{
		const { importPaths } = $$.splitting;
		for (const [name, config] of Object.entries($$.schemas)) {
			if (config.enumNames.length > 0) {
				const enumImports = config.enumNames.map((n) => `enum_${name}_${n}`).join(", ");
				imports.push(`import { ${enumImports} } from "${importPaths.enums}";`);
			}
			if (config.inputNames.length > 0) {
				const inputImports = config.inputNames.map((n) => `input_${name}_${n}`).join(", ");
				imports.push(`import { ${inputImports} } from "${importPaths.inputs}";`);
			}
			if (config.objectNames.length > 0) {
				const objectImports = config.objectNames.map((n) => `object_${name}_${n}`).join(", ");
				imports.push(`import { ${objectImports} } from "${importPaths.objects}";`);
			}
			if (config.unionNames.length > 0) {
				const unionImports = config.unionNames.map((n) => `union_${name}_${n}`).join(", ");
				imports.push(`import { ${unionImports} } from "${importPaths.unions}";`);
			}
		}
	}
	const extraImports = imports.length > 0 ? `${imports.join("\n")}\n` : "";
	const schemaBlocks = [];
	const gqlExports = [];
	for (const [name, config] of Object.entries($$.schemas)) {
		const schemaVar = `${name}Schema`;
		const adapterVar = adapterAliases.get(name);
		const typeExports = [`export type Schema_${name} = typeof ${schemaVar} & { _?: never };`];
		if (adapterVar) {
			typeExports.push(`export type Adapter_${name} = typeof ${adapterVar} & { _?: never };`);
		}
		const inputTypeMethodsVar = `inputTypeMethods_${name}`;
		const factoryVar = `createMethod_${name}`;
		const customDirectivesVar = `customDirectives_${name}`;
		// Only emit depth knobs when they differ from the defaults (3 / empty).
		const defaultDepthBlock = config.defaultInputDepth !== undefined && config.defaultInputDepth !== 3 ? `\n __defaultInputDepth: ${config.defaultInputDepth},` : "";
		const depthOverridesBlock = config.inputDepthOverrides && Object.keys(config.inputDepthOverrides).length > 0 ? `\n __inputDepthOverrides: ${JSON.stringify(config.inputDepthOverrides)},` : "";
		// Split mode is currently always on: category vars are imported, not inlined.
		const isSplitMode = true;
		const scalarVarsBlock = config.scalarVars.join("\n");
		const enumVarsBlock = isSplitMode ? "// (enums imported)" : config.enumVars.length > 0 ? config.enumVars.join("\n") : "// (no enums)";
		const inputVarsBlock = isSplitMode ? "// (inputs imported)" : config.inputVars.length > 0 ? config.inputVars.join("\n") : "// (no inputs)";
		const objectVarsBlock = isSplitMode ? "// (objects imported)" : config.objectVars.length > 0 ? config.objectVars.join("\n") : "// (no objects)";
		const unionVarsBlock = isSplitMode ? "// (unions imported)" : config.unionVars.length > 0 ? config.unionVars.join("\n") : "// (no unions)";
		const scalarAssembly = $$.injection.mode === "inject" ? scalarAliases.get(name) ?? "{}" : config.scalarNames.length > 0 ? `{ ${config.scalarNames.map((n) => `${n}: scalar_${name}_${n}`).join(", ")} }` : "{}";
		const enumAssembly = config.enumNames.length > 0 ? `{ ${config.enumNames.map((n) => `${n}: enum_${name}_${n}`).join(", ")} }` : "{}";
		const inputAssembly = config.inputNames.length > 0 ? `{ ${config.inputNames.map((n) => `${n}: input_${name}_${n}`).join(", ")} }` : "{}";
		const objectAssembly = config.objectNames.length > 0 ? `{ ${config.objectNames.map((n) => `${n}: object_${name}_${n}`).join(", ")} }` : "{}";
		const unionAssembly = config.unionNames.length > 0 ? `{ ${config.unionNames.map((n) => `${n}: union_${name}_${n}`).join(", ")} }` : "{}";
		const scalarVarsSection = $$.injection.mode === "inject" ? "// (scalars imported)" : scalarVarsBlock;
		const scalarAssemblyLine = $$.injection.mode === "inject" ? `// scalar_${name} is imported directly` : `const scalar_${name} = ${scalarAssembly} as const;`;
		const scalarRef = $$.injection.mode === "inject" ? scalarAliases.get(name) ?? `scalar_${name}` : `scalar_${name}`;
		// Main per-schema source block: category assembly + schema object + helpers.
		schemaBlocks.push(`
// Individual scalar definitions
${scalarVarsSection}

// Individual enum definitions
${enumVarsBlock}

// Individual input definitions
${inputVarsBlock}

// Individual object definitions
${objectVarsBlock}

// Individual union definitions
${unionVarsBlock}

// Category assembly
${scalarAssemblyLine}
const enum_${name} = ${enumAssembly} as const;
const input_${name} = ${inputAssembly} as const;
const object_${name} = ${objectAssembly} as const;
const union_${name} = ${unionAssembly} as const;

// Schema assembly
const ${schemaVar} = {
 label: "${name}" as const,
 operations: { query: "${config.queryType}", mutation: "${config.mutationType}", subscription: "${config.subscriptionType}" } as const,
 scalar: ${scalarRef},
 enum: enum_${name},
 input: input_${name},
 object: object_${name},
 union: union_${name},${defaultDepthBlock}${depthOverridesBlock}
} as const satisfies AnyGraphqlSchema;

const ${factoryVar} = createVarMethodFactory<typeof ${schemaVar}>();
const ${inputTypeMethodsVar} = ${config.inputTypeMethodsBlock};
const ${customDirectivesVar} = { ...createStandardDirectives(), ...${config.directiveMethodsBlock} };

${typeExports.join("\n")}`);
		const gqlVarName = `gql_${name}`;
		// The composer gets an extra Adapter type parameter when an adapter exists.
		if (adapterVar) {
			const typeParams = `<Schema_${name}, typeof ${customDirectivesVar}, Adapter_${name}>`;
			schemaBlocks.push(`const ${gqlVarName} = createGqlElementComposer${typeParams}(${schemaVar}, { adapter: ${adapterVar}, inputTypeMethods: ${inputTypeMethodsVar}, directiveMethods: ${customDirectivesVar} });`);
		} else {
			const typeParams = `<Schema_${name}, typeof ${customDirectivesVar}>`;
			schemaBlocks.push(`const ${gqlVarName} = createGqlElementComposer${typeParams}(${schemaVar}, { inputTypeMethods: ${inputTypeMethodsVar}, directiveMethods: ${customDirectivesVar} });`);
		}
		schemaBlocks.push(`export type Context_${name} = Parameters<typeof ${gqlVarName}>[0] extends (ctx: infer C) => unknown ? C : never;`);
		// Dunder-prefixed re-exports consumed by the prebuilt module.
		const prebuiltExports = [
			`export { ${schemaVar} as __schema_${name} }`,
			`export { ${inputTypeMethodsVar} as __inputTypeMethods_${name} }`,
			`export { ${customDirectivesVar} as __directiveMethods_${name} }`
		];
		if (adapterVar) {
			prebuiltExports.push(`export { ${adapterVar} as __adapter_${name} }`);
		}
		schemaBlocks.push(`${prebuiltExports.join(";\n")};`);
		gqlExports.push(`export { ${gqlVarName} as __gql_${name} }`);
	}
	// defineEnum is never needed here (enums are imported in split mode).
	const needsDefineEnum = false;
	return `\
import {${needsDefineEnum ? "\n defineEnum," : ""}
 type AnyGraphqlSchema,
 createDirectiveMethod,
 createTypedDirectiveMethod,
 createGqlElementComposer,
 createStandardDirectives,
 createVarMethodFactory,
} from "@soda-gql/core";
${extraImports}
${schemaBlocks.join("\n")}

${gqlExports.join(";\n")};
`;
};
|
|
2397
|
+
const generateMultiSchemaModule = (schemas, options) => {
|
|
2398
|
+
const schemaConfigs = {};
|
|
2399
|
+
const allStats = {
|
|
2400
|
+
objects: 0,
|
|
2401
|
+
enums: 0,
|
|
2402
|
+
inputs: 0,
|
|
2403
|
+
unions: 0
|
|
2404
|
+
};
|
|
2405
|
+
for (const [name, document] of schemas.entries()) {
|
|
2406
|
+
const schema = (0, __soda_gql_core.createSchemaIndex)(document);
|
|
2407
|
+
const typeFilterConfig = options?.typeFilters?.get(name);
|
|
2408
|
+
const typeFilter = compileTypeFilter(typeFilterConfig);
|
|
2409
|
+
const allTypeNames = new Map([
|
|
2410
|
+
["object", Array.from(schema.objects.keys()).filter((n) => !n.startsWith("__"))],
|
|
2411
|
+
["input", Array.from(schema.inputs.keys()).filter((n) => !n.startsWith("__"))],
|
|
2412
|
+
["enum", Array.from(schema.enums.keys()).filter((n) => !n.startsWith("__"))],
|
|
2413
|
+
["union", Array.from(schema.unions.keys()).filter((n) => !n.startsWith("__"))],
|
|
2414
|
+
["scalar", Array.from(schema.scalars.keys()).filter((n) => !n.startsWith("__"))]
|
|
2415
|
+
]);
|
|
2416
|
+
const excluded = buildExclusionSet(typeFilter, allTypeNames);
|
|
2417
|
+
const objectTypeNames = collectObjectTypeNames(schema).filter((n) => !excluded.has(n));
|
|
2418
|
+
const enumTypeNames = collectEnumTypeNames(schema).filter((n) => !excluded.has(n));
|
|
2419
|
+
const inputTypeNames = collectInputTypeNames(schema).filter((n) => !excluded.has(n));
|
|
2420
|
+
const unionTypeNames = collectUnionTypeNames(schema).filter((n) => !excluded.has(n));
|
|
2421
|
+
const customScalarNames = collectScalarNames(schema).filter((n) => !builtinScalarTypes$1.has(n) && !excluded.has(n));
|
|
2422
|
+
const scalarVars = [];
|
|
2423
|
+
const enumVars = [];
|
|
2424
|
+
const inputVars = [];
|
|
2425
|
+
const objectVars = [];
|
|
2426
|
+
const unionVars = [];
|
|
2427
|
+
for (const scalarName of builtinScalarTypes$1.keys()) {
|
|
2428
|
+
const record = schema.scalars.get(scalarName) ?? {
|
|
2429
|
+
name: scalarName,
|
|
2430
|
+
directives: []
|
|
2431
|
+
};
|
|
2432
|
+
scalarVars.push(renderScalarVar(name, record));
|
|
2433
|
+
}
|
|
2434
|
+
for (const scalarName of customScalarNames) {
|
|
2435
|
+
const record = schema.scalars.get(scalarName);
|
|
2436
|
+
if (record) {
|
|
2437
|
+
scalarVars.push(renderScalarVar(name, record));
|
|
2438
|
+
}
|
|
2439
|
+
}
|
|
2440
|
+
for (const enumName of enumTypeNames) {
|
|
2441
|
+
const record = schema.enums.get(enumName);
|
|
2442
|
+
if (record) {
|
|
2443
|
+
enumVars.push(renderEnumVar(name, record));
|
|
2444
|
+
}
|
|
2445
|
+
}
|
|
2446
|
+
for (const inputName of inputTypeNames) {
|
|
2447
|
+
const record = schema.inputs.get(inputName);
|
|
2448
|
+
if (record) {
|
|
2449
|
+
inputVars.push(renderInputVar(name, schema, record, excluded));
|
|
2450
|
+
}
|
|
2451
|
+
}
|
|
2452
|
+
for (const objectName of objectTypeNames) {
|
|
2453
|
+
const record = schema.objects.get(objectName);
|
|
2454
|
+
if (record) {
|
|
2455
|
+
objectVars.push(renderObjectVar(name, schema, record, excluded));
|
|
2456
|
+
}
|
|
2457
|
+
}
|
|
2458
|
+
for (const unionName of unionTypeNames) {
|
|
2459
|
+
const record = schema.unions.get(unionName);
|
|
2460
|
+
if (record) {
|
|
2461
|
+
unionVars.push(renderUnionVar(name, record, excluded));
|
|
2462
|
+
}
|
|
2463
|
+
}
|
|
2464
|
+
const allScalarNames = [...builtinScalarTypes$1.keys(), ...customScalarNames];
|
|
2465
|
+
const factoryVar = `createMethod_${name}`;
|
|
2466
|
+
const inputTypeMethodsBlock = renderInputTypeMethods(schema, factoryVar, excluded);
|
|
2467
|
+
const directiveMethodsBlock = renderDirectiveMethods(schema, excluded);
|
|
2468
|
+
const queryType = schema.operationTypes.query ?? "Query";
|
|
2469
|
+
const mutationType = schema.operationTypes.mutation ?? "Mutation";
|
|
2470
|
+
const subscriptionType = schema.operationTypes.subscription ?? "Subscription";
|
|
2471
|
+
schemaConfigs[name] = {
|
|
2472
|
+
queryType,
|
|
2473
|
+
mutationType,
|
|
2474
|
+
subscriptionType,
|
|
2475
|
+
scalarVars,
|
|
2476
|
+
enumVars,
|
|
2477
|
+
inputVars,
|
|
2478
|
+
objectVars,
|
|
2479
|
+
unionVars,
|
|
2480
|
+
scalarNames: allScalarNames,
|
|
2481
|
+
enumNames: enumTypeNames,
|
|
2482
|
+
inputNames: inputTypeNames,
|
|
2483
|
+
objectNames: objectTypeNames,
|
|
2484
|
+
unionNames: unionTypeNames,
|
|
2485
|
+
inputTypeMethodsBlock,
|
|
2486
|
+
directiveMethodsBlock,
|
|
2487
|
+
defaultInputDepth: options?.defaultInputDepth?.get(name),
|
|
2488
|
+
inputDepthOverrides: options?.inputDepthOverrides?.get(name)
|
|
2489
|
+
};
|
|
2490
|
+
allStats.objects += objectVars.length;
|
|
2491
|
+
allStats.enums += enumVars.length;
|
|
2492
|
+
allStats.inputs += inputVars.length;
|
|
2493
|
+
allStats.unions += unionVars.length;
|
|
2494
|
+
}
|
|
2495
|
+
const injection = options?.injection ? {
|
|
2496
|
+
mode: "inject",
|
|
2497
|
+
perSchema: options.injection,
|
|
2498
|
+
injectsModulePath: "./_internal-injects"
|
|
2499
|
+
} : { mode: "inline" };
|
|
2500
|
+
const splitting = { importPaths: {
|
|
2501
|
+
enums: "./_defs/enums",
|
|
2502
|
+
inputs: "./_defs/inputs",
|
|
2503
|
+
objects: "./_defs/objects",
|
|
2504
|
+
unions: "./_defs/unions"
|
|
2505
|
+
} };
|
|
2506
|
+
const code = multiRuntimeTemplate({
|
|
2507
|
+
schemas: schemaConfigs,
|
|
2508
|
+
injection,
|
|
2509
|
+
splitting
|
|
2510
|
+
});
|
|
2511
|
+
const injectsCode = options?.injection ? generateInjectsCode(options.injection) : undefined;
|
|
2512
|
+
const categoryVarsResult = Object.fromEntries(Object.entries(schemaConfigs).map(([schemaName, config]) => {
|
|
2513
|
+
const toDefVar = (code$1, prefix) => {
|
|
2514
|
+
const match = code$1.match(new RegExp(`const (${prefix}_${schemaName}_(\\w+))`));
|
|
2515
|
+
return {
|
|
2516
|
+
name: match?.[1] ?? "",
|
|
2517
|
+
code: code$1
|
|
2518
|
+
};
|
|
2519
|
+
};
|
|
2520
|
+
return [schemaName, {
|
|
2521
|
+
enums: config.enumVars.map((c) => toDefVar(c, "enum")),
|
|
2522
|
+
inputs: config.inputVars.map((c) => toDefVar(c, "input")),
|
|
2523
|
+
objects: config.objectVars.map((c) => toDefVar(c, "object")),
|
|
2524
|
+
unions: config.unionVars.map((c) => toDefVar(c, "union"))
|
|
2525
|
+
}];
|
|
2526
|
+
}));
|
|
2527
|
+
return {
|
|
2528
|
+
code,
|
|
2529
|
+
injectsCode,
|
|
2530
|
+
categoryVars: categoryVarsResult,
|
|
2531
|
+
stats: allStats
|
|
2532
|
+
};
|
|
2533
|
+
};
|
|
2534
|
+
/**
|
|
2535
|
+
* Generate a stub `types.prebuilt.ts` file with empty PrebuiltTypes registries.
|
|
2536
|
+
* This stub is only written when `types.prebuilt.ts` does not already exist.
|
|
2537
|
+
* Typegen will later overwrite it with the real type registry.
|
|
2538
|
+
*/
|
|
2539
|
+
const generatePrebuiltStub = (schemaNames) => {
|
|
2540
|
+
const typeDeclarations = schemaNames.map((name) => `export type PrebuiltTypes_${name} = {
|
|
2541
|
+
readonly fragments: {};
|
|
2542
|
+
readonly operations: {};
|
|
2543
|
+
};`).join("\n\n");
|
|
2544
|
+
return `\
|
|
2545
|
+
/**
|
|
2546
|
+
* Prebuilt type registry stub.
|
|
2547
|
+
*
|
|
2548
|
+
* This file was generated by @soda-gql/tools/codegen as an empty stub.
|
|
2549
|
+
* Run 'soda-gql typegen' to populate with real prebuilt types.
|
|
2550
|
+
*
|
|
2551
|
+
* @module
|
|
2552
|
+
* @generated
|
|
2553
|
+
*/
|
|
2554
|
+
|
|
2555
|
+
${typeDeclarations}
|
|
2556
|
+
`;
|
|
2557
|
+
};
|
|
2558
|
+
/**
|
|
2559
|
+
* Generate the `index.ts` module that re-exports from `_internal`
|
|
2560
|
+
* and constructs the `gql` object from individual `__gql_*` exports.
|
|
2561
|
+
*
|
|
2562
|
+
* The `gql` object preserves the original inferred types from schema inference.
|
|
2563
|
+
* PrebuiltContext types will be integrated once the type resolution strategy
|
|
2564
|
+
* is redesigned to match the tagged template runtime API.
|
|
2565
|
+
*/
|
|
2566
|
+
const generateIndexModule = (schemaNames, allFieldNames) => {
|
|
2567
|
+
const gqlImports = schemaNames.map((name) => `__gql_${name}`).join(", ");
|
|
2568
|
+
const prebuiltImports = schemaNames.map((name) => `PrebuiltTypes_${name}`).join(", ");
|
|
2569
|
+
const schemaTypeImports = schemaNames.map((name) => `Schema_${name}`).join(", ");
|
|
2570
|
+
const directiveImports = schemaNames.map((name) => `__directiveMethods_${name}`).join(", ");
|
|
2571
|
+
const perSchemaTypes = schemaNames.map((name) => `
|
|
2572
|
+
type ResolveFragmentAtBuilder_${name}<TKey extends string> =
|
|
2573
|
+
TKey extends keyof PrebuiltTypes_${name}["fragments"]
|
|
2574
|
+
? Fragment<
|
|
2575
|
+
PrebuiltTypes_${name}["fragments"][TKey]["typename"],
|
|
2576
|
+
PrebuiltTypes_${name}["fragments"][TKey]["input"] extends void
|
|
2577
|
+
? void
|
|
2578
|
+
: Partial<PrebuiltTypes_${name}["fragments"][TKey]["input"] & AnyConstAssignableInput>,
|
|
2579
|
+
Partial<AnyFields>,
|
|
2580
|
+
PrebuiltTypes_${name}["fragments"][TKey]["output"] & object
|
|
2581
|
+
>
|
|
2582
|
+
: Fragment<"(unknown)", PrebuiltEntryNotFound<TKey, "fragment">, Partial<AnyFields>, PrebuiltEntryNotFound<TKey, "fragment">>;
|
|
2583
|
+
|
|
2584
|
+
type ResolveOperationAtBuilder_${name}<TOperationType extends OperationType, TName extends string> =
|
|
2585
|
+
TName extends keyof PrebuiltTypes_${name}["operations"]
|
|
2586
|
+
? Operation<
|
|
2587
|
+
TOperationType,
|
|
2588
|
+
TName,
|
|
2589
|
+
string[],
|
|
2590
|
+
PrebuiltTypes_${name}["operations"][TName]["input"],
|
|
2591
|
+
Partial<AnyFields>,
|
|
2592
|
+
PrebuiltTypes_${name}["operations"][TName]["output"] & object
|
|
2593
|
+
>
|
|
2594
|
+
: Operation<
|
|
2595
|
+
TOperationType,
|
|
2596
|
+
TName,
|
|
2597
|
+
string[],
|
|
2598
|
+
PrebuiltEntryNotFound<TName, "operation">,
|
|
2599
|
+
Partial<AnyFields>,
|
|
2600
|
+
PrebuiltEntryNotFound<TName, "operation">
|
|
2601
|
+
>;
|
|
2602
|
+
|
|
2603
|
+
type PrebuiltCurriedFragment_${name} = <TKey extends string>(
|
|
2604
|
+
name: TKey,
|
|
2605
|
+
typeName: string,
|
|
2606
|
+
) => (...args: unknown[]) => (...args: unknown[]) => ResolveFragmentAtBuilder_${name}<TKey>;
|
|
2607
|
+
|
|
2608
|
+
type PrebuiltCurriedOperation_${name}<TOperationType extends OperationType> = <TName extends string>(
|
|
2609
|
+
operationName: TName,
|
|
2610
|
+
) => (...args: unknown[]) => (...args: unknown[]) => ResolveOperationAtBuilder_${name}<TOperationType, TName>;
|
|
2611
|
+
|
|
2612
|
+
type FieldFactoryFn_${name} = (...args: unknown[]) => Record<string, unknown> & ((callback: (tools: GenericFieldsBuilderTools_${name}) => Record<string, unknown>) => Record<string, unknown>);
|
|
2613
|
+
${(() => {
|
|
2614
|
+
const fieldNames = allFieldNames?.get(name);
|
|
2615
|
+
if (fieldNames && fieldNames.length > 0) {
|
|
2616
|
+
const union = fieldNames.map((n) => JSON.stringify(n)).join(" | ");
|
|
2617
|
+
return `type AllObjectFieldNames_${name} = ${union};
|
|
2618
|
+
type GenericFieldFactory_${name} = { readonly [K in AllObjectFieldNames_${name}]: FieldFactoryFn_${name} } & Record<string, FieldFactoryFn_${name}>;`;
|
|
2619
|
+
}
|
|
2620
|
+
return `type GenericFieldFactory_${name} = Record<string, FieldFactoryFn_${name}>;`;
|
|
2621
|
+
})()}
|
|
2622
|
+
type GenericFieldsBuilderTools_${name} = { readonly f: GenericFieldFactory_${name}; readonly $: Readonly<Record<string, never>> };
|
|
2623
|
+
|
|
2624
|
+
type PrebuiltCallbackOperation_${name}<TOperationType extends OperationType> = <TName extends string>(
|
|
2625
|
+
options: { name: TName; fields: (tools: GenericFieldsBuilderTools_${name}) => Record<string, unknown>; variables?: Record<string, unknown>; metadata?: (tools: { readonly $: Readonly<Record<string, never>>; readonly fragmentMetadata: unknown[] | undefined }) => Record<string, unknown> },
|
|
2626
|
+
) => ResolveOperationAtBuilder_${name}<TOperationType, TName>;
|
|
2627
|
+
|
|
2628
|
+
export type PrebuiltContext_${name} = {
|
|
2629
|
+
readonly fragment: PrebuiltCurriedFragment_${name};
|
|
2630
|
+
readonly query: PrebuiltCurriedOperation_${name}<"query"> & {
|
|
2631
|
+
readonly operation: PrebuiltCallbackOperation_${name}<"query">;
|
|
2632
|
+
readonly compat: (operationName: string) => (strings: TemplateStringsArray, ...values: never[]) => GqlDefine<unknown>;
|
|
2633
|
+
};
|
|
2634
|
+
readonly mutation: PrebuiltCurriedOperation_${name}<"mutation"> & {
|
|
2635
|
+
readonly operation: PrebuiltCallbackOperation_${name}<"mutation">;
|
|
2636
|
+
readonly compat: (operationName: string) => (strings: TemplateStringsArray, ...values: never[]) => GqlDefine<unknown>;
|
|
2637
|
+
};
|
|
2638
|
+
readonly subscription: PrebuiltCurriedOperation_${name}<"subscription"> & {
|
|
2639
|
+
readonly operation: PrebuiltCallbackOperation_${name}<"subscription">;
|
|
2640
|
+
readonly compat: (operationName: string) => (strings: TemplateStringsArray, ...values: never[]) => GqlDefine<unknown>;
|
|
2641
|
+
};
|
|
2642
|
+
readonly define: <TValue>(factory: () => TValue | Promise<TValue>) => GqlDefine<TValue>;
|
|
2643
|
+
readonly extend: (...args: unknown[]) => AnyOperation;
|
|
2644
|
+
readonly $var: VarBuilder<Schema_${name}>;
|
|
2645
|
+
readonly $dir: typeof __directiveMethods_${name};
|
|
2646
|
+
readonly $colocate: <T extends Record<string, unknown>>(projections: T) => T;
|
|
2647
|
+
};
|
|
2648
|
+
|
|
2649
|
+
type GqlComposer_${name} = {
|
|
2650
|
+
<TResult>(composeElement: (context: PrebuiltContext_${name}) => TResult): TResult;
|
|
2651
|
+
readonly $schema: AnyGraphqlSchema;
|
|
2652
|
+
};`).join("\n");
|
|
2653
|
+
const gqlEntries = schemaNames.map((name) => ` ${name}: __gql_${name} as unknown as GqlComposer_${name}`).join(",\n");
|
|
2654
|
+
return `\
|
|
2655
|
+
/**
|
|
2656
|
+
* Generated by @soda-gql/tools/codegen
|
|
2657
|
+
* @module
|
|
2658
|
+
* @generated
|
|
2659
|
+
*/
|
|
2660
|
+
|
|
2661
|
+
export * from "./_internal";
|
|
2662
|
+
import { ${gqlImports} } from "./_internal";
|
|
2663
|
+
import type { ${schemaTypeImports} } from "./_internal";
|
|
2664
|
+
import type { ${directiveImports} } from "./_internal";
|
|
2665
|
+
import type { ${prebuiltImports} } from "./types.prebuilt";
|
|
2666
|
+
import type { Fragment, Operation, OperationType, PrebuiltEntryNotFound, AnyConstAssignableInput, AnyFields, AnyGraphqlSchema, AnyOperation, VarBuilder, GqlDefine } from "@soda-gql/core";
|
|
2667
|
+
${perSchemaTypes}
|
|
2668
|
+
|
|
2669
|
+
export const gql = {
|
|
2670
|
+
${gqlEntries}
|
|
2671
|
+
};
|
|
2672
|
+
`;
|
|
2673
|
+
};
|
|
2674
|
+
|
|
2675
|
+
//#endregion
|
|
2676
|
+
//#region packages/tools/src/codegen/graphql-compat/parser.ts
|
|
2677
|
+
/**
|
|
2678
|
+
* Parser for .graphql operation files.
|
|
2679
|
+
* Extracts operations and fragments from GraphQL documents.
|
|
2680
|
+
* @module
|
|
2681
|
+
*/
|
|
2682
|
+
/**
|
|
2683
|
+
* Parse a single .graphql file and extract operations and fragments.
|
|
2684
|
+
*/
|
|
2685
|
+
const parseGraphqlFile = (filePath) => {
|
|
2686
|
+
const resolvedPath = (0, node_path.resolve)(filePath);
|
|
2687
|
+
if (!(0, node_fs.existsSync)(resolvedPath)) {
|
|
2688
|
+
return (0, neverthrow.err)({
|
|
2689
|
+
code: "GRAPHQL_FILE_NOT_FOUND",
|
|
2690
|
+
message: `GraphQL file not found at ${resolvedPath}`,
|
|
2691
|
+
filePath: resolvedPath
|
|
2692
|
+
});
|
|
2693
|
+
}
|
|
2694
|
+
try {
|
|
2695
|
+
const source = (0, node_fs.readFileSync)(resolvedPath, "utf8");
|
|
2696
|
+
const document = (0, graphql.parse)(source);
|
|
2697
|
+
return (0, neverthrow.ok)(extractFromDocument(document, resolvedPath));
|
|
2698
|
+
} catch (error) {
|
|
2699
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
2700
|
+
return (0, neverthrow.err)({
|
|
2701
|
+
code: "GRAPHQL_PARSE_ERROR",
|
|
2702
|
+
message: `GraphQL parse error: ${message}`,
|
|
2703
|
+
filePath: resolvedPath
|
|
2704
|
+
});
|
|
2705
|
+
}
|
|
2706
|
+
};
|
|
2707
|
+
/**
|
|
2708
|
+
* Parse GraphQL source string directly.
|
|
2709
|
+
*/
|
|
2710
|
+
const parseGraphqlSource = (source, sourceFile) => {
|
|
2711
|
+
try {
|
|
2712
|
+
const document = (0, graphql.parse)(source);
|
|
2713
|
+
return (0, neverthrow.ok)(extractFromDocument(document, sourceFile));
|
|
2714
|
+
} catch (error) {
|
|
2715
|
+
const message = error instanceof Error ? error.message : String(error);
|
|
2716
|
+
return (0, neverthrow.err)({
|
|
2717
|
+
code: "GRAPHQL_PARSE_ERROR",
|
|
2718
|
+
message: `GraphQL parse error: ${message}`,
|
|
2719
|
+
filePath: sourceFile
|
|
2720
|
+
});
|
|
2721
|
+
}
|
|
2722
|
+
};
|
|
2723
|
+
/**
|
|
2724
|
+
* Extract operations and fragments from a parsed GraphQL document.
|
|
2725
|
+
*/
|
|
2726
|
+
const extractFromDocument = (document, sourceFile) => {
|
|
2727
|
+
const operations = [];
|
|
2728
|
+
const fragments = [];
|
|
2729
|
+
for (const definition of document.definitions) {
|
|
2730
|
+
if (definition.kind === graphql.Kind.OPERATION_DEFINITION) {
|
|
2731
|
+
const operation = extractOperation(definition, sourceFile);
|
|
2732
|
+
if (operation) {
|
|
2733
|
+
operations.push(operation);
|
|
2734
|
+
}
|
|
2735
|
+
} else if (definition.kind === graphql.Kind.FRAGMENT_DEFINITION) {
|
|
2736
|
+
fragments.push(extractFragment(definition, sourceFile));
|
|
2737
|
+
}
|
|
2738
|
+
}
|
|
2739
|
+
return {
|
|
2740
|
+
operations,
|
|
2741
|
+
fragments
|
|
2742
|
+
};
|
|
2743
|
+
};
|
|
2744
|
+
/**
|
|
2745
|
+
* Extract a single operation from an OperationDefinitionNode.
|
|
2746
|
+
*/
|
|
2747
|
+
const extractOperation = (node, sourceFile) => {
|
|
2748
|
+
if (!node.name) {
|
|
2749
|
+
return null;
|
|
2750
|
+
}
|
|
2751
|
+
const variables = (node.variableDefinitions ?? []).map(extractVariable);
|
|
2752
|
+
const selections = extractSelections(node.selectionSet.selections);
|
|
2753
|
+
return {
|
|
2754
|
+
kind: node.operation,
|
|
2755
|
+
name: node.name.value,
|
|
2756
|
+
variables,
|
|
2757
|
+
selections,
|
|
2758
|
+
sourceFile
|
|
2759
|
+
};
|
|
2760
|
+
};
|
|
2761
|
+
/**
|
|
2762
|
+
* Extract a fragment from a FragmentDefinitionNode.
|
|
2763
|
+
*/
|
|
2764
|
+
const extractFragment = (node, sourceFile) => {
|
|
2765
|
+
const selections = extractSelections(node.selectionSet.selections);
|
|
2766
|
+
return {
|
|
2767
|
+
name: node.name.value,
|
|
2768
|
+
onType: node.typeCondition.name.value,
|
|
2769
|
+
selections,
|
|
2770
|
+
sourceFile
|
|
2771
|
+
};
|
|
2772
|
+
};
|
|
2773
|
+
/**
|
|
2774
|
+
* Extract a variable definition.
|
|
2775
|
+
*/
|
|
2776
|
+
const extractVariable = (node) => {
|
|
2777
|
+
const { typeName, modifier } = parseTypeNode(node.type);
|
|
2778
|
+
const defaultValue = node.defaultValue ? extractValue(node.defaultValue) : undefined;
|
|
2779
|
+
return {
|
|
2780
|
+
name: node.variable.name.value,
|
|
2781
|
+
typeName,
|
|
2782
|
+
modifier,
|
|
2783
|
+
typeKind: "scalar",
|
|
2784
|
+
defaultValue
|
|
2785
|
+
};
|
|
2786
|
+
};
|
|
2787
|
+
/**
|
|
2788
|
+
* Parse a GraphQL TypeNode into type name and modifier.
|
|
2789
|
+
*
|
|
2790
|
+
* Format: inner nullability + list modifiers
|
|
2791
|
+
* - Inner: `!` (non-null) or `?` (nullable)
|
|
2792
|
+
* - List: `[]!` (non-null list) or `[]?` (nullable list)
|
|
2793
|
+
*/
|
|
2794
|
+
const parseTypeNode = (node) => {
|
|
2795
|
+
const levels = [];
|
|
2796
|
+
const collect = (n, nonNull) => {
|
|
2797
|
+
if (n.kind === graphql.Kind.NON_NULL_TYPE) {
|
|
2798
|
+
return collect(n.type, true);
|
|
2799
|
+
}
|
|
2800
|
+
if (n.kind === graphql.Kind.LIST_TYPE) {
|
|
2801
|
+
levels.push({
|
|
2802
|
+
kind: "list",
|
|
2803
|
+
nonNull
|
|
2804
|
+
});
|
|
2805
|
+
return collect(n.type, false);
|
|
2806
|
+
}
|
|
2807
|
+
levels.push({
|
|
2808
|
+
kind: "named",
|
|
2809
|
+
nonNull
|
|
2810
|
+
});
|
|
2811
|
+
return n.name.value;
|
|
2812
|
+
};
|
|
2813
|
+
const typeName = collect(node, false);
|
|
2814
|
+
let modifier = "?";
|
|
2815
|
+
for (const level of levels.slice().reverse()) {
|
|
2816
|
+
if (level.kind === "named") {
|
|
2817
|
+
modifier = level.nonNull ? "!" : "?";
|
|
2818
|
+
continue;
|
|
2819
|
+
}
|
|
2820
|
+
const listSuffix = level.nonNull ? "[]!" : "[]?";
|
|
2821
|
+
modifier = `${modifier}${listSuffix}`;
|
|
2822
|
+
}
|
|
2823
|
+
return {
|
|
2824
|
+
typeName,
|
|
2825
|
+
modifier
|
|
2826
|
+
};
|
|
2827
|
+
};
|
|
2828
|
+
/**
|
|
2829
|
+
* Extract selections from a SelectionSet.
|
|
2830
|
+
*/
|
|
2831
|
+
const extractSelections = (selections) => {
|
|
2832
|
+
return selections.map(extractSelection);
|
|
2833
|
+
};
|
|
2834
|
+
/**
|
|
2835
|
+
* Extract a single selection.
|
|
2836
|
+
*/
|
|
2837
|
+
const extractSelection = (node) => {
|
|
2838
|
+
switch (node.kind) {
|
|
2839
|
+
case graphql.Kind.FIELD: return extractFieldSelection(node);
|
|
2840
|
+
case graphql.Kind.FRAGMENT_SPREAD: return extractFragmentSpread(node);
|
|
2841
|
+
case graphql.Kind.INLINE_FRAGMENT: return extractInlineFragment(node);
|
|
2842
|
+
}
|
|
2843
|
+
};
|
|
2844
|
+
/**
|
|
2845
|
+
* Extract a field selection.
|
|
2846
|
+
*/
|
|
2847
|
+
const extractFieldSelection = (node) => {
|
|
2848
|
+
const args = node.arguments?.length ? node.arguments.map(extractArgument) : undefined;
|
|
2849
|
+
const selections = node.selectionSet ? extractSelections(node.selectionSet.selections) : undefined;
|
|
2850
|
+
return {
|
|
2851
|
+
kind: "field",
|
|
2852
|
+
name: node.name.value,
|
|
2853
|
+
alias: node.alias?.value,
|
|
2854
|
+
arguments: args,
|
|
2855
|
+
selections
|
|
2856
|
+
};
|
|
2857
|
+
};
|
|
2858
|
+
/**
|
|
2859
|
+
* Extract a fragment spread.
|
|
2860
|
+
*/
|
|
2861
|
+
const extractFragmentSpread = (node) => {
|
|
2862
|
+
return {
|
|
2863
|
+
kind: "fragmentSpread",
|
|
2864
|
+
name: node.name.value
|
|
2865
|
+
};
|
|
2866
|
+
};
|
|
2867
|
+
/**
|
|
2868
|
+
* Extract an inline fragment.
|
|
2869
|
+
*/
|
|
2870
|
+
const extractInlineFragment = (node) => {
|
|
2871
|
+
return {
|
|
2872
|
+
kind: "inlineFragment",
|
|
2873
|
+
onType: node.typeCondition?.name.value ?? "",
|
|
2874
|
+
selections: extractSelections(node.selectionSet.selections)
|
|
2875
|
+
};
|
|
2876
|
+
};
|
|
2877
|
+
/**
|
|
2878
|
+
* Extract an argument.
|
|
2879
|
+
*/
|
|
2880
|
+
const extractArgument = (node) => {
|
|
2881
|
+
return {
|
|
2882
|
+
name: node.name.value,
|
|
2883
|
+
value: extractValue(node.value)
|
|
2884
|
+
};
|
|
2885
|
+
};
|
|
2886
|
+
/**
|
|
2887
|
+
* Assert unreachable code path (for exhaustiveness checks).
|
|
2888
|
+
*/
|
|
2889
|
+
const assertUnreachable = (value) => {
|
|
2890
|
+
throw new Error(`Unexpected value: ${JSON.stringify(value)}`);
|
|
2891
|
+
};
|
|
2892
|
+
/**
|
|
2893
|
+
* Extract a value (literal or variable reference).
|
|
2894
|
+
*/
|
|
2895
|
+
const extractValue = (node) => {
|
|
2896
|
+
switch (node.kind) {
|
|
2897
|
+
case graphql.Kind.VARIABLE: return {
|
|
2898
|
+
kind: "variable",
|
|
2899
|
+
name: node.name.value
|
|
2900
|
+
};
|
|
2901
|
+
case graphql.Kind.INT: return {
|
|
2902
|
+
kind: "int",
|
|
2903
|
+
value: node.value
|
|
2904
|
+
};
|
|
2905
|
+
case graphql.Kind.FLOAT: return {
|
|
2906
|
+
kind: "float",
|
|
2907
|
+
value: node.value
|
|
2908
|
+
};
|
|
2909
|
+
case graphql.Kind.STRING: return {
|
|
2910
|
+
kind: "string",
|
|
2911
|
+
value: node.value
|
|
2912
|
+
};
|
|
2913
|
+
case graphql.Kind.BOOLEAN: return {
|
|
2914
|
+
kind: "boolean",
|
|
2915
|
+
value: node.value
|
|
2916
|
+
};
|
|
2917
|
+
case graphql.Kind.NULL: return { kind: "null" };
|
|
2918
|
+
case graphql.Kind.ENUM: return {
|
|
2919
|
+
kind: "enum",
|
|
2920
|
+
value: node.value
|
|
2921
|
+
};
|
|
2922
|
+
case graphql.Kind.LIST: return {
|
|
2923
|
+
kind: "list",
|
|
2924
|
+
values: node.values.map(extractValue)
|
|
2925
|
+
};
|
|
2926
|
+
case graphql.Kind.OBJECT: return {
|
|
2927
|
+
kind: "object",
|
|
2928
|
+
fields: node.fields.map((field) => ({
|
|
2929
|
+
name: field.name.value,
|
|
2930
|
+
value: extractValue(field.value)
|
|
2931
|
+
}))
|
|
2932
|
+
};
|
|
2933
|
+
default: return assertUnreachable(node);
|
|
2934
|
+
}
|
|
2935
|
+
};
|
|
2936
|
+
|
|
2937
|
+
//#endregion
|
|
2938
|
+
//#region packages/tools/src/codegen/graphql-compat/transformer.ts
|
|
2939
|
+
/**
|
|
2940
|
+
* Transformer for enriching parsed GraphQL operations with schema information.
|
|
2941
|
+
* @module
|
|
2942
|
+
*/
|
|
2943
|
+
/**
|
|
2944
|
+
* Built-in GraphQL scalar types.
|
|
2945
|
+
*/
|
|
2946
|
+
const builtinScalarTypes = new Set([
|
|
2947
|
+
"ID",
|
|
2948
|
+
"String",
|
|
2949
|
+
"Int",
|
|
2950
|
+
"Float",
|
|
2951
|
+
"Boolean"
|
|
2952
|
+
]);
|
|
2953
|
+
/**
|
|
2954
|
+
* Parse a modifier string into its structural components.
|
|
2955
|
+
* @param modifier - Modifier string like "!", "?", "![]!", "?[]?[]!"
|
|
2956
|
+
* @returns Parsed structure with inner nullability and list modifiers
|
|
2957
|
+
*/
|
|
2958
|
+
const parseModifierStructure = (modifier) => {
|
|
2959
|
+
const inner = modifier[0] === "!" ? "!" : "?";
|
|
2960
|
+
const lists = [];
|
|
2961
|
+
const listPattern = /\[\]([!?])/g;
|
|
2962
|
+
let match;
|
|
2963
|
+
while ((match = listPattern.exec(modifier)) !== null) {
|
|
2964
|
+
lists.push(`[]${match[1]}`);
|
|
2965
|
+
}
|
|
2966
|
+
return {
|
|
2967
|
+
inner,
|
|
2968
|
+
lists
|
|
2969
|
+
};
|
|
2970
|
+
};
|
|
2971
|
+
/**
|
|
2972
|
+
* Rebuild modifier string from structure.
|
|
2973
|
+
*/
|
|
2974
|
+
const buildModifier = (structure) => {
|
|
2975
|
+
return structure.inner + structure.lists.join("");
|
|
2976
|
+
};
|
|
2977
|
+
/**
|
|
2978
|
+
* Check if source modifier can be assigned to target modifier.
|
|
2979
|
+
* Implements GraphQL List Coercion: depth difference of 0 or 1 is allowed.
|
|
2980
|
+
*
|
|
2981
|
+
* Rules:
|
|
2982
|
+
* - A single value can be coerced into a list (depth diff = 1)
|
|
2983
|
+
* - At each level, non-null can be assigned to nullable (but not vice versa)
|
|
2984
|
+
*
|
|
2985
|
+
* @param source - The modifier of the value being assigned (variable's type)
|
|
2986
|
+
* @param target - The modifier expected by the position (field argument's type)
|
|
2987
|
+
* @returns true if assignment is valid
|
|
2988
|
+
*/
|
|
2989
|
+
const isModifierAssignable = (source, target) => {
|
|
2990
|
+
const srcStruct = parseModifierStructure(source);
|
|
2991
|
+
const tgtStruct = parseModifierStructure(target);
|
|
2992
|
+
const depthDiff = tgtStruct.lists.length - srcStruct.lists.length;
|
|
2993
|
+
if (depthDiff < 0 || depthDiff > 1) return false;
|
|
2994
|
+
const tgtListsToCompare = depthDiff === 1 ? tgtStruct.lists.slice(1) : tgtStruct.lists;
|
|
2995
|
+
if (depthDiff === 1 && srcStruct.lists.length === 0 && srcStruct.inner === "?" && tgtStruct.lists[0] === "[]!") {
|
|
2996
|
+
return false;
|
|
2997
|
+
}
|
|
2998
|
+
if (srcStruct.inner === "?" && tgtStruct.inner === "!") return false;
|
|
2999
|
+
for (let i = 0; i < srcStruct.lists.length; i++) {
|
|
3000
|
+
const srcList = srcStruct.lists[i];
|
|
3001
|
+
const tgtList = tgtListsToCompare[i];
|
|
3002
|
+
if (srcList === undefined || tgtList === undefined) break;
|
|
3003
|
+
if (srcList === "[]?" && tgtList === "[]!") return false;
|
|
3004
|
+
}
|
|
3005
|
+
return true;
|
|
3006
|
+
};
|
|
3007
|
+
/**
|
|
3008
|
+
* Derive minimum modifier needed to satisfy expected modifier.
|
|
3009
|
+
* When List Coercion can apply, returns one level shallower.
|
|
3010
|
+
*
|
|
3011
|
+
* @param expectedModifier - The modifier expected by the field argument
|
|
3012
|
+
* @returns The minimum modifier the variable must have
|
|
3013
|
+
*/
|
|
3014
|
+
const deriveMinimumModifier = (expectedModifier) => {
|
|
3015
|
+
const struct = parseModifierStructure(expectedModifier);
|
|
3016
|
+
if (struct.lists.length > 0) {
|
|
3017
|
+
return buildModifier({
|
|
3018
|
+
inner: struct.inner,
|
|
3019
|
+
lists: struct.lists.slice(1)
|
|
3020
|
+
});
|
|
3021
|
+
}
|
|
3022
|
+
return expectedModifier;
|
|
3023
|
+
};
|
|
3024
|
+
/**
|
|
3025
|
+
* Merge two modifiers by taking the stricter constraint at each level.
|
|
3026
|
+
* - Non-null (!) is stricter than nullable (?)
|
|
3027
|
+
* - List depths must match
|
|
3028
|
+
*
|
|
3029
|
+
* @param a - First modifier
|
|
3030
|
+
* @param b - Second modifier
|
|
3031
|
+
* @returns Merged modifier or error if incompatible
|
|
3032
|
+
*/
|
|
3033
|
+
const mergeModifiers = (a, b) => {
|
|
3034
|
+
const structA = parseModifierStructure(a);
|
|
3035
|
+
const structB = parseModifierStructure(b);
|
|
3036
|
+
if (structA.lists.length !== structB.lists.length) {
|
|
3037
|
+
return {
|
|
3038
|
+
ok: false,
|
|
3039
|
+
reason: `Incompatible list depths: "${a}" has ${structA.lists.length} list level(s), "${b}" has ${structB.lists.length}`
|
|
3040
|
+
};
|
|
3041
|
+
}
|
|
3042
|
+
const mergedInner = structA.inner === "!" || structB.inner === "!" ? "!" : "?";
|
|
3043
|
+
const mergedLists = [];
|
|
3044
|
+
for (let i = 0; i < structA.lists.length; i++) {
|
|
3045
|
+
const listA = structA.lists[i];
|
|
3046
|
+
const listB = structB.lists[i];
|
|
3047
|
+
if (listA === undefined || listB === undefined) break;
|
|
3048
|
+
mergedLists.push(listA === "[]!" || listB === "[]!" ? "[]!" : "[]?");
|
|
3049
|
+
}
|
|
3050
|
+
return {
|
|
3051
|
+
ok: true,
|
|
3052
|
+
value: buildModifier({
|
|
3053
|
+
inner: mergedInner,
|
|
3054
|
+
lists: mergedLists
|
|
3055
|
+
})
|
|
3056
|
+
};
|
|
3057
|
+
};
|
|
3058
|
+
/**
|
|
3059
|
+
* Get the expected type for a field argument from the schema.
|
|
3060
|
+
* Returns null if the field or argument is not found.
|
|
3061
|
+
*/
|
|
3062
|
+
const getArgumentType = (schema, parentTypeName, fieldName, argumentName) => {
|
|
3063
|
+
const objectRecord = schema.objects.get(parentTypeName);
|
|
3064
|
+
if (!objectRecord) return null;
|
|
3065
|
+
const fieldDef = objectRecord.fields.get(fieldName);
|
|
3066
|
+
if (!fieldDef) return null;
|
|
3067
|
+
const argDef = fieldDef.arguments?.find((arg) => arg.name.value === argumentName);
|
|
3068
|
+
if (!argDef) return null;
|
|
3069
|
+
return parseTypeNode(argDef.type);
|
|
3070
|
+
};
|
|
3071
|
+
/**
|
|
3072
|
+
* Get the expected type for an input object field from the schema.
|
|
3073
|
+
*/
|
|
3074
|
+
const getInputFieldType = (schema, inputTypeName, fieldName) => {
|
|
3075
|
+
const inputRecord = schema.inputs.get(inputTypeName);
|
|
3076
|
+
if (!inputRecord) return null;
|
|
3077
|
+
const fieldDef = inputRecord.fields.get(fieldName);
|
|
3078
|
+
if (!fieldDef) return null;
|
|
3079
|
+
return parseTypeNode(fieldDef.type);
|
|
3080
|
+
};
|
|
3081
|
+
/**
|
|
3082
|
+
* Resolve the type kind for a type name.
|
|
3083
|
+
*/
|
|
3084
|
+
const resolveTypeKindFromName = (schema, typeName) => {
|
|
3085
|
+
if (isScalarName(schema, typeName)) return "scalar";
|
|
3086
|
+
if (isEnumName(schema, typeName)) return "enum";
|
|
3087
|
+
if (schema.inputs.has(typeName)) return "input";
|
|
3088
|
+
return null;
|
|
3089
|
+
};
|
|
3090
|
+
/**
 * Extract variable usages from a parsed value, given the expected type.
 * Handles nested input objects recursively.
 *
 * Appends one usage record per `$variable` reference onto `usages`
 * (mutated in place). Returns an error object on the first schema
 * mismatch, or null on success.
 *
 * @param value - Parsed value node (kind: "variable" | "object" | "list" | literal).
 * @param expectedTypeName - Named type the value must conform to.
 * @param expectedModifier - Modifier string (nullability/list wrapping) at this position.
 * @param schema - Schema index for field/type lookups.
 * @param usages - Accumulator; entries gain expected and minimum modifiers.
 * @returns Error object ({ code, message, ... }) or null.
 */
const collectVariablesFromValue = (value, expectedTypeName, expectedModifier, schema, usages) => {
  if (value.kind === "variable") {
    const typeKind = resolveTypeKindFromName(schema, expectedTypeName);
    if (!typeKind) {
      return {
        code: "GRAPHQL_UNKNOWN_TYPE",
        message: `Unknown type "${expectedTypeName}" for variable "$${value.name}"`,
        typeName: expectedTypeName
      };
    }
    usages.push({
      name: value.name,
      typeName: expectedTypeName,
      expectedModifier,
      // The shallowest modifier this usage could accept; merged later
      // across all usages of the same variable (see mergeVariableUsages).
      minimumModifier: deriveMinimumModifier(expectedModifier),
      typeKind
    });
    return null;
  }
  if (value.kind === "object") {
    // Recurse into each input-object field with that field's declared type.
    for (const field of value.fields) {
      const fieldType = getInputFieldType(schema, expectedTypeName, field.name);
      if (!fieldType) {
        return {
          code: "GRAPHQL_UNKNOWN_FIELD",
          message: `Unknown field "${field.name}" on input type "${expectedTypeName}"`,
          typeName: expectedTypeName,
          fieldName: field.name
        };
      }
      const error = collectVariablesFromValue(field.value, fieldType.typeName, fieldType.modifier, schema, usages);
      if (error) return error;
    }
    return null;
  }
  if (value.kind === "list") {
    const struct = parseModifierStructure(expectedModifier);
    // Only recurse when the expected type actually is a list; a literal list
    // against a non-list type is left for other validation to report.
    if (struct.lists.length > 0) {
      // Strip the outermost list wrapper to get the element modifier.
      const innerModifier = buildModifier({
        inner: struct.inner,
        lists: struct.lists.slice(1)
      });
      for (const item of value.values) {
        const error = collectVariablesFromValue(item, expectedTypeName, innerModifier, schema, usages);
        if (error) return error;
      }
    }
  }
  // Scalars/enums/null literals contain no variables.
  return null;
};
|
|
3144
|
+
/**
 * Collect variable usages from field arguments.
 *
 * Delegates per-argument work to collectVariablesFromValue, which appends
 * usage records onto `usages` (mutated in place).
 *
 * @param args - Parsed argument nodes ({ name, value }).
 * @param parentTypeName - Object type that declares the field.
 * @param fieldName - Field the arguments belong to.
 * @param schema - Schema index for argument-type lookups.
 * @param usages - Accumulator of variable usage records.
 * @returns Error object on the first unknown argument or nested failure, else null.
 */
const collectVariablesFromArguments = (args, parentTypeName, fieldName, schema, usages) => {
  for (const argument of args) {
    const expectedType = getArgumentType(schema, parentTypeName, fieldName, argument.name);
    if (!expectedType) {
      return {
        code: "GRAPHQL_UNKNOWN_ARGUMENT",
        message: `Unknown argument "${argument.name}" on field "${fieldName}"`,
        fieldName,
        argumentName: argument.name
      };
    }
    const failure = collectVariablesFromValue(argument.value, expectedType.typeName, expectedType.modifier, schema, usages);
    if (failure) {
      return failure;
    }
  }
  return null;
};
|
|
3163
|
+
/**
 * Recursively collect all variable usages from selections.
 *
 * Walks the selection tree depth-first, gathering a usage record for every
 * `$variable` appearing in field arguments. Fragment spreads are skipped here;
 * their variables are merged in separately by the fragment transform.
 *
 * @param selections - Parsed selection nodes to scan.
 * @param parentTypeName - Object type the selections apply to.
 * @param schema - Schema index used to resolve field and argument types.
 * @returns Ok(usages[]) or Err with the first schema mismatch encountered.
 */
const collectVariableUsages = (selections, parentTypeName, schema) => {
  const usages = [];
  // Inner walker: returns an error object on failure, null on success;
  // usages accumulate via closure.
  const collect = (sels, parentType) => {
    for (const sel of sels) {
      switch (sel.kind) {
        case "field": {
          if (sel.arguments && sel.arguments.length > 0) {
            const error$1 = collectVariablesFromArguments(sel.arguments, parentType, sel.name, schema, usages);
            if (error$1) return error$1;
          }
          if (sel.selections && sel.selections.length > 0) {
            // Nested selections are typed against the field's return type.
            const fieldReturnType = getFieldReturnType(schema, parentType, sel.name);
            if (!fieldReturnType) {
              return {
                code: "GRAPHQL_UNKNOWN_FIELD",
                message: `Unknown field "${sel.name}" on type "${parentType}"`,
                typeName: parentType,
                fieldName: sel.name
              };
            }
            const error$1 = collect(sel.selections, fieldReturnType);
            if (error$1) return error$1;
          }
          break;
        }
        case "inlineFragment": {
          // The fragment's type condition becomes the new parent type.
          const error$1 = collect(sel.selections, sel.onType);
          if (error$1) return error$1;
          break;
        }
        // Spread fragments declare their own variables; nothing to do here.
        case "fragmentSpread": break;
      }
    }
    return null;
  };
  const error = collect(selections, parentTypeName);
  if (error) return (0, neverthrow.err)(error);
  return (0, neverthrow.ok)(usages);
};
|
|
3205
|
+
/**
 * Get the return type of a field (unwrapped from modifiers).
 *
 * @param schema - Schema index with an `objects` map of type records.
 * @param parentTypeName - Object type the field belongs to.
 * @param fieldName - Field whose return type is wanted.
 * @returns The bare named type, or null when the type or field is unknown.
 */
const getFieldReturnType = (schema, parentTypeName, fieldName) => {
  const fieldDef = schema.objects.get(parentTypeName)?.fields.get(fieldName);
  if (!fieldDef) return null;
  return parseTypeNode(fieldDef.type).typeName;
};
|
|
3216
|
+
/**
 * Merge multiple variable usages into a single InferredVariable.
 * Validates type compatibility and merges modifiers using List Coercion rules.
 *
 * The algorithm:
 * 1. Validate all usages have the same type name
 * 2. Merge minimumModifiers to find the candidate (shallowest type that could work)
 * 3. Verify the candidate can satisfy ALL expected modifiers via isModifierAssignable
 *
 * @param variableName - Variable being merged (without the `$`).
 * @param usages - All recorded usages of that variable.
 * @returns Ok({ name, typeName, modifier, typeKind }) or Err describing the conflict.
 */
const mergeVariableUsages = (variableName, usages) => {
  const first = usages[0];
  // An empty group means the variable was referenced nowhere it could be typed.
  if (!first) {
    return (0, neverthrow.err)({
      code: "GRAPHQL_UNDECLARED_VARIABLE",
      message: `No usages found for variable "${variableName}"`,
      variableName
    });
  }
  // Step 1: every usage must agree on the named type.
  for (const usage of usages) {
    if (usage.typeName !== first.typeName) {
      return (0, neverthrow.err)({
        code: "GRAPHQL_VARIABLE_TYPE_MISMATCH",
        message: `Variable "$${variableName}" has conflicting types: "${first.typeName}" and "${usage.typeName}"`,
        variableName
      });
    }
  }
  // Step 2: fold the minimum modifiers pairwise into one candidate.
  let candidateModifier = first.minimumModifier;
  for (let i = 1; i < usages.length; i++) {
    const usage = usages[i];
    // Defensive guard for a hole in the array; presumably unreachable for
    // dense usage lists — NOTE(review): `break` silently stops merging here.
    if (!usage) break;
    const result = mergeModifiers(candidateModifier, usage.minimumModifier);
    if (!result.ok) {
      return (0, neverthrow.err)({
        code: "GRAPHQL_VARIABLE_MODIFIER_INCOMPATIBLE",
        message: `Variable "$${variableName}" has incompatible modifiers: ${result.reason}`,
        variableName
      });
    }
    candidateModifier = result.value;
  }
  // Step 3: the merged candidate must be assignable to every expected slot.
  for (const usage of usages) {
    if (!isModifierAssignable(candidateModifier, usage.expectedModifier)) {
      return (0, neverthrow.err)({
        code: "GRAPHQL_VARIABLE_MODIFIER_INCOMPATIBLE",
        message: `Variable "$${variableName}" with modifier "${candidateModifier}" cannot satisfy expected "${usage.expectedModifier}"`,
        variableName
      });
    }
  }
  return (0, neverthrow.ok)({
    name: variableName,
    typeName: first.typeName,
    modifier: candidateModifier,
    typeKind: first.typeKind
  });
};
|
|
3273
|
+
/**
 * Infer variables from collected usages.
 * Groups by variable name and merges each group.
 *
 * @param usages - Flat list of usage records (possibly repeating names).
 * @returns Ok with variables sorted by name, or Err from the first failed merge.
 */
const inferVariablesFromUsages = (usages) => {
  const grouped = new Map();
  for (const usage of usages) {
    const bucket = grouped.get(usage.name) ?? [];
    bucket.push(usage);
    grouped.set(usage.name, bucket);
  }
  const variables = [];
  for (const [name, group] of grouped) {
    const merged = mergeVariableUsages(name, group);
    if (merged.isErr()) return (0, neverthrow.err)(merged.error);
    variables.push(merged.value);
  }
  // Deterministic output order regardless of usage discovery order.
  variables.sort((a, b) => a.name.localeCompare(b.name));
  return (0, neverthrow.ok)(variables);
};
|
|
3296
|
+
/**
 * Check if a type name is a scalar type (built-in or schema-declared custom scalar).
 *
 * @param schema - Schema index with a `scalars` collection.
 * @param name - Type name to test.
 * @returns True when the name is a known scalar.
 */
const isScalarName = (schema, name) => {
  if (builtinScalarTypes.has(name)) return true;
  return schema.scalars.has(name);
};
|
|
3300
|
+
/**
 * Topologically sort fragments so dependencies come before dependents.
 * Detects circular dependencies.
 *
 * Note: Uses the existing collectFragmentDependencies function defined below.
 *
 * @param fragments - Parsed fragment definitions.
 * @returns Ok with fragments in dependency order, or Err with the cycle path.
 */
const sortFragmentsByDependency = (fragments) => {
  // name -> Set of fragment names it spreads (direct dependencies).
  const graph = new Map();
  for (const fragment of fragments) {
    const deps = collectFragmentDependenciesSet(fragment.selections);
    graph.set(fragment.name, deps);
  }
  const fragmentByName = new Map();
  for (const f of fragments) {
    fragmentByName.set(f.name, f);
  }
  const sorted = [];
  const visited = new Set();
  // Nodes currently on the DFS stack; re-entering one means a cycle.
  const visiting = new Set();
  // Post-order DFS; `path` carries the ancestor chain for cycle reporting.
  const visit = (name, path) => {
    if (visited.has(name)) return null;
    if (visiting.has(name)) {
      // Every visiting node is an ancestor on `path`, so indexOf finds it.
      const cycleStart = path.indexOf(name);
      const cycle = path.slice(cycleStart).concat(name);
      return {
        code: "GRAPHQL_FRAGMENT_CIRCULAR_DEPENDENCY",
        message: `Circular dependency detected in fragments: ${cycle.join(" -> ")}`,
        fragmentNames: cycle
      };
    }
    const fragment = fragmentByName.get(name);
    // Spread of a fragment defined elsewhere (or missing): mark done, skip.
    if (!fragment) {
      visited.add(name);
      return null;
    }
    visiting.add(name);
    const deps = graph.get(name) ?? new Set();
    for (const dep of deps) {
      const error = visit(dep, [...path, name]);
      if (error) return error;
    }
    visiting.delete(name);
    visited.add(name);
    // Post-order push: dependencies land before dependents.
    sorted.push(fragment);
    return null;
  };
  for (const fragment of fragments) {
    const error = visit(fragment.name, []);
    if (error) return (0, neverthrow.err)(error);
  }
  return (0, neverthrow.ok)(sorted);
};
|
|
3352
|
+
/**
 * Recursively collect fragment spread names from selections into a Set.
 * Internal helper for sortFragmentsByDependency.
 *
 * @param selections - Parsed selection nodes.
 * @returns Set of spread fragment names in discovery (depth-first) order.
 */
const collectFragmentDependenciesSet = (selections) => {
  const deps = new Set();
  const visitAll = (entries) => {
    for (const entry of entries) {
      if (entry.kind === "fragmentSpread") {
        deps.add(entry.name);
      } else if (entry.kind === "field") {
        if (entry.selections) {
          visitAll(entry.selections);
        }
      } else if (entry.kind === "inlineFragment") {
        visitAll(entry.selections);
      }
    }
  };
  visitAll(selections);
  return deps;
};
|
|
3378
|
+
/**
 * Check if a type name is an enum type.
 *
 * @param schema - Schema index with an `enums` collection keyed by name.
 * @param name - Type name to test.
 * @returns True when the schema declares an enum with this name.
 */
const isEnumName = (schema, name) => {
  return schema.enums.has(name);
};
|
|
3382
|
+
/**
 * Transform parsed operations/fragments by enriching them with schema information.
 *
 * This resolves variable type kinds (scalar, enum, input), collects
 * fragment dependencies, and infers variables for fragments.
 *
 * Fragments are processed in dependency order so that a fragment's spreads
 * have already had their variables resolved when it is transformed.
 *
 * @param parsed - Output of the GraphQL parser ({ operations, fragments }).
 * @param options - Must provide `schemaDocument` for building the schema index.
 * @returns Ok({ operations, fragments }) or Err with the first transform failure.
 */
const transformParsedGraphql = (parsed, options) => {
  const schema = (0, __soda_gql_core.createSchemaIndex)(options.schemaDocument);
  // Topological order guarantees dependencies are resolved before dependents.
  const sortResult = sortFragmentsByDependency(parsed.fragments);
  if (sortResult.isErr()) {
    return (0, neverthrow.err)(sortResult.error);
  }
  const sortedFragments = sortResult.value;
  // fragment name -> inferred variables, consumed by later fragments' transforms.
  const resolvedFragmentVariables = new Map();
  const fragments = [];
  for (const frag of sortedFragments) {
    const result = transformFragment(frag, schema, resolvedFragmentVariables);
    if (result.isErr()) {
      return (0, neverthrow.err)(result.error);
    }
    resolvedFragmentVariables.set(frag.name, result.value.variables);
    fragments.push(result.value);
  }
  const operations = [];
  for (const op of parsed.operations) {
    const result = transformOperation(op, schema);
    if (result.isErr()) {
      return (0, neverthrow.err)(result.error);
    }
    operations.push(result.value);
  }
  return (0, neverthrow.ok)({
    operations,
    fragments
  });
};
|
|
3418
|
+
/**
 * Transform a single operation.
 *
 * Resolves each declared variable's type kind against the schema and collects
 * the operation's fragment dependencies.
 *
 * @param op - Parsed operation definition.
 * @param schema - Schema index.
 * @returns Ok with the enriched operation or Err on an unknown variable type.
 */
const transformOperation = (op, schema) => {
  const enrichedVariables = [];
  for (const variable of op.variables) {
    const kind = resolveTypeKind(schema, variable.typeName);
    if (kind === null) {
      return (0, neverthrow.err)({
        code: "GRAPHQL_UNKNOWN_TYPE",
        message: `Unknown type "${variable.typeName}" in variable "${variable.name}"`,
        typeName: variable.typeName
      });
    }
    enrichedVariables.push({ ...variable, typeKind: kind });
  }
  return (0, neverthrow.ok)({
    ...op,
    variables: enrichedVariables,
    fragmentDependencies: collectFragmentDependencies(op.selections)
  });
};
|
|
3444
|
+
/**
 * Transform a single fragment.
 * Infers variables from field arguments and propagates variables from spread fragments.
 *
 * Relies on dependency-ordered processing: `resolvedFragmentVariables` must
 * already contain entries for every fragment this one spreads.
 *
 * @param frag - Parsed fragment definition.
 * @param schema - Schema index.
 * @param resolvedFragmentVariables - name -> inferred variables of already-transformed fragments.
 * @returns Ok with the enriched fragment or Err from usage collection/merging.
 */
const transformFragment = (frag, schema, resolvedFragmentVariables) => {
  const fragmentDependencies = collectFragmentDependencies(frag.selections);
  // Variables referenced directly in this fragment's own arguments.
  const directUsagesResult = collectVariableUsages(frag.selections, frag.onType, schema);
  if (directUsagesResult.isErr()) {
    return (0, neverthrow.err)(directUsagesResult.error);
  }
  const directUsages = directUsagesResult.value;
  // Variables inherited from spread fragments (already inferred upstream).
  const spreadVariables = [];
  for (const depName of fragmentDependencies) {
    const depVariables = resolvedFragmentVariables.get(depName);
    if (depVariables) {
      spreadVariables.push(...depVariables);
    }
  }
  // Inherited variables are treated as usages whose expected and minimum
  // modifiers are both the already-resolved modifier.
  const allUsages = [...directUsages, ...spreadVariables.map((v) => ({
    name: v.name,
    typeName: v.typeName,
    expectedModifier: v.modifier,
    minimumModifier: v.modifier,
    typeKind: v.typeKind
  }))];
  const variablesResult = inferVariablesFromUsages(allUsages);
  if (variablesResult.isErr()) {
    return (0, neverthrow.err)(variablesResult.error);
  }
  return (0, neverthrow.ok)({
    ...frag,
    fragmentDependencies,
    variables: variablesResult.value
  });
};
|
|
3479
|
+
/**
 * Resolve the type kind for a type name.
 *
 * @param schema - Schema index.
 * @param typeName - Type name to classify.
 * @returns "scalar", "enum", "input", or null when the name is unknown.
 */
const resolveTypeKind = (schema, typeName) => {
  if (isScalarName(schema, typeName)) return "scalar";
  if (isEnumName(schema, typeName)) return "enum";
  return schema.inputs.has(typeName) ? "input" : null;
};
|
|
3494
|
+
/**
 * Collect fragment names used in selections (recursively).
 *
 * @param selections - Parsed selection nodes.
 * @returns De-duplicated fragment names in depth-first discovery order.
 */
const collectFragmentDependencies = (selections) => {
  const found = new Set();
  const walk = (entries) => {
    for (const entry of entries) {
      if (entry.kind === "fragmentSpread") {
        found.add(entry.name);
      } else if (entry.kind === "field") {
        if (entry.selections) {
          walk(entry.selections);
        }
      } else if (entry.kind === "inlineFragment") {
        walk(entry.selections);
      }
    }
  };
  walk(selections);
  return Array.from(found);
};
|
|
3519
|
+
|
|
3520
|
+
//#endregion
|
|
3521
|
+
//#region packages/tools/src/codegen/graphql-compat/emitter.ts
|
|
3522
|
+
/**
 * Map operation kind to root type name.
 * Uses schema.operationTypes if available, falls back to standard names.
 *
 * @param schema - Schema index or null when no schema document was provided.
 * @param kind - "query" | "mutation" | "subscription".
 * @returns The root object type name for the operation kind.
 */
const getRootTypeName = (schema, kind) => {
  const defaults = {
    query: "Query",
    mutation: "Mutation",
    subscription: "Subscription"
  };
  if (schema) {
    return schema.operationTypes[kind] ?? defaults[kind];
  }
  return defaults[kind];
};
|
|
3540
|
+
/**
 * Emit TypeScript code for an operation.
 *
 * Produces a `gql.<schemaName>(...)` export named `<OperationName>Compat`
 * wrapping `<kind>.compat({ name, variables?, fields })`.
 *
 * @param operation - Enriched operation (name, kind, variables, selections).
 * @param options - Provides `schemaName` and optionally `schemaDocument`.
 * @returns Ok with the emitted source string or Err from selection emission.
 */
const emitOperation = (operation, options) => {
  const lines = [];
  // Schema is optional; without it, emission skips type-directed coercions.
  const schema = options.schemaDocument ? (0, __soda_gql_core.createSchemaIndex)(options.schemaDocument) : null;
  const exportName = `${operation.name}Compat`;
  const operationType = operation.kind;
  lines.push(`export const ${exportName} = gql.${options.schemaName}(({ ${operationType}, $var }) =>`);
  lines.push(` ${operationType}.compat({`);
  lines.push(`  name: ${JSON.stringify(operation.name)},`);
  if (operation.variables.length > 0) {
    lines.push(`  variables: { ${emitVariables(operation.variables)} },`);
  }
  const rootTypeName = getRootTypeName(schema, operation.kind);
  lines.push(`  fields: ({ f, $ }) => ({`);
  // indent 3 = nesting depth of the field entries inside `fields`.
  const fieldLinesResult = emitSelections(operation.selections, 3, operation.variables, schema, rootTypeName);
  if (fieldLinesResult.isErr()) {
    return (0, neverthrow.err)(fieldLinesResult.error);
  }
  lines.push(fieldLinesResult.value);
  lines.push(`  }),`);
  lines.push(` }),`);
  lines.push(`);`);
  return (0, neverthrow.ok)(lines.join("\n"));
};
|
|
3566
|
+
/**
 * Emit TypeScript code for a fragment.
 *
 * Produces a `gql.<schemaName>(...)` export named `<FragmentName>Fragment`
 * wrapping `fragment.<OnType>({ variables?, fields })`. `$var` and `$` are
 * only destructured when the fragment actually has variables.
 *
 * @param fragment - Enriched fragment (name, onType, variables, selections).
 * @param options - Provides `schemaName` and optionally `schemaDocument`.
 * @returns Ok with the emitted source string or Err from selection emission.
 */
const emitFragment = (fragment, options) => {
  const lines = [];
  // Schema is optional; without it, emission skips type-directed coercions.
  const schema = options.schemaDocument ? (0, __soda_gql_core.createSchemaIndex)(options.schemaDocument) : null;
  const hasVariables = fragment.variables.length > 0;
  const exportName = `${fragment.name}Fragment`;
  const destructure = hasVariables ? "fragment, $var" : "fragment";
  lines.push(`export const ${exportName} = gql.${options.schemaName}(({ ${destructure} }) =>`);
  lines.push(` fragment.${fragment.onType}({`);
  if (hasVariables) {
    lines.push(`  variables: { ${emitVariables(fragment.variables)} },`);
  }
  const fieldsContext = hasVariables ? "{ f, $ }" : "{ f }";
  lines.push(`  fields: (${fieldsContext}) => ({`);
  // indent 3 = nesting depth of the field entries inside `fields`.
  const fieldLinesResult = emitSelections(fragment.selections, 3, fragment.variables, schema, fragment.onType);
  if (fieldLinesResult.isErr()) {
    return (0, neverthrow.err)(fieldLinesResult.error);
  }
  lines.push(fieldLinesResult.value);
  lines.push(`  }),`);
  lines.push(` }),`);
  lines.push(`);`);
  return (0, neverthrow.ok)(lines.join("\n"));
};
|
|
3592
|
+
/**
 * Emit variable definitions.
 *
 * Each variable becomes a `...$var("name").TypeName("modifier")` spread entry;
 * entries are joined with ", " for inline use inside `variables: { ... }`.
 *
 * @param variables - Inferred variables ({ name, typeName, modifier }).
 * @returns The joined definition string ("" for an empty list).
 */
const emitVariables = (variables) => {
  const parts = [];
  for (const v of variables) {
    parts.push(`...$var(${JSON.stringify(v.name)}).${v.typeName}(${JSON.stringify(v.modifier)})`);
  }
  return parts.join(", ");
};
|
|
3598
|
+
/**
 * Emit field selections (public API).
 * Converts variable array to Set<string> and delegates to internal implementation.
 *
 * @param selections - Parsed selection nodes.
 * @param indent - Nesting depth (one space per level).
 * @param variables - Inferred variables; only names are needed downstream.
 * @param schema - Schema index or null.
 * @param parentTypeName - Type the selections apply to.
 * @returns Result from emitSelectionsInternal.
 */
const emitSelections = (selections, indent, variables, schema, parentTypeName) => {
  const variableNames = new Set();
  for (const variable of variables) {
    variableNames.add(variable.name);
  }
  return emitSelectionsInternal(selections, indent, variableNames, schema, parentTypeName);
};
|
|
3606
|
+
/**
 * Internal implementation for emitting field selections.
 * Takes variableNames as Set<string> for recursive calls.
 *
 * Inline fragments are split out and emitted together as one union selection;
 * a bare `__typename` sibling is folded into that union (or emitted alone when
 * there are no inline fragments).
 *
 * @param selections - Parsed selection nodes at this level.
 * @param indent - Nesting depth (one space per level).
 * @param variableNames - Names of variables in scope (referenced as `$.name`).
 * @param schema - Schema index or null.
 * @param parentTypeName - Type the selections apply to (may be undefined).
 * @returns Ok with the joined emitted lines or Err from a nested emitter.
 */
const emitSelectionsInternal = (selections, indent, variableNames, schema, parentTypeName) => {
  const lines = [];
  const inlineFragments = [];
  const otherSelections = [];
  let hasTypenameField = false;
  // Partition: inline fragments and a plain `__typename` get special handling.
  for (const sel of selections) {
    if (sel.kind === "inlineFragment") {
      inlineFragments.push(sel);
    } else if (sel.kind === "field" && sel.name === "__typename" && !sel.alias) {
      hasTypenameField = true;
    } else {
      otherSelections.push(sel);
    }
  }
  for (const sel of otherSelections) {
    const result = emitSingleSelection(sel, indent, variableNames, schema, parentTypeName);
    if (result.isErr()) {
      return (0, neverthrow.err)(result.error);
    }
    lines.push(result.value);
  }
  if (inlineFragments.length > 0) {
    // __typename rides along inside the union object when present.
    const unionResult = emitInlineFragmentsAsUnion(inlineFragments, indent, variableNames, schema, hasTypenameField);
    if (unionResult.isErr()) {
      return (0, neverthrow.err)(unionResult.error);
    }
    lines.push(unionResult.value);
  } else if (hasTypenameField) {
    const padding = " ".repeat(indent);
    lines.push(`${padding}__typename: true,`);
  }
  return (0, neverthrow.ok)(lines.join("\n"));
};
|
|
3643
|
+
/**
 * Emit a single selection (field or fragment spread).
 *
 * Inline fragments are handled by emitInlineFragmentsAsUnion and yield an
 * empty string here (they are filtered out before this is called).
 *
 * @param sel - Selection node.
 * @param indent - Nesting depth (one space per level).
 * @param variableNames - Names of variables in scope.
 * @param schema - Schema index or null.
 * @param parentTypeName - Type the selection applies to.
 * @returns Ok with the emitted line(s) or Err from field emission.
 */
const emitSingleSelection = (sel, indent, variableNames, schema, parentTypeName) => {
  if (sel.kind === "field") {
    return emitFieldSelection(sel, indent, variableNames, schema, parentTypeName);
  }
  const padding = " ".repeat(indent);
  if (sel.kind === "fragmentSpread") {
    return (0, neverthrow.ok)(`${padding}...${sel.name}Fragment.spread(),`);
  }
  return (0, neverthrow.ok)("");
};
|
|
3654
|
+
/**
 * Emit inline fragments grouped as a union selection.
 * Format: { TypeA: ({ f }) => ({ ...fields }), TypeB: ({ f }) => ({ ...fields }), __typename: true }
 *
 * Rejects inline fragments without a type condition, and fragments whose type
 * is neither an object type nor a union member (i.e. interface conditions).
 *
 * @param inlineFragments - Inline fragment selections at one level.
 * @param indent - Nesting depth (one space per level).
 * @param variableNames - Names of variables in scope.
 * @param schema - Schema index or null (null skips the interface check).
 * @param includeTypename - Whether a sibling `__typename` was selected.
 * @returns Ok with the emitted union spread or Err describing the violation.
 */
const emitInlineFragmentsAsUnion = (inlineFragments, indent, variableNames, schema, includeTypename) => {
  const padding = " ".repeat(indent);
  // Validate first: every fragment needs an explicit type condition.
  for (const frag of inlineFragments) {
    if (frag.onType === "") {
      return (0, neverthrow.err)({
        code: "GRAPHQL_INLINE_FRAGMENT_WITHOUT_TYPE",
        message: "Inline fragments without type condition are not supported. Use `... on TypeName { }` syntax."
      });
    }
  }
  // With a schema, an onType that is not an object type must at least be a
  // member of some union; otherwise it is an (unsupported) interface.
  for (const frag of inlineFragments) {
    if (schema && !schema.objects.has(frag.onType)) {
      let isUnionMember = false;
      for (const [, unionDef] of schema.unions) {
        if (unionDef.members.has(frag.onType)) {
          isUnionMember = true;
          break;
        }
      }
      if (!isUnionMember) {
        return (0, neverthrow.err)({
          code: "GRAPHQL_INLINE_FRAGMENT_ON_INTERFACE",
          message: `Inline fragments on interface type "${frag.onType}" are not supported. Use union types instead.`,
          onType: frag.onType
        });
      }
    }
  }
  const entries = [];
  for (const frag of inlineFragments) {
    const innerPadding = " ".repeat(indent + 1);
    const fieldsResult = emitSelectionsInternal(frag.selections, indent + 2, variableNames, schema, frag.onType);
    if (fieldsResult.isErr()) {
      return (0, neverthrow.err)(fieldsResult.error);
    }
    // Multi-line template: continuation lines carry their own padding.
    entries.push(`${innerPadding}${frag.onType}: ({ f }) => ({
${fieldsResult.value}
${innerPadding}}),`);
  }
  if (includeTypename) {
    const innerPadding = " ".repeat(indent + 1);
    entries.push(`${innerPadding}__typename: true,`);
  }
  return (0, neverthrow.ok)(`${padding}...({
${entries.join("\n")}
${padding}}),`);
};
|
|
3705
|
+
/**
 * Emit a single field selection.
 *
 * Plain fields (no args, no alias, no sub-selections) become `name: true,`;
 * everything else becomes a `...f.name(args?, { alias }?)(fields?)` spread.
 *
 * @param field - Field selection node.
 * @param indent - Nesting depth (one space per level).
 * @param variableNames - Names of variables in scope.
 * @param schema - Schema index or null.
 * @param parentTypeName - Type that declares this field (may be undefined).
 * @returns Ok with the emitted line(s) or Err from argument/selection emission.
 */
const emitFieldSelection = (field, indent, variableNames, schema, parentTypeName) => {
  const padding = " ".repeat(indent);
  const args = field.arguments;
  const selections = field.selections;
  const hasArgs = args && args.length > 0;
  const hasSelections = selections && selections.length > 0;
  // Simplest shape: leaf field with no decoration.
  if (!hasArgs && !hasSelections && !field.alias) {
    return (0, neverthrow.ok)(`${padding}${field.name}: true,`);
  }
  let line = `${padding}...f.${field.name}(`;
  if (hasArgs) {
    const argsResult = emitArguments(args, variableNames, schema, parentTypeName, field.name);
    if (argsResult.isErr()) {
      return (0, neverthrow.err)(argsResult.error);
    }
    line += argsResult.value;
    if (field.alias) {
      line += `, { alias: ${JSON.stringify(field.alias)} }`;
    }
  } else if (field.alias) {
    // Alias without args still needs the args slot filled with null.
    line += `null, { alias: ${JSON.stringify(field.alias)} }`;
  }
  line += ")";
  if (hasSelections) {
    const hasInlineFragments = selections.some((s) => s.kind === "inlineFragment");
    // Sub-selections are typed against this field's return type when known.
    const nestedParentType = schema && parentTypeName ? getFieldReturnType(schema, parentTypeName, field.name) ?? undefined : undefined;
    if (hasInlineFragments) {
      // Union shape: the nested emitter provides its own `({ f }) =>` callbacks.
      const nestedResult = emitSelectionsInternal(selections, indent + 1, variableNames, schema, nestedParentType);
      if (nestedResult.isErr()) {
        return (0, neverthrow.err)(nestedResult.error);
      }
      line += "({\n";
      line += `${nestedResult.value}\n`;
      line += `${padding}})`;
    } else {
      // Object shape: wrap the nested fields in a `({ f }) => ({ ... })` callback.
      line += "(({ f }) => ({\n";
      const nestedResult = emitSelectionsInternal(selections, indent + 1, variableNames, schema, nestedParentType);
      if (nestedResult.isErr()) {
        return (0, neverthrow.err)(nestedResult.error);
      }
      line += `${nestedResult.value}\n`;
      line += `${padding}}))`;
    }
  }
  line += ",";
  return (0, neverthrow.ok)(line);
};
|
|
3755
|
+
/**
 * Check if a modifier represents a list type (contains []).
 *
 * @param modifier - Modifier string such as "!", "![]!", "?[]![]!".
 * @returns True when at least one list wrapper is present.
 */
const isListModifier = (modifier) => /\[\]/.test(modifier);
|
|
3761
|
+
/**
 * Determine if a value needs to be wrapped in an array for list coercion.
 * Returns true if:
 * - Expected type is a list
 * - Value is NOT already a list
 * - Value is NOT a variable (runtime handles coercion)
 * - Value is NOT null
 *
 * @param value - Parsed value node with a `kind` discriminant.
 * @param expectedModifier - Expected modifier string, possibly undefined.
 * @returns True when the emitter should wrap the value in `[...]`.
 */
const needsListCoercion = (value, expectedModifier) => {
  if (!expectedModifier) return false;
  if (!expectedModifier.includes("[]")) return false;
  return value.kind !== "variable" && value.kind !== "null" && value.kind !== "list";
};
|
|
3777
|
+
/**
 * Extract the element type from a list type by removing the outermost list modifier.
 * For example: "![]!" (non-null list of non-null) → "!" (non-null element)
 * "?[]![]!" (nested lists) → "?[]!" (inner list type)
 * Returns null if the modifier doesn't represent a list type.
 *
 * @param expectedType - { typeName, modifier } pair to unwrap.
 * @returns The element's { typeName, modifier }, or null for non-lists.
 */
const getListElementType = (expectedType) => {
  const { modifier, typeName } = expectedType;
  // Lazily capture everything before the trailing "[]!" / "[]?" wrapper.
  const match = /^(.+?)(\[\][!?])$/.exec(modifier);
  if (match === null || !match[1]) return null;
  return {
    typeName,
    modifier: match[1]
  };
};
|
|
3792
|
+
/**
 * Emit a value with type context for list coercion.
 *
 * Dispatch order matters: typed objects and typed lists recurse with element
 * types; everything else falls through to the untyped emitValue, with a
 * single `[...]` wrap when list coercion applies.
 *
 * @param value - Parsed value node.
 * @param expectedType - { typeName, modifier } or null when unknown.
 * @param variableNames - Names of variables in scope.
 * @param schema - Schema index or null.
 * @returns Ok with the emitted expression string or Err from nested emission.
 */
const emitValueWithType = (value, expectedType, variableNames, schema) => {
  const shouldCoerce = needsListCoercion(value, expectedType?.modifier);
  if (value.kind === "object" && expectedType && schema) {
    // Objects recurse field-by-field; coercion wraps the whole object.
    return emitObjectWithType(value, expectedType.typeName, variableNames, schema, shouldCoerce);
  }
  if (value.kind === "list" && expectedType && schema) {
    const elementType = getListElementType(expectedType);
    // Only recurse with element typing when the expected type is a real list.
    if (elementType) {
      const values = [];
      for (const v of value.values) {
        const result$1 = emitValueWithType(v, elementType, variableNames, schema);
        if (result$1.isErr()) return result$1;
        values.push(result$1.value);
      }
      return (0, neverthrow.ok)(`[${values.join(", ")}]`);
    }
  }
  // Fallback: emit without type context, then apply single-level coercion.
  const result = emitValue(value, variableNames);
  if (result.isErr()) return result;
  if (shouldCoerce) {
    return (0, neverthrow.ok)(`[${result.value}]`);
  }
  return result;
};
|
|
3819
|
+
/**
 * Emit an object value with type context for recursive list coercion.
 *
 * Each field is emitted via emitValueWithType using the input type's declared
 * field type, so nested list coercions apply at any depth.
 *
 * @param value - Parsed object value ({ fields: [{ name, value }] }).
 * @param inputTypeName - Input object type the value must conform to.
 * @param variableNames - Names of variables in scope.
 * @param schema - Schema index.
 * @param wrapInArray - When true, wrap the emitted object in `[...]`.
 * @returns Ok with the emitted object literal or Err on an unknown field.
 */
const emitObjectWithType = (value, inputTypeName, variableNames, schema, wrapInArray) => {
  if (value.fields.length === 0) {
    return (0, neverthrow.ok)(wrapInArray ? "[{}]" : "{}");
  }
  const entries = [];
  for (const f of value.fields) {
    const fieldType = getInputFieldType(schema, inputTypeName, f.name);
    if (fieldType === null) {
      return (0, neverthrow.err)({
        code: "GRAPHQL_UNKNOWN_FIELD",
        message: `Unknown field "${f.name}" on input type "${inputTypeName}"`,
        typeName: inputTypeName,
        fieldName: f.name
      });
    }
    const result = emitValueWithType(f.value, fieldType, variableNames, schema);
    if (result.isErr()) {
      return (0, neverthrow.err)(result.error);
    }
    entries.push(`${f.name}: ${result.value}`);
  }
  const objectStr = `{ ${entries.join(", ")} }`;
  return (0, neverthrow.ok)(wrapInArray ? `[${objectStr}]` : objectStr);
};
|
|
3846
|
+
/**
|
|
3847
|
+
* Emit field arguments with type context for list coercion.
|
|
3848
|
+
*/
|
|
3849
|
+
/**
 * Emit field arguments with type context for list coercion.
 *
 * Resolves each argument's declared type via getArgumentType when a schema
 * and parent field are available (null otherwise), emits the value through
 * emitValueWithType, and renders `{ name: value, ... }`. Empty argument
 * lists render as an empty string.
 */
const emitArguments = (args, variableNames, schema, parentTypeName, fieldName) => {
  if (args.length === 0) {
    return (0, neverthrow.ok)("");
  }
  const rendered = [];
  for (const arg of args) {
    // Only look the argument type up when all three context pieces exist.
    const expectedType = schema && parentTypeName && fieldName
      ? getArgumentType(schema, parentTypeName, fieldName, arg.name)
      : null;
    const emitted = emitValueWithType(arg.value, expectedType, variableNames, schema);
    if (emitted.isErr()) {
      return (0, neverthrow.err)(emitted.error);
    }
    rendered.push(`${arg.name}: ${emitted.value}`);
  }
  return (0, neverthrow.ok)(`{ ${rendered.join(", ")} }`);
};
|
|
3864
|
+
/**
|
|
3865
|
+
* Emit a value (literal or variable reference).
|
|
3866
|
+
*/
|
|
3867
|
+
/**
 * Emit a value (literal or variable reference) without schema type context.
 *
 * Variables must appear in variableNames, otherwise a
 * GRAPHQL_UNDECLARED_VARIABLE error is returned. Strings and enums are both
 * rendered as JSON string literals; lists and objects recurse. An unknown
 * kind falls through and yields undefined, matching the original switch.
 */
const emitValue = (value, variableNames) => {
  if (value.kind === "variable") {
    if (!variableNames.has(value.name)) {
      return (0, neverthrow.err)({
        code: "GRAPHQL_UNDECLARED_VARIABLE",
        message: `Variable "$${value.name}" is not declared in the operation`,
        variableName: value.name
      });
    }
    return (0, neverthrow.ok)(`$.${value.name}`);
  }
  if (value.kind === "int" || value.kind === "float") {
    return (0, neverthrow.ok)(value.value);
  }
  // Enums are emitted as quoted strings, same as string literals.
  if (value.kind === "string" || value.kind === "enum") {
    return (0, neverthrow.ok)(JSON.stringify(value.value));
  }
  if (value.kind === "boolean") {
    return (0, neverthrow.ok)(value.value ? "true" : "false");
  }
  if (value.kind === "null") {
    return (0, neverthrow.ok)("null");
  }
  if (value.kind === "list") {
    const rendered = [];
    for (const element of value.values) {
      const emitted = emitValue(element, variableNames);
      if (emitted.isErr()) {
        return (0, neverthrow.err)(emitted.error);
      }
      rendered.push(emitted.value);
    }
    return (0, neverthrow.ok)(`[${rendered.join(", ")}]`);
  }
  if (value.kind === "object") {
    if (value.fields.length === 0) {
      return (0, neverthrow.ok)("{}");
    }
    const entries = [];
    for (const field of value.fields) {
      const emitted = emitValue(field.value, variableNames);
      if (emitted.isErr()) {
        return (0, neverthrow.err)(emitted.error);
      }
      entries.push(`${field.name}: ${emitted.value}`);
    }
    return (0, neverthrow.ok)(`{ ${entries.join(", ")} }`);
  }
};
|
|
3911
|
+
|
|
3912
|
+
//#endregion
|
|
3913
|
+
//#region packages/tools/src/codegen/inject-template.ts
|
|
3914
|
+
const templateContents = `\
|
|
3915
|
+
import { defineScalar } from "@soda-gql/core";
|
|
3916
|
+
|
|
3917
|
+
export const scalar = {
|
|
3918
|
+
...defineScalar<"ID", string, string>("ID"),
|
|
3919
|
+
...defineScalar<"String", string, string>("String"),
|
|
3920
|
+
...defineScalar<"Int", number, number>("Int"),
|
|
3921
|
+
...defineScalar<"Float", number, number>("Float"),
|
|
3922
|
+
...defineScalar<"Boolean", boolean, boolean>("Boolean"),
|
|
3923
|
+
} as const;
|
|
3924
|
+
`;
|
|
3925
|
+
/**
 * Write the scalar-injection template to outPath.
 *
 * Refuses to overwrite an existing file (INJECT_TEMPLATE_EXISTS); any
 * filesystem failure is reported as INJECT_TEMPLATE_FAILED. Parent
 * directories are created as needed. Returns ok(undefined) on success.
 */
const writeInjectTemplate = (outPath) => {
  const resolvedTarget = (0, node_path.resolve)(outPath);
  const failure = (code, message) => (0, neverthrow.err)({
    code,
    message,
    outPath: resolvedTarget
  });
  try {
    if ((0, node_fs.existsSync)(resolvedTarget)) {
      return failure("INJECT_TEMPLATE_EXISTS", `Inject module already exists: ${resolvedTarget}`);
    }
    (0, node_fs.mkdirSync)((0, node_path.dirname)(resolvedTarget), { recursive: true });
    (0, node_fs.writeFileSync)(resolvedTarget, `${templateContents}\n`);
    return (0, neverthrow.ok)(undefined);
  } catch (error) {
    return failure("INJECT_TEMPLATE_FAILED", error instanceof Error ? error.message : String(error));
  }
};
|
|
3947
|
+
const getInjectTemplate = () => `${templateContents}\n`;
|
|
3948
|
+
|
|
3949
|
+
//#endregion
|
|
3950
|
+
//#region packages/tools/src/codegen/reachability.ts
|
|
3951
|
+
/**
|
|
3952
|
+
* Schema type reachability analysis.
|
|
3953
|
+
*
|
|
3954
|
+
* Determines which types are reachable from root types (Query/Mutation/Subscription)
|
|
3955
|
+
* to specified target types (e.g., fragment onType values from .graphql files).
|
|
3956
|
+
* Produces a CompiledFilter for use with existing buildExclusionSet.
|
|
3957
|
+
*
|
|
3958
|
+
* @module
|
|
3959
|
+
*/
|
|
3960
|
+
/**
 * Unwrap list/non-null wrappers and return the underlying named type's name.
 * Returns undefined for any node kind outside NAMED_TYPE/LIST_TYPE/NON_NULL_TYPE,
 * matching the original switch's fall-through.
 */
const extractNamedType = (typeNode) => {
  let node = typeNode;
  while (node.kind !== graphql.Kind.NAMED_TYPE) {
    if (node.kind !== graphql.Kind.LIST_TYPE && node.kind !== graphql.Kind.NON_NULL_TYPE) {
      return undefined;
    }
    node = node.type;
  }
  return node.name.value;
};
|
|
3967
|
+
/**
 * Add a directed edge `from -> to` into an adjacency map of Sets,
 * creating the Set for `from` on first use.
 */
const addEdge = (graph, from, to) => {
  const existing = graph.get(from);
  if (existing) {
    existing.add(to);
    return;
  }
  graph.set(from, new Set([to]));
};
|
|
3975
|
+
/**
 * Build forward and reverse type-reference graphs from a schema document.
 *
 * Edges are added for object field return types and argument types, input
 * field types, and union members — always in both directions (forward for
 * root-to-target traversal, reverse for target-to-root). Returns the graph
 * pair together with the schema index it was built from.
 */
const buildTypeGraph = (document) => {
  const schema = (0, __soda_gql_core.createSchemaIndex)(document);
  const forward = new Map();
  const reverse = new Map();
  const link = (from, to) => {
    addEdge(forward, from, to);
    addEdge(reverse, to, from);
  };
  for (const [typeName, record] of schema.objects) {
    for (const field of record.fields.values()) {
      link(typeName, extractNamedType(field.type));
      // Arguments also create reachability (e.g. input types used by a field).
      for (const arg of field.arguments ?? []) {
        link(typeName, extractNamedType(arg.type));
      }
    }
  }
  for (const [typeName, record] of schema.inputs) {
    for (const field of record.fields.values()) {
      link(typeName, extractNamedType(field.type));
    }
  }
  for (const [typeName, record] of schema.unions) {
    for (const memberName of record.members.keys()) {
      link(typeName, memberName);
    }
  }
  return {
    graph: { forward, reverse },
    schema
  };
};
|
|
4014
|
+
/**
|
|
4015
|
+
* BFS traversal collecting all reachable nodes from seeds.
|
|
4016
|
+
*/
|
|
4017
|
+
/**
 * BFS traversal collecting all reachable nodes from seeds.
 *
 * @param adjacency - Map<node, Set<node>> adjacency lists.
 * @param seeds - starting nodes (deduplicated, included in the result).
 * @param constraint - optional Set; when given, only nodes inside it are visited.
 * @returns Set of every node reached, seeds included.
 */
const bfs = (adjacency, seeds, constraint) => {
  const visited = new Set();
  const queue = [];
  for (const seed of seeds) {
    if (visited.has(seed)) continue;
    visited.add(seed);
    queue.push(seed);
  }
  // Index-based queue: O(1) dequeue without Array#shift.
  for (let cursor = 0; cursor < queue.length; cursor += 1) {
    const current = queue[cursor];
    if (current === undefined) break;
    const neighbors = adjacency.get(current);
    if (!neighbors) continue;
    for (const neighbor of neighbors) {
      if (visited.has(neighbor)) continue;
      if (constraint && !constraint.has(neighbor)) continue;
      visited.add(neighbor);
      queue.push(neighbor);
    }
  }
  return visited;
};
|
|
4041
|
+
/**
|
|
4042
|
+
* Compute the set of type names reachable on paths from root types to target types.
|
|
4043
|
+
*
|
|
4044
|
+
* Algorithm:
|
|
4045
|
+
* 1. Backward BFS from target types to find all upstream types
|
|
4046
|
+
* 2. Forward BFS from root types, constrained to upstream set, to find actual paths
|
|
4047
|
+
* 3. Collect input/enum/scalar types used as field arguments on reachable object types
|
|
4048
|
+
*/
|
|
4049
|
+
const computeReachableTypes = (graph, schema, targetTypes, usedArgumentTypes) => {
	// Phase 1: walk the reverse graph from the targets — every type that can
	// appear "above" a target on some path.
	const upstream = bfs(graph.reverse, targetTypes);
	const rootTypes = [];
	if (schema.operationTypes.query) rootTypes.push(schema.operationTypes.query);
	if (schema.operationTypes.mutation) rootTypes.push(schema.operationTypes.mutation);
	if (schema.operationTypes.subscription) rootTypes.push(schema.operationTypes.subscription);
	// Only roots that can actually reach a target matter.
	const validRoots = rootTypes.filter((r) => upstream.has(r));
	// Phase 2: forward walk from those roots, constrained to the upstream set,
	// yields exactly the types lying on root→target paths.
	const pathTypes = bfs(graph.forward, validRoots, upstream);
	const reachable = new Set(pathTypes);
	const inputQueue = [];
	// Phase 3: for each object type on a path, pull in leaf return types
	// (enums/scalars — anything not a known composite) and, unless the caller
	// supplied an explicit usedArgumentTypes set, every argument type too.
	for (const typeName of pathTypes) {
		const objectRecord = schema.objects.get(typeName);
		if (!objectRecord) continue;
		for (const field of objectRecord.fields.values()) {
			const returnType = extractNamedType(field.type);
			if (!reachable.has(returnType)) {
				// Composites were already handled by the path BFS; only add leaves.
				const isKnownComposite = schema.objects.has(returnType) || schema.inputs.has(returnType) || schema.unions.has(returnType);
				if (!isKnownComposite) {
					reachable.add(returnType);
				}
			}
			if (!usedArgumentTypes && field.arguments) {
				for (const arg of field.arguments) {
					const argType = extractNamedType(arg.type);
					if (!reachable.has(argType)) {
						reachable.add(argType);
						// Input types need their own transitive closure (phase 4).
						if (schema.inputs.has(argType)) {
							inputQueue.push(argType);
						}
					}
				}
			}
		}
	}
	// When the caller knows which argument types are actually used, seed the
	// input closure from that set instead of from every field argument.
	if (usedArgumentTypes) {
		for (const inputName of usedArgumentTypes) {
			if (!reachable.has(inputName)) {
				reachable.add(inputName);
				inputQueue.push(inputName);
			}
		}
	}
	// Phase 4: transitive closure over input-object fields (index-based queue,
	// no Array#shift); nested inputs re-enter the queue, leaves are just added.
	let inputHead = 0;
	while (inputHead < inputQueue.length) {
		const inputName = inputQueue[inputHead++];
		if (inputName === undefined) break;
		const inputRecord = schema.inputs.get(inputName);
		if (!inputRecord) continue;
		for (const field of inputRecord.fields.values()) {
			const fieldType = extractNamedType(field.type);
			if (!reachable.has(fieldType)) {
				reachable.add(fieldType);
				if (schema.inputs.has(fieldType)) {
					inputQueue.push(fieldType);
				}
			}
		}
	}
	return reachable;
};
|
|
4109
|
+
/**
|
|
4110
|
+
* Compute a filter function that includes only types reachable from root types
|
|
4111
|
+
* to the specified target types.
|
|
4112
|
+
*
|
|
4113
|
+
* When targetTypes is empty, returns a pass-all filter (no filtering).
|
|
4114
|
+
* Warns when target types are not found in the schema.
|
|
4115
|
+
*
|
|
4116
|
+
* @param document - The parsed GraphQL schema document
|
|
4117
|
+
* @param targetTypes - Set of type names that fragments target (e.g., from ParsedFragment.onType)
|
|
4118
|
+
* @returns Filter function and any warnings
|
|
4119
|
+
*/
|
|
4120
|
+
/**
 * Compute a filter function that includes only types reachable from root
 * types to the specified target types.
 *
 * Falls back to a pass-all filter (with any accumulated warnings) when:
 * targetTypes is empty, no target exists in the schema, or nothing is
 * reachable from the root operations.
 *
 * @param document - The parsed GraphQL schema document.
 * @param targetTypes - Type names that fragments target.
 * @param usedArgumentTypes - Optional explicit set of argument input types in use.
 * @returns { filter, warnings }
 */
const computeReachabilityFilter = (document, targetTypes, usedArgumentTypes) => {
  const passAll = (warnings) => ({
    filter: () => true,
    warnings
  });
  if (targetTypes.size === 0) {
    return passAll([]);
  }
  const { graph, schema } = buildTypeGraph(document);
  const knownTypeNames = new Set([
    ...schema.objects.keys(),
    ...schema.inputs.keys(),
    ...schema.enums.keys(),
    ...schema.unions.keys(),
    ...schema.scalars.keys()
  ]);
  const warnings = [];
  const validTargets = new Set();
  for (const target of targetTypes) {
    if (knownTypeNames.has(target)) {
      validTargets.add(target);
    } else {
      warnings.push(`Target type "${target}" not found in schema`);
    }
  }
  if (validTargets.size === 0) {
    return passAll(warnings);
  }
  const reachable = computeReachableTypes(graph, schema, validTargets, usedArgumentTypes);
  if (reachable.size === 0) {
    warnings.push(`No types reachable from root operations to target types: ${[...validTargets].join(", ")}; skipping reachability filter`);
    return passAll(warnings);
  }
  return {
    filter: (context) => reachable.has(context.name),
    warnings
  };
};
|
|
4163
|
+
|
|
4164
|
+
//#endregion
|
|
4165
|
+
//#region packages/tools/src/codegen/bundler/esbuild.ts
|
|
4166
|
+
/**
 * Bundler backend backed by esbuild: bundles a source entry into a sibling
 * `.cjs` file (CommonJS, node platform, no sourcemap/minify/tree-shaking),
 * leaving the given externals unresolved. Errors surface as EMIT_FAILED.
 */
const esbuildBundler = {
  name: "esbuild",
  bundle: async ({ sourcePath, external }) => {
    try {
      const sourceExt = (0, node_path.extname)(sourcePath);
      const stem = sourcePath.slice(0, -sourceExt.length);
      const cjsPath = `${stem}.cjs`;
      await (0, esbuild.build)({
        entryPoints: [sourcePath],
        outfile: cjsPath,
        format: "cjs",
        platform: "node",
        bundle: true,
        external: [...external],
        sourcemap: false,
        minify: false,
        treeShaking: false
      });
      return (0, neverthrow.ok)({ cjsPath });
    } catch (error) {
      const reason = error instanceof Error ? error.message : String(error);
      return (0, neverthrow.err)({
        code: "EMIT_FAILED",
        message: `[esbuild] Failed to bundle: ${reason}`,
        outPath: sourcePath
      });
    }
  }
};
|
|
4194
|
+
|
|
4195
|
+
//#endregion
|
|
4196
|
+
//#region packages/tools/src/codegen/defs-generator.ts
|
|
4197
|
+
/**
|
|
4198
|
+
* Split an array into chunks of the specified size.
|
|
4199
|
+
*/
|
|
4200
|
+
/**
 * Split an array into chunks of the specified size.
 * A non-positive size yields a single chunk containing a copy of the array.
 */
const chunkArray = (array, size) => {
  if (size <= 0) {
    return [Array.from(array)];
  }
  const chunks = [];
  let offset = 0;
  while (offset < array.length) {
    chunks.push(array.slice(offset, offset + size));
    offset += size;
  }
  return chunks;
};
|
|
4210
|
+
/**
|
|
4211
|
+
* Determine if chunking is needed based on the number of definitions.
|
|
4212
|
+
*/
|
|
4213
|
+
/** True when there are more definitions than fit in one chunk. */
const needsChunking = (vars, chunkSize) => vars.length > chunkSize;
|
|
4216
|
+
/**
|
|
4217
|
+
* Generate a single definition file content.
|
|
4218
|
+
*/
|
|
4219
|
+
// Generate a single (unchunked) definition file's source text for one
// category. Empty categories get a header-only stub; the "enums" category
// additionally imports defineEnum when requested. Each var's `code` is
// emitted as an `export` statement.
// NOTE(review): leading whitespace inside the emitted header template may
// have been lost in this rendering — verify against the package source.
const generateDefinitionFile = (options) => {
	const { category, vars, needsDefineEnum } = options;
	if (vars.length === 0) {
		return `/**
* ${category} definitions (empty)
* @generated by @soda-gql/tools/codegen
*/
`;
	}
	const imports = [];
	// defineEnum is only needed by enum definitions.
	if (needsDefineEnum && category === "enums") {
		imports.push("import { defineEnum } from \"@soda-gql/core\";");
	}
	const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
	const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
	return `/**
* ${category} definitions
* @generated by @soda-gql/tools/codegen
*/
${importsBlock}${exportStatements}
`;
};
|
|
4241
|
+
/**
|
|
4242
|
+
* Generate a chunk file content.
|
|
4243
|
+
*/
|
|
4244
|
+
// Generate one chunk file's source text: same shape as generateDefinitionFile
// but the header carries the chunk index. Empty chunks get a header-only stub.
// NOTE(review): leading whitespace inside the emitted header template may
// have been lost in this rendering — verify against the package source.
const generateChunkFile = (options) => {
	const { category, vars, chunkIndex, needsDefineEnum } = options;
	if (vars.length === 0) {
		return `/**
* ${category} chunk ${chunkIndex} (empty)
* @generated by @soda-gql/tools/codegen
*/
`;
	}
	const imports = [];
	// defineEnum is only needed by enum definitions.
	if (needsDefineEnum && category === "enums") {
		imports.push("import { defineEnum } from \"@soda-gql/core\";");
	}
	const importsBlock = imports.length > 0 ? `${imports.join("\n")}\n\n` : "";
	const exportStatements = vars.map((v) => `export ${v.code}`).join("\n");
	return `/**
* ${category} chunk ${chunkIndex}
* @generated by @soda-gql/tools/codegen
*/
${importsBlock}${exportStatements}
`;
};
|
|
4266
|
+
/**
|
|
4267
|
+
* Generate the index file that re-exports all chunks.
|
|
4268
|
+
*/
|
|
4269
|
+
// Generate the index file that re-exports all chunks (`export * from
// "./chunk-<i>";` for i in [0, chunkCount)).
// NOTE(review): leading whitespace inside the emitted header template may
// have been lost in this rendering — verify against the package source.
const generateChunkIndex = (options) => {
	const { category, chunkCount } = options;
	const reExports = Array.from({ length: chunkCount }, (_, i) => `export * from "./chunk-${i}";`).join("\n");
	return `/**
* ${category} index (re-exports all chunks)
* @generated by @soda-gql/tools/codegen
*/
${reExports}
`;
};
|
|
4279
|
+
/**
|
|
4280
|
+
* Generate chunked definition files.
|
|
4281
|
+
*/
|
|
4282
|
+
/**
 * Generate chunked definition files for one category: an index file plus one
 * chunk file per chunkSize-sized slice of vars. Each chunk entry carries its
 * index, rendered content, and the names of the vars it contains.
 */
const generateChunkedDefinitionFiles = (category, schemaName, vars, chunkSize) => {
  const slices = chunkArray(vars, chunkSize);
  const needsDefineEnum = category === "enums";
  const chunks = [];
  for (let chunkIndex = 0; chunkIndex < slices.length; chunkIndex += 1) {
    const chunkVars = slices[chunkIndex];
    chunks.push({
      chunkIndex,
      content: generateChunkFile({
        category,
        schemaName,
        vars: chunkVars,
        chunkIndex,
        needsDefineEnum
      }),
      varNames: chunkVars.map((v) => v.name)
    });
  }
  const indexContent = generateChunkIndex({
    category,
    chunkCount: slices.length,
    varNames: vars.map((v) => v.name)
  });
  return {
    indexContent,
    chunks
  };
};
|
|
4307
|
+
/**
|
|
4308
|
+
* Generate the complete _defs directory structure.
|
|
4309
|
+
*/
|
|
4310
|
+
/**
 * Generate the complete _defs directory structure.
 *
 * For each category, emits either a single `_defs/<category>.ts` file or —
 * when the category exceeds chunkSize — a `_defs/<category>/` directory with
 * an index plus chunk files. Returns the file list and the per-category
 * import paths the generated module should use.
 */
const generateDefsStructure = (schemaName, categoryVars, chunkSize) => {
  const files = [];
  const importPaths = {
    enums: "./_defs/enums",
    inputs: "./_defs/inputs",
    objects: "./_defs/objects",
    unions: "./_defs/unions"
  };
  for (const category of ["enums", "inputs", "objects", "unions"]) {
    const vars = categoryVars[category];
    const needsDefineEnum = category === "enums";
    if (!needsChunking(vars, chunkSize)) {
      files.push({
        relativePath: `_defs/${category}.ts`,
        content: generateDefinitionFile({
          category,
          schemaName,
          vars,
          needsDefineEnum
        })
      });
      continue;
    }
    const chunked = generateChunkedDefinitionFiles(category, schemaName, vars, chunkSize);
    importPaths[category] = `./_defs/${category}`;
    files.push({
      relativePath: `_defs/${category}/index.ts`,
      content: chunked.indexContent
    });
    for (const chunk of chunked.chunks) {
      files.push({
        relativePath: `_defs/${category}/chunk-${chunk.chunkIndex}.ts`,
        content: chunk.content
      });
    }
  }
  return {
    files,
    importPaths
  };
};
|
|
4358
|
+
|
|
4359
|
+
//#endregion
|
|
4360
|
+
//#region packages/tools/src/codegen/file.ts
|
|
4361
|
+
/**
 * Recursively and forcefully remove a directory (no error if it is absent).
 * Filesystem failures surface as a REMOVE_FAILED error Result.
 */
const removeDirectory = (dirPath) => {
  const resolvedTarget = (0, node_path.resolve)(dirPath);
  try {
    (0, node_fs.rmSync)(resolvedTarget, {
      recursive: true,
      force: true
    });
    return (0, neverthrow.ok)(undefined);
  } catch (error) {
    return (0, neverthrow.err)({
      code: "REMOVE_FAILED",
      message: error instanceof Error ? error.message : String(error),
      outPath: resolvedTarget
    });
  }
};
|
|
4378
|
+
/**
 * Read a module file as UTF-8 text.
 * Returns ok(content) or a READ_FAILED error Result.
 */
const readModule = (filePath) => {
  const resolvedTarget = (0, node_path.resolve)(filePath);
  try {
    return (0, neverthrow.ok)((0, node_fs.readFileSync)(resolvedTarget, "utf-8"));
  } catch (error) {
    return (0, neverthrow.err)({
      code: "READ_FAILED",
      message: error instanceof Error ? error.message : String(error),
      outPath: resolvedTarget
    });
  }
};
|
|
4392
|
+
/**
 * Write module contents to outPath, creating parent directories as needed.
 * Returns ok(undefined) or an EMIT_FAILED error Result.
 */
const writeModule = (outPath, contents) => {
  const resolvedTarget = (0, node_path.resolve)(outPath);
  try {
    (0, node_fs.mkdirSync)((0, node_path.dirname)(resolvedTarget), { recursive: true });
    (0, node_fs.writeFileSync)(resolvedTarget, contents);
    return (0, neverthrow.ok)(undefined);
  } catch (error) {
    return (0, neverthrow.err)({
      code: "EMIT_FAILED",
      message: error instanceof Error ? error.message : String(error),
      outPath: resolvedTarget
    });
  }
};
|
|
4407
|
+
|
|
4408
|
+
//#endregion
|
|
4409
|
+
//#region packages/tools/src/codegen/schema.ts
|
|
4410
|
+
/**
|
|
4411
|
+
* Load a single schema file.
|
|
4412
|
+
* @internal Use loadSchema for public API.
|
|
4413
|
+
*/
|
|
4414
|
+
/**
 * Load a single schema file and parse it into a GraphQL DocumentNode.
 * Missing files yield SCHEMA_NOT_FOUND; read/parse failures yield
 * SCHEMA_INVALID.
 * @internal Use loadSchema for public API.
 */
const loadSingleSchema = (schemaPath) => {
  const resolvedPath = (0, node_path.resolve)(schemaPath);
  if (!(0, node_fs.existsSync)(resolvedPath)) {
    return (0, neverthrow.err)({
      code: "SCHEMA_NOT_FOUND",
      message: `Schema file not found at ${resolvedPath}`,
      schemaPath: resolvedPath
    });
  }
  try {
    const schemaSource = (0, node_fs.readFileSync)(resolvedPath, "utf8");
    return (0, neverthrow.ok)((0, graphql.parse)(schemaSource));
  } catch (error) {
    const reason = error instanceof Error ? error.message : String(error);
    return (0, neverthrow.err)({
      code: "SCHEMA_INVALID",
      message: `SchemaValidationError: ${reason}`,
      schemaPath: resolvedPath
    });
  }
};
|
|
4436
|
+
/**
|
|
4437
|
+
* Load and merge multiple schema files into a single DocumentNode.
|
|
4438
|
+
* Uses GraphQL's concatAST to combine definitions from all files.
|
|
4439
|
+
*/
|
|
4440
|
+
/**
 * Load and merge multiple schema files into a single DocumentNode.
 * Stops at the first load failure; merging uses GraphQL's concatAST.
 */
const loadSchema = (schemaPaths) => {
  const documents = [];
  for (const schemaPath of schemaPaths) {
    const loaded = loadSingleSchema(schemaPath);
    if (loaded.isErr()) {
      return (0, neverthrow.err)(loaded.error);
    }
    documents.push(loaded.value);
  }
  return (0, neverthrow.ok)((0, graphql.concatAST)(documents));
};
|
|
4452
|
+
const hashSchema = (document) => (0, node_crypto.createHash)("sha256").update((0, graphql.print)(document)).digest("hex");
|
|
4453
|
+
|
|
4454
|
+
//#endregion
|
|
4455
|
+
//#region packages/tools/src/codegen/runner.ts
|
|
4456
|
+
// Maps a source file extension to the extension its compiled output carries at
// runtime (TypeScript variants compile to their JS counterparts; JS variants
// are identity). Consumed by toImportSpecifier when includeExtension is set.
const extensionMap = {
	".ts": ".js",
	".tsx": ".js",
	".mts": ".mjs",
	".cts": ".cjs",
	".js": ".js",
	".mjs": ".mjs",
	".cjs": ".cjs"
};
|
|
4465
|
+
/**
 * Build a relative import specifier from fromPath's directory to targetPath.
 *
 * Without options.includeExtension the extension is stripped; with it, the
 * source extension is mapped to its runtime extension (extensionMap) and
 * appended. Backslashes are normalized to forward slashes, and specifiers
 * are always `./`-prefixed. An empty relative path (same file) resolves to
 * `./<basename>`.
 */
const toImportSpecifier = (fromPath, targetPath, options) => {
  const fromDir = (0, node_path.dirname)(fromPath);
  const relativeSpec = (0, node_path.relative)(fromDir, targetPath).replace(/\\/g, "/");
  const sourceExt = (0, node_path.extname)(targetPath);
  const dotPrefixed = (spec) => (spec.startsWith(".") ? spec : `./${spec}`);
  const stripExt = (spec) => {
    const ext = (0, node_path.extname)(spec);
    return ext ? spec.slice(0, -ext.length) : spec;
  };
  if (!options?.includeExtension) {
    if (relativeSpec.length === 0) {
      return `./${(0, node_path.basename)(targetPath, sourceExt)}`;
    }
    return stripExt(dotPrefixed(relativeSpec));
  }
  const runtimeExt = extensionMap[sourceExt] ?? sourceExt;
  if (relativeSpec.length === 0) {
    // Drop the source extension only when it differs from the runtime one.
    const base = runtimeExt !== sourceExt
      ? (0, node_path.basename)(targetPath, sourceExt)
      : (0, node_path.basename)(targetPath);
    return `./${base}${runtimeExt}`;
  }
  const prefixed = dotPrefixed(relativeSpec);
  if (!runtimeExt || prefixed.endsWith(runtimeExt)) {
    return prefixed;
  }
  return `${stripExt(prefixed)}${runtimeExt}`;
};
|
|
4493
|
+
const runCodegen = async (options) => {
|
|
4494
|
+
const outPath = (0, node_path.resolve)(options.outPath);
|
|
4495
|
+
const importSpecifierOptions = { includeExtension: options.importExtension };
|
|
4496
|
+
for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {
|
|
4497
|
+
const scalarPath = (0, node_path.resolve)(schemaConfig.inject.scalars);
|
|
4498
|
+
if (!(0, node_fs.existsSync)(scalarPath)) {
|
|
4499
|
+
return (0, neverthrow.err)({
|
|
4500
|
+
code: "INJECT_MODULE_NOT_FOUND",
|
|
4501
|
+
message: `Scalar module not found for schema '${schemaName}': ${scalarPath}`,
|
|
4502
|
+
injectPath: scalarPath
|
|
4503
|
+
});
|
|
4504
|
+
}
|
|
4505
|
+
if (schemaConfig.inject.adapter) {
|
|
4506
|
+
const adapterPath = (0, node_path.resolve)(schemaConfig.inject.adapter);
|
|
4507
|
+
if (!(0, node_fs.existsSync)(adapterPath)) {
|
|
4508
|
+
return (0, neverthrow.err)({
|
|
4509
|
+
code: "INJECT_MODULE_NOT_FOUND",
|
|
4510
|
+
message: `Adapter module not found for schema '${schemaName}': ${adapterPath}`,
|
|
4511
|
+
injectPath: adapterPath
|
|
4512
|
+
});
|
|
4513
|
+
}
|
|
4514
|
+
}
|
|
4515
|
+
}
|
|
4516
|
+
const schemas = new Map();
|
|
4517
|
+
const schemaHashes = {};
|
|
4518
|
+
for (const [name, schemaConfig] of Object.entries(options.schemas)) {
|
|
4519
|
+
const preloaded = options.preloadedSchemas?.get(name);
|
|
4520
|
+
if (preloaded) {
|
|
4521
|
+
schemas.set(name, preloaded);
|
|
4522
|
+
} else {
|
|
4523
|
+
const result = await loadSchema(schemaConfig.schema).match((doc) => Promise.resolve((0, neverthrow.ok)(doc)), (error) => Promise.resolve((0, neverthrow.err)(error)));
|
|
4524
|
+
if (result.isErr()) {
|
|
4525
|
+
return (0, neverthrow.err)(result.error);
|
|
4526
|
+
}
|
|
4527
|
+
schemas.set(name, result.value);
|
|
4528
|
+
}
|
|
4529
|
+
}
|
|
4530
|
+
const injectionConfig = new Map();
|
|
4531
|
+
for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {
|
|
4532
|
+
const injectConfig = schemaConfig.inject;
|
|
4533
|
+
injectionConfig.set(schemaName, {
|
|
4534
|
+
scalarImportPath: toImportSpecifier(outPath, (0, node_path.resolve)(injectConfig.scalars), importSpecifierOptions),
|
|
4535
|
+
...injectConfig.adapter ? { adapterImportPath: toImportSpecifier(outPath, (0, node_path.resolve)(injectConfig.adapter), importSpecifierOptions) } : {}
|
|
4536
|
+
});
|
|
4537
|
+
}
|
|
4538
|
+
const defaultInputDepthConfig = new Map();
|
|
4539
|
+
const inputDepthOverridesConfig = new Map();
|
|
4540
|
+
for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {
|
|
4541
|
+
if (schemaConfig.defaultInputDepth !== undefined && schemaConfig.defaultInputDepth !== 3) {
|
|
4542
|
+
defaultInputDepthConfig.set(schemaName, schemaConfig.defaultInputDepth);
|
|
4543
|
+
}
|
|
4544
|
+
if (schemaConfig.inputDepthOverrides && Object.keys(schemaConfig.inputDepthOverrides).length > 0) {
|
|
4545
|
+
inputDepthOverridesConfig.set(schemaName, schemaConfig.inputDepthOverrides);
|
|
4546
|
+
}
|
|
4547
|
+
}
|
|
4548
|
+
const chunkSize = options.chunkSize ?? 100;
|
|
4549
|
+
const typeFiltersConfig = new Map();
|
|
4550
|
+
for (const [schemaName, schemaConfig] of Object.entries(options.schemas)) {
|
|
4551
|
+
if (schemaConfig.typeFilter) {
|
|
4552
|
+
typeFiltersConfig.set(schemaName, schemaConfig.typeFilter);
|
|
4553
|
+
}
|
|
4554
|
+
}
|
|
4555
|
+
const { code: internalCode, injectsCode, categoryVars } = generateMultiSchemaModule(schemas, {
|
|
4556
|
+
injection: injectionConfig,
|
|
4557
|
+
defaultInputDepth: defaultInputDepthConfig.size > 0 ? defaultInputDepthConfig : undefined,
|
|
4558
|
+
inputDepthOverrides: inputDepthOverridesConfig.size > 0 ? inputDepthOverridesConfig : undefined,
|
|
4559
|
+
chunkSize,
|
|
4560
|
+
typeFilters: typeFiltersConfig.size > 0 ? typeFiltersConfig : undefined
|
|
4561
|
+
});
|
|
4562
|
+
const schemaNames = Object.keys(options.schemas);
|
|
4563
|
+
const allFieldNames = new Map();
|
|
4564
|
+
for (const [name, document] of schemas.entries()) {
|
|
4565
|
+
const schemaIndex = (0, __soda_gql_core.createSchemaIndex)(document);
|
|
4566
|
+
const fieldNameSet = new Set();
|
|
4567
|
+
for (const [objectName, record] of schemaIndex.objects.entries()) {
|
|
4568
|
+
if (objectName.startsWith("__")) continue;
|
|
4569
|
+
for (const fieldName of record.fields.keys()) {
|
|
4570
|
+
fieldNameSet.add(fieldName);
|
|
4571
|
+
}
|
|
4572
|
+
}
|
|
4573
|
+
allFieldNames.set(name, Array.from(fieldNameSet).sort());
|
|
4574
|
+
const objects = Array.from(schemaIndex.objects.keys()).filter((n) => !n.startsWith("__")).length;
|
|
4575
|
+
const enums = Array.from(schemaIndex.enums.keys()).filter((n) => !n.startsWith("__")).length;
|
|
4576
|
+
const inputs = Array.from(schemaIndex.inputs.keys()).filter((n) => !n.startsWith("__")).length;
|
|
4577
|
+
const unions = Array.from(schemaIndex.unions.keys()).filter((n) => !n.startsWith("__")).length;
|
|
4578
|
+
schemaHashes[name] = {
|
|
4579
|
+
schemaHash: hashSchema(document),
|
|
4580
|
+
objects,
|
|
4581
|
+
enums,
|
|
4582
|
+
inputs,
|
|
4583
|
+
unions
|
|
4584
|
+
};
|
|
4585
|
+
}
|
|
4586
|
+
const indexCode = generateIndexModule(schemaNames, allFieldNames);
|
|
4587
|
+
const injectsPath = (0, node_path.join)((0, node_path.dirname)(outPath), "_internal-injects.ts");
|
|
4588
|
+
if (injectsCode) {
|
|
4589
|
+
const injectsWriteResult = await writeModule(injectsPath, injectsCode).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
|
|
4590
|
+
if (injectsWriteResult.isErr()) {
|
|
4591
|
+
return (0, neverthrow.err)(injectsWriteResult.error);
|
|
4592
|
+
}
|
|
4593
|
+
}
|
|
4594
|
+
const defsPaths = [];
|
|
4595
|
+
if (categoryVars) {
|
|
4596
|
+
const outDir = (0, node_path.dirname)(outPath);
|
|
4597
|
+
const defsDir = (0, node_path.join)(outDir, "_defs");
|
|
4598
|
+
if ((0, node_fs.existsSync)(defsDir)) {
|
|
4599
|
+
const removeResult = removeDirectory(defsDir);
|
|
4600
|
+
if (removeResult.isErr()) {
|
|
4601
|
+
return (0, neverthrow.err)(removeResult.error);
|
|
4602
|
+
}
|
|
4603
|
+
}
|
|
4604
|
+
const combinedVars = {
|
|
4605
|
+
enums: [],
|
|
4606
|
+
inputs: [],
|
|
4607
|
+
objects: [],
|
|
4608
|
+
unions: []
|
|
4609
|
+
};
|
|
4610
|
+
for (const vars of Object.values(categoryVars)) {
|
|
4611
|
+
combinedVars.enums.push(...vars.enums);
|
|
4612
|
+
combinedVars.inputs.push(...vars.inputs);
|
|
4613
|
+
combinedVars.objects.push(...vars.objects);
|
|
4614
|
+
combinedVars.unions.push(...vars.unions);
|
|
4615
|
+
}
|
|
4616
|
+
const defsStructure = generateDefsStructure("combined", combinedVars, chunkSize);
|
|
4617
|
+
for (const file of defsStructure.files) {
|
|
4618
|
+
const filePath = (0, node_path.join)(outDir, file.relativePath);
|
|
4619
|
+
const writeResult = await writeModule(filePath, file.content).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
|
|
4620
|
+
if (writeResult.isErr()) {
|
|
4621
|
+
return (0, neverthrow.err)(writeResult.error);
|
|
4622
|
+
}
|
|
4623
|
+
defsPaths.push(filePath);
|
|
4624
|
+
}
|
|
4625
|
+
}
|
|
4626
|
+
const internalPath = (0, node_path.join)((0, node_path.dirname)(outPath), "_internal.ts");
|
|
4627
|
+
const internalWriteResult = await writeModule(internalPath, internalCode).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
|
|
4628
|
+
if (internalWriteResult.isErr()) {
|
|
4629
|
+
return (0, neverthrow.err)(internalWriteResult.error);
|
|
4630
|
+
}
|
|
4631
|
+
const indexWriteResult = await writeModule(outPath, indexCode).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
|
|
4632
|
+
if (indexWriteResult.isErr()) {
|
|
4633
|
+
return (0, neverthrow.err)(indexWriteResult.error);
|
|
4634
|
+
}
|
|
4635
|
+
const prebuiltStubPath = (0, node_path.join)((0, node_path.dirname)(outPath), "types.prebuilt.ts");
|
|
4636
|
+
if (!(0, node_fs.existsSync)(prebuiltStubPath)) {
|
|
4637
|
+
const prebuiltStubCode = generatePrebuiltStub(schemaNames);
|
|
4638
|
+
const prebuiltWriteResult = await writeModule(prebuiltStubPath, prebuiltStubCode).match(() => Promise.resolve((0, neverthrow.ok)(undefined)), (error) => Promise.resolve((0, neverthrow.err)(error)));
|
|
4639
|
+
if (prebuiltWriteResult.isErr()) {
|
|
4640
|
+
return (0, neverthrow.err)(prebuiltWriteResult.error);
|
|
4641
|
+
}
|
|
4642
|
+
} else {
|
|
4643
|
+
const readResult = readModule(prebuiltStubPath);
|
|
4644
|
+
if (readResult.isErr()) {
|
|
4645
|
+
return (0, neverthrow.err)(readResult.error);
|
|
4646
|
+
}
|
|
4647
|
+
const existingContent = readResult.value;
|
|
4648
|
+
const existingNames = new Set();
|
|
4649
|
+
for (const match of existingContent.matchAll(/export type PrebuiltTypes_(\w+)/g)) {
|
|
4650
|
+
const name = match[1];
|
|
4651
|
+
if (name) existingNames.add(name);
|
|
4652
|
+
}
|
|
4653
|
+
const missingNames = schemaNames.filter((name) => !existingNames.has(name));
|
|
4654
|
+
if (missingNames.length > 0) {
|
|
4655
|
+
const missingStubs = generatePrebuiltStub(missingNames);
|
|
4656
|
+
const stubDeclarations = missingStubs.replace(/^\/\*\*[\s\S]*?\*\/\n\n/, "");
|
|
4657
|
+
const updatedContent = `${existingContent.trimEnd()}\n\n${stubDeclarations}`;
|
|
4658
|
+
const patchResult = writeModule(prebuiltStubPath, updatedContent);
|
|
4659
|
+
if (patchResult.isErr()) {
|
|
4660
|
+
return (0, neverthrow.err)(patchResult.error);
|
|
4661
|
+
}
|
|
4662
|
+
}
|
|
4663
|
+
}
|
|
4664
|
+
const bundleOutcome = await esbuildBundler.bundle({
|
|
4665
|
+
sourcePath: outPath,
|
|
4666
|
+
external: ["@soda-gql/core", "@soda-gql/core/runtime"]
|
|
4667
|
+
});
|
|
4668
|
+
const bundleResult = bundleOutcome.match((result) => (0, neverthrow.ok)(result), (error) => (0, neverthrow.err)(error));
|
|
4669
|
+
if (bundleResult.isErr()) {
|
|
4670
|
+
return (0, neverthrow.err)(bundleResult.error);
|
|
4671
|
+
}
|
|
4672
|
+
return (0, neverthrow.ok)({
|
|
4673
|
+
schemas: schemaHashes,
|
|
4674
|
+
outPath,
|
|
4675
|
+
internalPath,
|
|
4676
|
+
injectsPath,
|
|
4677
|
+
cjsPath: bundleResult.value.cjsPath,
|
|
4678
|
+
...defsPaths.length > 0 ? { defsPaths } : {}
|
|
4679
|
+
});
|
|
4680
|
+
};
|
|
4681
|
+
|
|
4682
|
+
//#endregion
|
|
4683
|
+
exports.collectVariableUsages = collectVariableUsages;
|
|
4684
|
+
exports.compileTypeFilter = compileTypeFilter;
|
|
4685
|
+
exports.computeReachabilityFilter = computeReachabilityFilter;
|
|
4686
|
+
exports.emitFragment = emitFragment;
|
|
4687
|
+
exports.emitOperation = emitOperation;
|
|
4688
|
+
exports.getArgumentType = getArgumentType;
|
|
4689
|
+
exports.getFieldReturnType = getFieldReturnType;
|
|
4690
|
+
exports.getInputFieldType = getInputFieldType;
|
|
4691
|
+
exports.hashSchema = hashSchema;
|
|
4692
|
+
exports.inferVariablesFromUsages = inferVariablesFromUsages;
|
|
4693
|
+
exports.isModifierAssignable = isModifierAssignable;
|
|
4694
|
+
exports.loadSchema = loadSchema;
|
|
4695
|
+
exports.mergeModifiers = mergeModifiers;
|
|
4696
|
+
exports.mergeVariableUsages = mergeVariableUsages;
|
|
4697
|
+
exports.parseGraphqlFile = parseGraphqlFile;
|
|
4698
|
+
exports.parseGraphqlSource = parseGraphqlSource;
|
|
4699
|
+
exports.parseTypeNode = parseTypeNode;
|
|
4700
|
+
exports.runCodegen = runCodegen;
|
|
4701
|
+
exports.sortFragmentsByDependency = sortFragmentsByDependency;
|
|
4702
|
+
exports.transformParsedGraphql = transformParsedGraphql;
|
|
4703
|
+
exports.writeInjectTemplate = writeInjectTemplate;
|
|
4704
|
+
//# sourceMappingURL=codegen.cjs.map
|