@soda-gql/metro-plugin 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +165 -0
- package/dist/chunk--GtjC1aJ.mjs +7 -0
- package/dist/index.cjs +78 -0
- package/dist/index.cjs.map +1 -0
- package/dist/index.d.cts +49 -0
- package/dist/index.d.cts.map +1 -0
- package/dist/index.d.mts +49 -0
- package/dist/index.d.mts.map +1 -0
- package/dist/index.mjs +60 -0
- package/dist/index.mjs.map +1 -0
- package/dist/transformer.cjs +990 -0
- package/dist/transformer.cjs.map +1 -0
- package/dist/transformer.d.cts +17 -0
- package/dist/transformer.d.cts.map +1 -0
- package/dist/transformer.d.mts +17 -0
- package/dist/transformer.d.mts.map +1 -0
- package/dist/transformer.mjs +961 -0
- package/dist/transformer.mjs.map +1 -0
- package/dist/types-DrVbRHyR.d.cts +108 -0
- package/dist/types-DrVbRHyR.d.cts.map +1 -0
- package/dist/types-DvXBqw4W.d.mts +108 -0
- package/dist/types-DvXBqw4W.d.mts.map +1 -0
- package/package.json +70 -0
|
@@ -0,0 +1,990 @@
|
|
|
1
|
+
//#region rolldown:runtime
// CommonJS <-> ESM interop helpers emitted by the bundler.
var __create = Object.create;
var __defProp = Object.defineProperty;
var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
var __getOwnPropNames = Object.getOwnPropertyNames;
var __getProtoOf = Object.getPrototypeOf;
var __hasOwnProp = Object.prototype.hasOwnProperty;
/**
 * Copies the own properties of `from` onto `to` as live getters, skipping
 * `except` and any key `to` already owns. Enumerability mirrors the source
 * property's descriptor (defaulting to enumerable when no descriptor exists).
 */
var __copyProps = (to, from, except, desc) => {
  if (from && (typeof from === "object" || typeof from === "function")) {
    for (const key of __getOwnPropNames(from)) {
      if (__hasOwnProp.call(to, key) || key === except) continue;
      __defProp(to, key, {
        get: ((k) => from[k]).bind(null, key),
        enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable
      });
    }
  }
  return to;
};
/**
 * Wraps a CommonJS module so it can be consumed as an ES module namespace.
 * Unless the module already declares `__esModule`, a `default` property
 * pointing at the whole module object is added before copying its keys.
 */
var __toESM = (mod, isNodeMode, target) => {
  target = mod != null ? __create(__getProtoOf(mod)) : {};
  const ns = isNodeMode || !mod || !mod.__esModule
    ? __defProp(target, "default", { value: mod, enumerable: true })
    : target;
  return __copyProps(ns, mod);
};

//#endregion
|
|
28
|
+
let __soda_gql_plugin_common = require("@soda-gql/plugin-common");
|
|
29
|
+
let node_crypto = require("node:crypto");
|
|
30
|
+
node_crypto = __toESM(node_crypto);
|
|
31
|
+
let __babel_core = require("@babel/core");
|
|
32
|
+
let __soda_gql_babel_plugin = require("@soda-gql/babel-plugin");
|
|
33
|
+
let __soda_gql_common = require("@soda-gql/common");
|
|
34
|
+
|
|
35
|
+
//#region node_modules/@jridgewell/sourcemap-codec/dist/sourcemap-codec.mjs
// Base64 VLQ codec for the `mappings` field of a v3 source map.
var comma = ",".charCodeAt(0);
var semicolon = ";".charCodeAt(0);
var chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
var intToChar = new Uint8Array(64);
var charToInt = new Uint8Array(128);
// Build both lookup tables: sextet value -> base64 char code, and back.
for (let i = 0; i < chars.length; i++) {
  const code = chars.charCodeAt(i);
  intToChar[i] = code;
  charToInt[code] = i;
}
/**
 * Reads one base64 VLQ from `reader` and returns it added to `relative`
 * (source-map fields are delta-encoded against the previous value).
 */
function decodeInteger(reader, relative) {
  let value = 0;
  let shift = 0;
  let digit = 0;
  do {
    digit = charToInt[reader.next()];
    value |= (digit & 31) << shift;
    shift += 5;
  } while (digit & 32);
  const negative = value & 1;
  value >>>= 1;
  // -0x80000000 has no positive counterpart in 32 bits; OR restores it.
  if (negative) value = -2147483648 | -value;
  return relative + value;
}
/**
 * Writes `num` as a base64 VLQ delta against `relative` into `builder`,
 * returning `num` so the caller can carry it as the new baseline.
 */
function encodeInteger(builder, num, relative) {
  let delta = num - relative;
  delta = delta < 0 ? (-delta << 1) | 1 : delta << 1;
  do {
    let sextet = delta & 31;
    delta >>>= 5;
    if (delta > 0) sextet |= 32; // continuation bit
    builder.write(intToChar[sextet]);
  } while (delta > 0);
  return num;
}
/** True when another VLQ field follows before `max` in the current segment. */
function hasMoreVlq(reader, max) {
  if (reader.pos >= max) return false;
  return reader.peek() !== comma;
}
var bufLength = 1024 * 16;
// Prefer TextDecoder; fall back to Buffer (Node) or a char-by-char loop.
var td = typeof TextDecoder !== "undefined" ? /* @__PURE__ */ new TextDecoder() : typeof Buffer !== "undefined" ? { decode(buf) {
  return Buffer.from(buf.buffer, buf.byteOffset, buf.byteLength).toString();
} } : { decode(buf) {
  let out = "";
  for (const byte of buf) out += String.fromCharCode(byte);
  return out;
} };
/** Accumulates base64 bytes and flushes them to a string in 16 KiB chunks. */
var StringWriter = class {
  constructor() {
    this.pos = 0;
    this.out = "";
    this.buffer = new Uint8Array(bufLength);
  }
  write(v) {
    this.buffer[this.pos++] = v;
    if (this.pos !== bufLength) return;
    this.out += td.decode(this.buffer);
    this.pos = 0;
  }
  flush() {
    if (this.pos === 0) return this.out;
    return this.out + td.decode(this.buffer.subarray(0, this.pos));
  }
};
/** Cursor over a mappings string exposing charCode-level reads. */
var StringReader = class {
  constructor(buffer) {
    this.pos = 0;
    this.buffer = buffer;
  }
  next() {
    return this.buffer.charCodeAt(this.pos++);
  }
  peek() {
    return this.buffer.charCodeAt(this.pos);
  }
  // Position of the next `char`, or end-of-string when absent.
  indexOf(char) {
    const idx = this.buffer.indexOf(char, this.pos);
    return idx === -1 ? this.buffer.length : idx;
  }
};
/**
 * Decodes a VLQ `mappings` string into per-line arrays of segments.
 * Segments within a line are re-sorted when the input was out of order.
 */
function decode(mappings) {
  const { length } = mappings;
  const reader = new StringReader(mappings);
  const decoded = [];
  // Running deltas; all fields except the generated column persist across lines.
  let genCol = 0;
  let srcIndex = 0;
  let srcLine = 0;
  let srcCol = 0;
  let nameIndex = 0;
  do {
    const semi = reader.indexOf(";");
    const line = [];
    let sorted = true;
    let lastCol = 0;
    genCol = 0;
    while (reader.pos < semi) {
      let seg;
      genCol = decodeInteger(reader, genCol);
      if (genCol < lastCol) sorted = false;
      lastCol = genCol;
      if (hasMoreVlq(reader, semi)) {
        srcIndex = decodeInteger(reader, srcIndex);
        srcLine = decodeInteger(reader, srcLine);
        srcCol = decodeInteger(reader, srcCol);
        if (hasMoreVlq(reader, semi)) {
          nameIndex = decodeInteger(reader, nameIndex);
          seg = [genCol, srcIndex, srcLine, srcCol, nameIndex];
        } else {
          seg = [genCol, srcIndex, srcLine, srcCol];
        }
      } else {
        seg = [genCol];
      }
      line.push(seg);
      reader.pos++; // step over the "," (or past the ";")
    }
    if (!sorted) sort(line);
    decoded.push(line);
    reader.pos = semi + 1;
  } while (reader.pos <= length);
  return decoded;
}
/** In-place sort of one line's segments by generated column. */
function sort(line) {
  line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
  return a[0] - b[0];
}
/** Encodes decoded mappings back into the VLQ `mappings` string. */
function encode(decoded) {
  const writer = new StringWriter();
  let srcIndex = 0;
  let srcLine = 0;
  let srcCol = 0;
  let nameIndex = 0;
  for (let i = 0; i < decoded.length; i++) {
    const line = decoded[i];
    if (i > 0) writer.write(semicolon);
    if (line.length === 0) continue;
    // The generated-column delta resets on every line.
    let genCol = 0;
    for (let j = 0; j < line.length; j++) {
      const segment = line[j];
      if (j > 0) writer.write(comma);
      genCol = encodeInteger(writer, segment[0], genCol);
      if (segment.length === 1) continue;
      srcIndex = encodeInteger(writer, segment[1], srcIndex);
      srcLine = encodeInteger(writer, segment[2], srcLine);
      srcCol = encodeInteger(writer, segment[3], srcCol);
      if (segment.length === 4) continue;
      nameIndex = encodeInteger(writer, segment[4], nameIndex);
    }
  }
  return writer.flush();
}

//#endregion
|
|
201
|
+
//#region node_modules/@jridgewell/resolve-uri/dist/resolve-uri.mjs
const schemeRegex = /^[\w+.-]+:\/\//;
/**
 * Matches the parts of a URL:
 * 1. Scheme, including ":", guaranteed.
 * 2. User/password, including "@", optional.
 * 3. Host, guaranteed.
 * 4. Port, including ":", optional.
 * 5. Path, including "/", optional.
 * 6. Query, including "?", optional.
 * 7. Hash, including "#", optional.
 */
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
 * File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
 * with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
 *
 * 1. Host, optional.
 * 2. Path, which may include "/", guaranteed.
 * 3. Query, including "?", optional.
 * 4. Hash, including "#", optional.
 */
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
/** "scheme://" prefix present. */
function isAbsoluteUrl(input) {
  return schemeRegex.test(input);
}
/** "//host/..." form (protocol-relative). */
function isSchemeRelativeUrl(input) {
  return input.startsWith("//");
}
/** "/path" form. */
function isAbsolutePath(input) {
  return input.startsWith("/");
}
/** "file:" URL. */
function isFileUrl(input) {
  return input.startsWith("file:");
}
/** Starts with ".", "?", or "#". */
function isRelative(input) {
  return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
  const [, scheme, user, host, port, path, query, hash] = urlRegex.exec(input);
  return makeUrl(scheme, user || "", host, port || "", path || "/", query || "", hash || "");
}
function parseFileUrl(input) {
  const [, host, path, query, hash] = fileRegex.exec(input);
  return makeUrl("file:", "", host || "", "", isAbsolutePath(path) ? path : "/" + path, query || "", hash || "");
}
/** URL record; `type` (1..7) encodes how relative the input was. */
function makeUrl(scheme, user, host, port, path, query, hash) {
  return {
    scheme,
    user,
    host,
    port,
    path,
    query,
    hash,
    type: 7
  };
}
/** Parses any input: absolute, scheme/path-relative, or query/hash-only. */
function parseUrl(input) {
  if (isSchemeRelativeUrl(input)) {
    const url = parseAbsoluteUrl("http:" + input);
    url.scheme = "";
    url.type = 6;
    return url;
  }
  if (isAbsolutePath(input)) {
    const url = parseAbsoluteUrl("http://foo.com" + input);
    url.scheme = "";
    url.host = "";
    url.type = 5;
    return url;
  }
  if (isFileUrl(input)) return parseFileUrl(input);
  if (isAbsoluteUrl(input)) return parseAbsoluteUrl(input);
  // Fully relative: parse against a placeholder origin, then classify.
  const url = parseAbsoluteUrl("http://foo.com/" + input);
  url.scheme = "";
  url.host = "";
  if (!input) url.type = 1;
  else if (input.startsWith("?")) url.type = 3;
  else if (input.startsWith("#")) url.type = 2;
  else url.type = 4;
  return url;
}
/** Removes the final path segment, keeping the trailing "/". */
function stripPathFilename(path) {
  // A path ending in ".." has no filename to strip.
  if (path.endsWith("/..")) return path;
  return path.slice(0, path.lastIndexOf("/") + 1);
}
/** Prefixes `url.path` with `base`'s directory (base is normalized first). */
function mergePaths(url, base) {
  normalizePath$1(base, base.type);
  if (url.path === "/") url.path = base.path;
  else url.path = stripPathFilename(base.path) + url.path;
}
/**
 * The path can have empty directories "//", unneeded parents "foo/..", or current directory
 * "foo/.". We need to normalize to a standard representation.
 */
function normalizePath$1(url, type) {
  const rel = type <= 4;
  const pieces = url.path.split("/");
  // pieces[0] is always "" (paths start with "/"); compact in place from 1.
  let write = 1;
  let kept = 0;
  let addTrailingSlash = false;
  for (let i = 1; i < pieces.length; i++) {
    const piece = pieces[i];
    if (!piece) {
      addTrailingSlash = true;
      continue;
    }
    addTrailingSlash = false;
    if (piece === ".") continue;
    if (piece === "..") {
      if (kept) {
        addTrailingSlash = true;
        kept--;
        write--;
      } else if (rel) {
        // Relative paths may retain leading ".." segments.
        pieces[write++] = piece;
      }
      continue;
    }
    pieces[write++] = piece;
    kept++;
  }
  let path = "";
  for (let i = 1; i < write; i++) path += "/" + pieces[i];
  if (!path || addTrailingSlash && !path.endsWith("/..")) path += "/";
  url.path = path;
}
/**
 * Attempts to resolve `input` URL/path relative to `base`.
 */
function resolve(input, base) {
  if (!input && !base) return "";
  const url = parseUrl(input);
  let inputType = url.type;
  if (base && inputType !== 7) {
    const baseUrl = parseUrl(base);
    const baseType = baseUrl.type;
    // Deliberate fallthrough: each relative level inherits everything the
    // levels below it inherit, plus one more component from the base.
    switch (inputType) {
      case 1: url.hash = baseUrl.hash;
      case 2: url.query = baseUrl.query;
      case 3:
      case 4: mergePaths(url, baseUrl);
      case 5:
        url.user = baseUrl.user;
        url.host = baseUrl.host;
        url.port = baseUrl.port;
      case 6: url.scheme = baseUrl.scheme;
    }
    if (baseType > inputType) inputType = baseType;
  }
  normalizePath$1(url, inputType);
  const queryHash = url.query + url.hash;
  switch (inputType) {
    case 2:
    case 3: return queryHash;
    case 4: {
      const path = url.path.slice(1);
      if (!path) return queryHash || ".";
      if (isRelative(base || input) && !isRelative(path)) return "./" + path + queryHash;
      return path + queryHash;
    }
    case 5: return url.path + queryHash;
    default: return url.scheme + "//" + url.user + url.host + url.port + url.path + queryHash;
  }
}

//#endregion
|
|
366
|
+
//#region node_modules/@jridgewell/trace-mapping/dist/trace-mapping.mjs
/** Strips the trailing filename from `path`, keeping the final "/". */
function stripFilename(path) {
  if (!path) return "";
  return path.slice(0, path.lastIndexOf("/") + 1);
}
/**
 * Builds a function that resolves a `sources` entry against the map's own
 * URL and optional `sourceRoot`.
 */
function resolver(mapUrl, sourceRoot) {
  const base = stripFilename(mapUrl);
  const prefix = sourceRoot ? sourceRoot + "/" : "";
  return (source) => resolve(prefix + (source || ""), base);
}
|
|
377
|
+
var COLUMN$1 = 0;
/**
 * Returns `mappings` with every line's segments ordered by generated column.
 * Copies lazily: the input is returned untouched when already sorted, and
 * lines/arrays are cloned before sorting unless the caller `owned` them.
 */
function maybeSort(mappings, owned) {
  const first = nextUnsortedSegmentLine(mappings, 0);
  if (first === mappings.length) return mappings;
  if (!owned) mappings = mappings.slice();
  for (let i = first; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
    mappings[i] = sortSegments(mappings[i], owned);
  }
  return mappings;
}
/** Index of the next out-of-order line, or `mappings.length` when none. */
function nextUnsortedSegmentLine(mappings, start) {
  for (let i = start; i < mappings.length; i++) {
    if (!isSorted(mappings[i])) return i;
  }
  return mappings.length;
}
/** Whether a line's segments are already ordered by generated column. */
function isSorted(line) {
  for (let j = 1; j < line.length; j++) {
    if (line[j][COLUMN$1] < line[j - 1][COLUMN$1]) return false;
  }
  return true;
}
/** Sorts one line's segments, cloning first unless the caller owns it. */
function sortSegments(line, owned) {
  if (!owned) line = line.slice();
  return line.sort(sortComparator);
}
/** Orders segments by generated column. */
function sortComparator(a, b) {
  return a[COLUMN$1] - b[COLUMN$1];
}
|
|
400
|
+
// Module-level flag: whether the last binary search hit the needle exactly.
// Read by traceSegmentInternal immediately after each search.
var found = false;
/**
 * Binary search over segments (sorted by generated column) for `needle`.
 * Returns the matching index, or the index just before the insertion point
 * (possibly -1). Sets `found` accordingly.
 */
function binarySearch(haystack, needle, low, high) {
  while (low <= high) {
    const mid = low + (high - low >> 1);
    const cmp = haystack[mid][COLUMN$1] - needle;
    if (cmp === 0) {
      found = true;
      return mid;
    }
    if (cmp < 0) low = mid + 1;
    else high = mid - 1;
  }
  found = false;
  return low - 1;
}
/** Last index whose column equals `needle`, scanning right from `index`. */
function upperBound(haystack, needle, index) {
  for (let i = index + 1; i < haystack.length; index = i++) {
    if (haystack[i][COLUMN$1] !== needle) break;
  }
  return index;
}
/** First index whose column equals `needle`, scanning left from `index`. */
function lowerBound(haystack, needle, index) {
  for (let i = index - 1; i >= 0; index = i--) {
    if (haystack[i][COLUMN$1] !== needle) break;
  }
  return index;
}
/** Fresh memo for memoizedBinarySearch (per line key). */
function memoizedState() {
  return {
    lastKey: -1,
    lastNeedle: -1,
    lastIndex: -1
  };
}
/**
 * Binary search that reuses the previous result when searching the same
 * `key` (line) again, narrowing the range for monotonically moving needles.
 */
function memoizedBinarySearch(haystack, needle, state, key) {
  const { lastKey, lastNeedle, lastIndex } = state;
  let low = 0;
  let high = haystack.length - 1;
  if (key === lastKey) {
    if (needle === lastNeedle) {
      // Identical query: recompute `found` and return the cached index.
      found = lastIndex !== -1 && haystack[lastIndex][COLUMN$1] === needle;
      return lastIndex;
    }
    if (needle >= lastNeedle) low = lastIndex === -1 ? 0 : lastIndex;
    else high = lastIndex;
  }
  state.lastKey = key;
  state.lastNeedle = needle;
  return state.lastIndex = binarySearch(haystack, needle, low, high);
}
|
|
446
|
+
/** Accepts a source map as a JSON string or an already-parsed object. */
function parse(map) {
  return typeof map === "string" ? JSON.parse(map) : map;
}
// Bias values for traceSegmentInternal: when no segment starts exactly at the
// searched column, pick the one just after (-1) or just before (1) it.
var LEAST_UPPER_BOUND = -1;
var GREATEST_LOWER_BOUND = 1;
/**
 * Lazily-decoded representation of a single (non-sectioned) v3 source map.
 * `mappings` may arrive as a VLQ string (decoded on demand) or as already
 * decoded arrays (sorted on construction).
 */
var TraceMap = class {
  constructor(map, mapUrl) {
    const isString = typeof map === "string";
    // Already a TraceMap-shaped object: reuse it as-is. (Returning an object
    // from a constructor replaces the newly created instance.)
    if (!isString && map._decodedMemo) return map;
    const parsed = parse(map);
    const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
    this.version = version;
    this.file = file;
    this.names = names || [];
    this.sourceRoot = sourceRoot;
    this.sources = sources;
    this.sourcesContent = sourcesContent;
    // `x_google_ignoreList` is the older spelling of `ignoreList`.
    this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || void 0;
    // Resolve every `sources` entry against the map URL / sourceRoot.
    const resolve$1 = resolver(mapUrl, sourceRoot);
    this.resolvedSources = sources.map(resolve$1);
    const { mappings } = parsed;
    if (typeof mappings === "string") {
      // Keep the VLQ string; decodedMappings() decodes it on first access.
      this._encoded = mappings;
      this._decoded = void 0;
    } else if (Array.isArray(mappings)) {
      this._encoded = void 0;
      // A parsed-from-string map owns its arrays, so sorting may be in place.
      this._decoded = maybeSort(mappings, isString);
    } else if (parsed.sections) throw new Error(`TraceMap passed sectioned source map, please use FlattenMap export instead`);
    else throw new Error(`invalid source map: ${JSON.stringify(parsed)}`);
    // Memo for the column binary search in traceSegment.
    this._decodedMemo = memoizedState();
    this._bySources = void 0;
    this._bySourceMemos = void 0;
  }
};
|
|
480
|
+
/** Identity helper: exposes TraceMap's private fields to the functions below. */
function cast$1(map) {
  return map;
}
/** Returns the decoded mappings, decoding the VLQ string on first access. */
function decodedMappings(map) {
  const tm = cast$1(map);
  if (!tm._decoded) tm._decoded = decode(tm._encoded);
  return tm._decoded;
}
/**
 * Finds the segment on generated `line` that covers generated `column`,
 * or null when that position has no mapping.
 */
function traceSegment(map, line, column) {
  const decoded = decodedMappings(map);
  if (line >= decoded.length) return null;
  const segments = decoded[line];
  const index = traceSegmentInternal(segments, cast$1(map)._decodedMemo, line, column, GREATEST_LOWER_BOUND);
  return index === -1 ? null : segments[index];
}
/**
 * Memoized binary search plus bias handling. Returns a segment index, or -1
 * when the column falls outside the line's segments for the given bias.
 */
function traceSegmentInternal(segments, memo, line, column, bias) {
  let index = memoizedBinarySearch(segments, column, memo, line);
  if (found) {
    // Exact hit: walk to the biased end of the run of equal columns.
    index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
  } else if (bias === LEAST_UPPER_BOUND) {
    index++;
  }
  return index === -1 || index === segments.length ? -1 : index;
}

//#endregion
|
|
503
|
+
//#region node_modules/@jridgewell/gen-mapping/dist/gen-mapping.mjs
/** Insertion-ordered string set with O(1) key -> index lookup. */
var SetArray = class {
  constructor() {
    this._indexes = { __proto__: null }; // prototype-free key -> index map
    this.array = [];
  }
};
/** Identity helper exposing SetArray internals. */
function cast(set) {
  return set;
}
/** Index of `key`, or undefined when absent. */
function get(setarr, key) {
  return cast(setarr)._indexes[key];
}
/** Index of `key`, inserting it at the end when absent. */
function put(setarr, key) {
  const existing = get(setarr, key);
  if (existing !== void 0) return existing;
  const { array, _indexes: indexes } = cast(setarr);
  const index = array.push(key) - 1;
  indexes[key] = index;
  return index;
}
/** Removes `key`, shifting later entries down and fixing their indexes. */
function remove(setarr, key) {
  const index = get(setarr, key);
  if (index === void 0) return;
  const { array, _indexes: indexes } = cast(setarr);
  for (let i = index + 1; i < array.length; i++) {
    const moved = array[i];
    array[i - 1] = moved;
    indexes[moved]--;
  }
  indexes[key] = void 0;
  array.pop();
}
|
|
534
|
+
// Offsets of the fields within a decoded segment tuple.
var COLUMN = 0;
var SOURCES_INDEX = 1;
var SOURCE_LINE = 2;
var SOURCE_COLUMN = 3;
var NAMES_INDEX = 4;
// Sentinel for "segment carries no name".
var NO_NAME = -1;
/** Mutable builder for a v3 source map (sources, names, mappings). */
var GenMapping = class {
  constructor({ file, sourceRoot } = {}) {
    this.file = file;
    this.sourceRoot = sourceRoot;
    this._names = new SetArray();
    this._sources = new SetArray();
    this._ignoreList = new SetArray();
    this._sourcesContent = [];
    this._mappings = [];
  }
};
|
|
551
|
+
/** Identity helper exposing GenMapping internals. */
function cast2(map) {
  return map;
}
/** Adds a segment unless it merely repeats the previous mapping state. */
var maybeAddSegment = (map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) => {
  return addSegmentInternal(true, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content);
};
/** Records `content` for `source`, adding the source when missing. */
function setSourceContent(map, source, content) {
  const { _sources: sources, _sourcesContent: sourcesContent } = cast2(map);
  sourcesContent[put(sources, source)] = content;
}
/** Marks `source` as ignored (or un-ignored), adding it when missing. */
function setIgnore(map, source, ignore = true) {
  const { _sources: sources, _sourcesContent: sourcesContent, _ignoreList: ignoreList } = cast2(map);
  const index = put(sources, source);
  // A newly added source gets a null content placeholder.
  if (index === sourcesContent.length) sourcesContent[index] = null;
  if (ignore) put(ignoreList, index);
  else remove(ignoreList, index);
}
/** Snapshot of the builder as a decoded (array-mappings) v3 map object. */
function toDecodedMap(map) {
  const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names, _ignoreList: ignoreList } = cast2(map);
  removeEmptyFinalLines(mappings);
  return {
    version: 3,
    file: map.file || void 0,
    names: names.array,
    sourceRoot: map.sourceRoot || void 0,
    sources: sources.array,
    sourcesContent,
    mappings,
    ignoreList: ignoreList.array
  };
}
/** Snapshot of the builder as an encoded (VLQ-string mappings) v3 map. */
function toEncodedMap(map) {
  const decoded = toDecodedMap(map);
  return Object.assign({}, decoded, { mappings: encode(decoded.mappings) });
}
|
|
587
|
+
/**
 * Core insertion for gen-mapping: places a segment at (genLine, genColumn),
 * keeping each line ordered by generated column. When `skipable`, segments
 * that repeat the previous mapping state are dropped.
 */
function addSegmentInternal(skipable, map, genLine, genColumn, source, sourceLine, sourceColumn, name, content) {
  const { _mappings: mappings, _sources: sources, _sourcesContent: sourcesContent, _names: names } = cast2(map);
  const line = getIndex(mappings, genLine);
  const index = getColumnIndex(line, genColumn);
  if (!source) {
    // Sourceless segment: only the generated column is recorded.
    if (skipable && skipSourceless(line, index)) return;
    return insert(line, index, [genColumn]);
  }
  assert(sourceLine);
  assert(sourceColumn);
  const sourcesIndex = put(sources, source);
  const namesIndex = name ? put(names, name) : NO_NAME;
  // First time we see this source: record its content (or null).
  if (sourcesIndex === sourcesContent.length) sourcesContent[sourcesIndex] = content != null ? content : null;
  if (skipable && skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex)) return;
  const segment = name
    ? [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex]
    : [genColumn, sourcesIndex, sourceLine, sourceColumn];
  return insert(line, index, segment);
}
/** Type-level assertion stub; a no-op at runtime. */
function assert(_val) {}
/** Line array for `index`, growing `arr` with empty lines as needed. */
function getIndex(arr, index) {
  for (let i = arr.length; i <= index; i++) arr[i] = [];
  return arr[index];
}
/** Insertion position within `line` that keeps columns ordered. */
function getColumnIndex(line, genColumn) {
  let index = line.length;
  for (let i = index - 1; i >= 0; index = i--) {
    if (genColumn >= line[i][COLUMN]) break;
  }
  return index;
}
/** Inserts `value` at `index`, shifting later entries one slot right. */
function insert(array, index, value) {
  for (let i = array.length; i > index; i--) array[i] = array[i - 1];
  array[index] = value;
}
/** Trims trailing lines that ended up with no segments. */
function removeEmptyFinalLines(mappings) {
  const { length } = mappings;
  let len = length;
  for (let i = len - 1; i >= 0; len = i, i--) {
    if (mappings[i].length > 0) break;
  }
  if (len < length) mappings.length = len;
}
/** A new sourceless segment is redundant right after another sourceless one. */
function skipSourceless(line, index) {
  if (index === 0) return true;
  return line[index - 1].length === 1;
}
/** A new sourced segment is redundant when it repeats the previous segment. */
function skipSource(line, index, sourcesIndex, sourceLine, sourceColumn, namesIndex) {
  if (index === 0) return false;
  const prev = line[index - 1];
  if (prev.length === 1) return false;
  return sourcesIndex === prev[SOURCES_INDEX] && sourceLine === prev[SOURCE_LINE] && sourceColumn === prev[SOURCE_COLUMN] && namesIndex === (prev.length === 5 ? prev[NAMES_INDEX] : NO_NAME);
}

//#endregion
|
|
646
|
+
//#region node_modules/@ampproject/remapping/dist/remapping.mjs
// Shared marker for generated positions that map to no original source.
const SOURCELESS_MAPPING = /* @__PURE__ */ SegmentObject("", -1, -1, "", null, false);
const EMPTY_SOURCES = [];
/** Plain record describing one traced original position. */
function SegmentObject(source, line, column, name, content, ignore) {
  return { source, line, column, name, content, ignore };
}
/** Node of the sourcemap tree: a map plus its child sources. */
function Source(map, sources, source, content, ignore) {
  return { map, sources, source, content, ignore };
}
/**
 * MapSource represents a single sourcemap, with the ability to trace mappings into its child nodes
 * (which may themselves be SourceMapTrees).
 */
function MapSource(map, sources) {
  return Source(map, sources, "", null, false);
}
/**
 * A "leaf" node in the sourcemap tree, representing an original, unmodified source file. Recursive
 * segment tracing ends at the `OriginalSource`.
 */
function OriginalSource(source, content, ignore) {
  return Source(null, EMPTY_SOURCES, source, content, ignore);
}
|
|
682
|
+
/**
 * traceMappings is only called on the root level SourceMapTree, and begins the process of
 * resolving each mapping in terms of the original source files.
 */
function traceMappings(tree) {
  const gen = new GenMapping({ file: tree.map.file });
  const { sources: rootSources, map } = tree;
  const rootNames = map.names;
  const rootMappings = decodedMappings(map);
  for (let genLine = 0; genLine < rootMappings.length; genLine++) {
    for (const segment of rootMappings[genLine]) {
      const genCol = segment[0];
      let traced = SOURCELESS_MAPPING;
      if (segment.length !== 1) {
        const child = rootSources[segment[1]];
        traced = originalPositionFor(child, segment[2], segment[3], segment.length === 5 ? rootNames[segment[4]] : "");
        // The child map has no mapping here: drop the segment entirely.
        if (traced == null) continue;
      }
      const { column, line, name, content, source, ignore } = traced;
      maybeAddSegment(gen, genLine, genCol, source, line, column, name);
      if (source && content != null) setSourceContent(gen, source, content);
      if (ignore) setIgnore(gen, source, true);
    }
  }
  return gen;
}
/**
 * originalPositionFor is only called on children SourceMapTrees. It recurses down into its own
 * child SourceMapTrees, until we find the original source map.
 */
function originalPositionFor(source, line, column, name) {
  // Leaf: an original file, so the position maps to itself.
  if (!source.map) return SegmentObject(source.source, line, column, name, source.content, source.ignore);
  const segment = traceSegment(source.map, line, column);
  // No mapping at this position in the child map.
  if (segment == null) return null;
  // Sourceless segment: terminate tracing with the shared marker.
  if (segment.length === 1) return SOURCELESS_MAPPING;
  return originalPositionFor(source.sources[segment[1]], segment[2], segment[3], segment.length === 5 ? source.map.names[segment[4]] : name);
}
/** Wraps a non-array value in a single-element array. */
function asArray(value) {
  return Array.isArray(value) ? value : [value];
}
|
|
725
|
+
/**
 * Recursively builds a tree structure out of sourcemap files, with each node
 * being either an `OriginalSource` "leaf" or a `SourceMapTree` composed of
 * `OriginalSource`s and `SourceMapTree`s.
 *
 * Accepts either a single map or an array ordered most-recent-first; every
 * map before the last must be a single-source transformation map, and each is
 * layered on top of the tree built from the final (root) map.
 */
function buildSourceMapTree(input, loader) {
	const traceMaps = asArray(input).map((m) => new TraceMap(m, ""));
	// The last entry is the map closest to the original sources; the rest are
	// transformation layers applied on top of it.
	const rootMap = traceMaps.pop();
	traceMaps.forEach((transformation, i) => {
		if (transformation.sources.length > 1) {
			throw new Error(`Transformation map ${i} must have exactly one source file.\nDid you specify these with the most recent transformation maps first?`);
		}
	});
	let tree = build(rootMap, loader, "", 0);
	// Wrap the tree in the transformation maps, innermost (oldest) first.
	for (let i = traceMaps.length - 1; i >= 0; i--) {
		tree = MapSource(traceMaps[i], [tree]);
	}
	return tree;
}
|
|
744
|
+
/**
 * Builds a SourceMapTree node for `map`, descending recursively through each
 * of its resolved sources.
 *
 * For every source, the user-supplied `loader` is invoked with a mutable
 * context object; the loader may return a nested sourcemap (triggering a
 * recursive build) and/or overwrite ctx.source/content/ignore to override
 * what is recorded for the leaf.
 */
function build(map, loader, importer, importerDepth) {
	const { resolvedSources, sourcesContent, ignoreList } = map;
	const depth = importerDepth + 1;
	return MapSource(map, resolvedSources.map((sourceFile, i) => {
		// The loader is allowed to mutate this context in place, so its fields
		// must only be read back after the loader call below.
		const ctx = {
			importer,
			depth,
			source: sourceFile || "",
			content: void 0,
			ignore: void 0
		};
		const sourceMap = loader(ctx.source, ctx);
		const { source, content, ignore } = ctx;
		// A returned map means this source was itself generated: recurse.
		if (sourceMap) return build(new TraceMap(sourceMap, source), loader, source, depth);
		// Original leaf: loader-provided overrides win over the parent map's
		// own sourcesContent / ignoreList entries.
		return OriginalSource(source, content !== void 0 ? content : sourcesContent ? sourcesContent[i] : null, ignore !== void 0 ? ignore : ignoreList ? ignoreList.includes(i) : false);
	}));
}
|
|
761
|
+
/**
 * A SourceMap v3 compatible sourcemap, which only includes fields that were
 * provided to it.
 */
var SourceMap = class {
	constructor(map, options) {
		// Materialize the builder with decoded (array) or VLQ-encoded mappings.
		const out = options.decodedMappings ? toDecodedMap(map) : toEncodedMap(map);
		const { version, file, mappings, names, ignoreList, sourceRoot, sources } = out;
		this.version = version;
		this.file = file;
		this.mappings = mappings;
		this.names = names;
		this.ignoreList = ignoreList;
		this.sourceRoot = sourceRoot;
		this.sources = sources;
		// Embedding file contents is optional; callers may strip it for size.
		if (!options.excludeContent) {
			this.sourcesContent = out.sourcesContent;
		}
	}

	toString() {
		return JSON.stringify(this);
	}
};
|
|
781
|
+
/**
 * Traces through all the mappings in the root sourcemap, through the sources
 * (and their sourcemaps), all the way back to the original source location.
 *
 * `loader` will be called every time we encounter a source file. If it returns
 * a sourcemap, we will recurse into that sourcemap to continue the trace. If
 * it returns a falsey value, that source file is treated as an original,
 * unmodified source file.
 *
 * `options` may be an options object (`{ excludeContent, decodedMappings }`)
 * or, for backwards compatibility, a boolean `excludeContent` flag.
 * Pass `excludeContent` to exclude any self-containing source file content
 * from the output sourcemap; pass `decodedMappings` to receive a SourceMap
 * with decoded (instead of VLQ encoded) mappings.
 */
function remapping(input, loader, options) {
	// `typeof null === "object"`, so guard explicitly: previously a null
	// `options` slipped through as `opts = null` and crashed inside the
	// SourceMap constructor when it read `options.decodedMappings`.
	const opts = typeof options === "object" && options !== null ? options : {
		excludeContent: !!options,
		decodedMappings: false
	};
	return new SourceMap(traceMappings(buildSourceMapTree(input, loader)), opts);
}
|
|
803
|
+
|
|
804
|
+
//#endregion
|
|
805
|
+
//#region packages/metro-plugin/src/transformer.ts
|
|
806
|
+
/**
 * Upstream transformer candidates in order of preference; the first one that
 * resolves is used (see getUpstreamTransformer).
 * - Expo projects: @expo/metro-config/babel-transformer
 * - React Native 0.73+: @react-native/metro-babel-transformer
 * - Legacy React Native: metro-react-native-babel-transformer
 */
const UPSTREAM_TRANSFORMER_CANDIDATES = [
	"@expo/metro-config/babel-transformer",
	"@react-native/metro-babel-transformer",
	"metro-react-native-babel-transformer"
];
|
|
817
|
+
/**
 * Cached upstream transformer module.
 * Null until getUpstreamTransformer() resolves and loads a candidate;
 * subsequent calls reuse this cached module.
 */
let upstreamTransformer = null;
|
|
821
|
+
/**
 * Attempts to resolve a module id to an absolute path.
 * Tries the plugin's own resolution scope first, then the project working
 * directory; returns null when no strategy can resolve the module.
 */
const tryResolve = (moduleName) => {
	const strategies = [
		() => require.resolve(moduleName),
		() => require.resolve(moduleName, { paths: [process.cwd()] })
	];
	for (const attempt of strategies) {
		try {
			return attempt();
		} catch {
			// Resolution failure is expected; fall through to the next strategy.
		}
	}
	return null;
};
|
|
834
|
+
/**
 * Detects and loads the upstream Metro Babel transformer, caching the loaded
 * module. Candidates are tried in preference order; throws when none of them
 * can be resolved.
 */
const getUpstreamTransformer = () => {
	if (upstreamTransformer) return upstreamTransformer;
	for (const candidate of UPSTREAM_TRANSFORMER_CANDIDATES) {
		const resolvedPath = tryResolve(candidate);
		if (!resolvedPath) continue;
		upstreamTransformer = require(resolvedPath);
		return upstreamTransformer;
	}
	throw new Error(`No compatible Metro Babel transformer found. Tried: ${UPSTREAM_TRANSFORMER_CANDIDATES.join(", ")}. Please install one of these packages.`);
};
|
|
849
|
+
/**
 * Cached plugin session.
 * pluginSession holds the session created by this module (null until then,
 * or if creation returned nothing); sessionInitialized ensures creation is
 * attempted at most once per process.
 */
let pluginSession = null;
let sessionInitialized = false;
|
|
854
|
+
/**
 * Ensures a plugin session is available.
 * Prefers a session already shared under the current state key; otherwise
 * creates this module's own session exactly once and publishes it.
 */
const ensurePluginSession = () => {
	const stateKey = (0, __soda_gql_plugin_common.getStateKey)();
	// Reuse a session another plugin instance already registered.
	const shared = (0, __soda_gql_plugin_common.getSharedPluginSession)(stateKey);
	if (shared) return shared;
	// Creation is attempted at most once; later calls return the cached result.
	if (sessionInitialized) return pluginSession;
	sessionInitialized = true;
	pluginSession = (0, __soda_gql_plugin_common.createPluginSession)({}, "@soda-gql/metro-plugin");
	if (pluginSession) {
		(0, __soda_gql_plugin_common.setSharedPluginSession)(stateKey, pluginSession);
	}
	return pluginSession;
};
|
|
869
|
+
/**
 * Whether SWC transformer initialization has been attempted.
 * Guards initializeSwcTransformer so the dynamic import is not retried
 * after the first attempt (successful or not).
 */
let swcInitialized = false;
|
|
873
|
+
/**
 * Initializes the SWC transformer when the shared configuration requests it.
 * Returns the shared transformer if one already exists, null when SWC is not
 * configured or its package cannot be loaded (a warning is emitted and the
 * caller falls back to Babel). Initialization is attempted at most once.
 */
const initializeSwcTransformer = async (artifact, config) => {
	const stateKey = (0, __soda_gql_plugin_common.getStateKey)();
	const existing = (0, __soda_gql_plugin_common.getSharedSwcTransformer)(stateKey);
	// Either a transformer is already shared, or we already tried and failed.
	if (existing || swcInitialized) return existing;
	swcInitialized = true;
	if ((0, __soda_gql_plugin_common.getSharedTransformerType)(stateKey) !== "swc") return null;
	try {
		const { createTransformer } = await import("@soda-gql/swc-transformer");
		const swc = await createTransformer({
			config,
			artifact,
			sourceMap: true
		});
		(0, __soda_gql_plugin_common.setSharedSwcTransformer)(stateKey, swc);
		return swc;
	} catch (error) {
		console.warn(`[@soda-gql/metro-plugin] Failed to initialize SWC transformer: ${error}. Make sure @soda-gql/swc-transformer is installed. Falling back to Babel.`);
		return null;
	}
};
|
|
896
|
+
/**
 * Transform source code with soda-gql transformations.
 * Wraps the upstream Metro Babel transformer: files that contain no artifact
 * elements are passed through untouched. For matching files the soda-gql pass
 * (SWC when configured, otherwise Babel) runs first, and its sourcemap is
 * composed with the upstream result via remapping().
 */
async function transform(params) {
	const { src, filename, options } = params;
	const stateKey = (0, __soda_gql_plugin_common.getStateKey)();
	const upstream = getUpstreamTransformer();
	const session = ensurePluginSession();
	// No plugin session means nothing to do — defer entirely to upstream.
	if (!session) return upstream.transform(params);
	let artifact = (0, __soda_gql_plugin_common.getSharedArtifact)(stateKey);
	if (!artifact) {
		artifact = await session.getArtifactAsync();
		if (artifact) (0, __soda_gql_plugin_common.setSharedArtifact)(stateKey, artifact);
	}
	if (!artifact) return upstream.transform(params);
	// Only files that own at least one artifact element need the soda-gql pass.
	const normalizedPath = (0, __soda_gql_common.normalizePath)(filename);
	if (!Object.values(artifact.elements).some((element) => (0, __soda_gql_common.normalizePath)(element.metadata.sourcePath) === normalizedPath)) return upstream.transform(params);
	const swcTransformer = await initializeSwcTransformer(artifact, session.config);
	if (swcTransformer) {
		// NOTE(review): transform() is not awaited here, so it is presumably
		// synchronous — confirm against @soda-gql/swc-transformer's API.
		const swcResult = swcTransformer.transform({
			sourceCode: src,
			sourcePath: filename
		});
		if (swcResult.transformed) {
			const upstreamResult$1 = await upstream.transform({
				src: swcResult.sourceCode,
				filename,
				options
			});
			if (swcResult.sourceMap && upstreamResult$1.map) {
				// Compose both maps: the upstream (most recent) map first, then ours.
				const mergedMap = remapping([upstreamResult$1.map, JSON.parse(swcResult.sourceMap)], () => null);
				return {
					...upstreamResult$1,
					map: mergedMap
				};
			}
			// Only our own map exists; attach it as-is.
			if (swcResult.sourceMap) return {
				...upstreamResult$1,
				map: JSON.parse(swcResult.sourceMap)
			};
			return upstreamResult$1;
		}
		// SWC reported no rewrite was needed; hand the original source upstream.
		return upstream.transform(params);
	}
	// Babel fallback: run only the soda-gql plugin, isolated from any project
	// babel config (babelrc/configFile disabled).
	const babelResult = (0, __babel_core.transformSync)(src, {
		filename,
		babelrc: false,
		configFile: false,
		plugins: [(0, __soda_gql_babel_plugin.createPluginWithArtifact)({
			artifact,
			config: session.config
		})],
		sourceMaps: true
	});
	if (!babelResult?.code) return upstream.transform(params);
	const upstreamResult = await upstream.transform({
		src: babelResult.code,
		filename,
		options
	});
	if (babelResult.map && upstreamResult.map) {
		// Compose both maps: the upstream (most recent) map first, then Babel's.
		const mergedMap = remapping([upstreamResult.map, babelResult.map], () => null);
		return {
			...upstreamResult,
			map: mergedMap
		};
	}
	if (babelResult.map) return {
		...upstreamResult,
		map: babelResult.map
	};
	return upstreamResult;
}
|
|
970
|
+
/**
 * Get cache key for the transformer.
 * Mixes in the upstream transformer's own key plus the shared state
 * generation and artifact element count, so Metro's cache is invalidated
 * whenever the models (artifact) change.
 */
function getCacheKey() {
	const stateKey = (0, __soda_gql_plugin_common.getStateKey)();
	const state = (0, __soda_gql_plugin_common.getSharedState)(stateKey);
	const artifact = state.currentArtifact;
	const upstream = getUpstreamTransformer();
	const hasher = node_crypto.default.createHash("md5");
	hasher.update("@soda-gql/metro-plugin:v1");
	// Chain the upstream transformer's key so its upgrades bust our cache too.
	if (upstream.getCacheKey) hasher.update(upstream.getCacheKey());
	hasher.update(String(state.generation));
	if (artifact) hasher.update(String(Object.keys(artifact.elements).length));
	return hasher.digest("hex");
}
|
|
985
|
+
|
|
986
|
+
//#endregion
|
|
987
|
+
// Public CommonJS surface: Metro consumes transform() and getCacheKey();
// __toESM is the bundler runtime helper (presumably re-used by a sibling
// bundle — verify against dist/index.cjs).
exports.__toESM = __toESM;
exports.getCacheKey = getCacheKey;
exports.transform = transform;
//# sourceMappingURL=transformer.cjs.map
|