instant-cli 0.22.96-experimental.add-posthog-frontend.20386914944.1 → 0.22.96
This diff compares the contents of two publicly released versions of this package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their public registries.
- package/.turbo/turbo-build.log +1 -1
- package/__tests__/mergeSchema.test.ts +197 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +15 -3
- package/dist/index.js.map +1 -1
- package/dist/util/mergeSchema.d.ts +2 -0
- package/dist/util/mergeSchema.d.ts.map +1 -0
- package/dist/util/mergeSchema.js +334 -0
- package/dist/util/mergeSchema.js.map +1 -0
- package/package.json +6 -4
- package/src/index.js +28 -10
- package/src/util/mergeSchema.js +364 -0
package/src/util/mergeSchema.js
@@ -0,0 +1,364 @@
import * as acorn from 'acorn';
import tsPlugin from 'acorn-typescript';

const node = acorn.Parser.extend(tsPlugin({ dts: false }));

// --- Import Handling Helpers ---

function collectImports(ast) {
  const imports = new Map(); // localName -> { source, importedName, type, importKind }

  for (const node of ast.body) {
    if (node.type === 'ImportDeclaration') {
      const source = node.source.value;
      const declImportKind = node.importKind;

      for (const specifier of node.specifiers) {
        let kind = 'value';
        if (declImportKind === 'type' || specifier.importKind === 'type') {
          kind = 'type';
        }

        if (specifier.type === 'ImportSpecifier') {
          imports.set(specifier.local.name, {
            source,
            importedName: specifier.imported.name,
            type: 'named',
            importKind: kind,
          });
        } else if (specifier.type === 'ImportDefaultSpecifier') {
          imports.set(specifier.local.name, {
            source,
            type: 'default',
            importKind: kind,
          });
        } else if (specifier.type === 'ImportNamespaceSpecifier') {
          imports.set(specifier.local.name, {
            source,
            type: 'namespace',
            importKind: kind,
          });
        }
      }
    }
  }
  return imports;
}

function collectExistingImports(ast) {
  const existing = new Map(); // source -> Set<localName>

  for (const node of ast.body) {
    if (node.type === 'ImportDeclaration') {
      const source = node.source.value;
      if (!existing.has(source)) existing.set(source, new Set());

      for (const specifier of node.specifiers) {
        existing.get(source).add(specifier.local.name);
      }
    }
  }
  return existing;
}

function findIdentifiers(node, set = new Set()) {
  if (!node) return set;

  // If it's a Type Reference, grab the name
  if (node.type === 'TSTypeReference') {
    if (node.typeName.type === 'Identifier') {
      set.add(node.typeName.name);
    } else if (node.typeName.type === 'TSQualifiedName') {
      // For A.B, we need A
      let left = node.typeName.left;
      while (left.type === 'TSQualifiedName') {
        left = left.left;
      }
      if (left.type === 'Identifier') {
        set.add(left.name);
      }
    }
  }

  // Recursive walk
  for (const key in node) {
    if (key === 'loc' || key === 'start' || key === 'end') continue;
    if (typeof node[key] === 'object' && node[key] !== null) {
      if (Array.isArray(node[key])) {
        node[key].forEach((child) => findIdentifiers(child, set));
      } else {
        findIdentifiers(node[key], set);
      }
    }
  }
  return set;
}

// --- Schema Traversal Helpers ---

function getPropName(prop) {
  if (prop.key.type === 'Identifier') return prop.key.name;
  if (prop.key.type === 'Literal') return prop.key.value;
  return null;
}

function analyzeChain(node) {
  let curr = node;
  let typeParams = null;
  let baseCall = null;

  while (curr.type === 'CallExpression') {
    if (curr.typeParameters) {
      typeParams = curr.typeParameters;
    }

    if (
      curr.callee.type === 'MemberExpression' &&
      curr.callee.object.type === 'Identifier' &&
      curr.callee.object.name === 'i'
    ) {
      baseCall = curr;
    }

    if (curr.callee.type === 'MemberExpression') {
      curr = curr.callee.object;
    } else {
      break;
    }
  }
  return { typeParams, baseCall };
}

function traverseSchema(node, path, callback) {
  if (node.type === 'ObjectExpression') {
    for (const prop of node.properties) {
      const name = getPropName(prop);
      if (!name) continue;
      const newPath = path ? `${path}.${name}` : name;

      if (prop.value.type === 'ObjectExpression') {
        traverseSchema(prop.value, newPath, callback);
      } else if (prop.value.type === 'CallExpression') {
        let isEntity = false;
        if (
          prop.value.callee.type === 'MemberExpression' &&
          prop.value.callee.property.name === 'entity' &&
          prop.value.callee.object.type === 'Identifier' &&
          prop.value.callee.object.name === 'i'
        ) {
          isEntity = true;
        }

        if (isEntity) {
          callback(prop.value, newPath);
          if (prop.value.arguments.length > 0) {
            traverseSchema(prop.value.arguments[0], newPath, callback);
          }
        } else {
          callback(prop.value, newPath);
        }
      }
    }
  }
}

function findSchemaObject(ast) {
  let schemaObj = null;
  function walk(node) {
    if (!node) return;
    if (schemaObj) return;
    if (
      node.type === 'CallExpression' &&
      node.callee.type === 'MemberExpression' &&
      node.callee.object.name === 'i' &&
      node.callee.property.name === 'schema' &&
      node.arguments.length > 0
    ) {
      schemaObj = node.arguments[0];
      return;
    }
    for (const key in node) {
      if (key === 'loc' || key === 'start' || key === 'end') continue;
      if (node[key] && typeof node[key] === 'object') {
        if (Array.isArray(node[key])) {
          node[key].forEach(walk);
        } else {
          walk(node[key]);
        }
      }
    }
  }
  walk(ast);
  return schemaObj;
}

export function mergeSchema(oldFile, newFile) {
  const oldParsed = node.parse(oldFile, {
    sourceType: 'module',
    ecmaVersion: 'latest',
    locations: true,
  });

  const newParsed = node.parse(newFile, {
    sourceType: 'module',
    ecmaVersion: 'latest',
    locations: true,
  });

  const schemaMap = new Map(); // Path -> { src, ast }

  // 1. Extract from old file
  const oldSchemaObj = findSchemaObject(oldParsed);
  if (oldSchemaObj) {
    traverseSchema(oldSchemaObj, '', (node, path) => {
      const { typeParams } = analyzeChain(node);
      if (typeParams) {
        const src = oldFile.slice(typeParams.start, typeParams.end);
        schemaMap.set(path, { src, ast: typeParams });
      }
    });
  }

  // 2. Collect Imports
  const oldImports = collectImports(oldParsed);
  const newExistingImports = collectExistingImports(newParsed);
  const neededIdentifiers = new Set();

  // 3. Apply to new file & Collect needed identifiers
  const edits = [];
  const newSchemaObj = findSchemaObject(newParsed);

  if (newSchemaObj) {
    traverseSchema(newSchemaObj, '', (node, path) => {
      const { typeParams, baseCall } = analyzeChain(node);
      const stored = schemaMap.get(path);

      if (stored) {
        // Collect identifiers from the type params we are about to inject
        findIdentifiers(stored.ast, neededIdentifiers);

        if (typeParams) {
          edits.push({
            start: typeParams.start,
            end: typeParams.end,
            text: stored.src,
          });
        } else if (baseCall) {
          edits.push({
            start: baseCall.callee.end,
            end: baseCall.callee.end,
            text: stored.src,
          });
        }
      }
    });
  }

  // 4. Generate Import Statements
  const importsToAdd = new Map(); // source -> { named: Map<string, {str, isType}>, default: {str, isType}, namespace: {str, isType} }

  for (const id of neededIdentifiers) {
    const importInfo = oldImports.get(id);
    if (importInfo) {
      // Check if already imported in new file
      const existing = newExistingImports.get(importInfo.source);
      if (existing && existing.has(id)) {
        continue; // Already imported
      }

      if (!importsToAdd.has(importInfo.source)) {
        importsToAdd.set(importInfo.source, {
          named: new Map(),
          default: null,
          namespace: null,
        });
      }
      const group = importsToAdd.get(importInfo.source);
      const isType = importInfo.importKind === 'type';

      if (importInfo.type === 'named') {
        let importStr = id;
        if (importInfo.importedName !== id) {
          importStr = `${importInfo.importedName} as ${id}`;
        }
        group.named.set(importStr, { str: importStr, isType });
      } else if (importInfo.type === 'default') {
        group.default = { str: id, isType };
      } else if (importInfo.type === 'namespace') {
        group.namespace = { str: id, isType };
      }
    }
  }

  const importBlocks = [];

  for (const [source, info] of importsToAdd) {
    // Check if source exists in new file to merge?
    // For simplicity, we append new import lines.
    // But we can try to be smart.

    // If we have named imports
    if (info.named.size > 0) {
      const namedImports = Array.from(info.named.values());
      const allTypes = namedImports.every((x) => x.isType);

      if (allTypes) {
        const names = namedImports.map((x) => x.str).join(', ');
        importBlocks.push(`import type { ${names} } from '${source}';`);
      } else {
        const names = namedImports
          .map((x) => (x.isType ? `type ${x.str}` : x.str))
          .join(', ');
        importBlocks.push(`import { ${names} } from '${source}';`);
      }
    }
    if (info.default) {
      if (info.default.isType) {
        importBlocks.push(`import type ${info.default.str} from '${source}';`);
      } else {
        importBlocks.push(`import ${info.default.str} from '${source}';`);
      }
    }
    if (info.namespace) {
      if (info.namespace.isType) {
        importBlocks.push(
          `import type * as ${info.namespace.str} from '${source}';`,
        );
      } else {
        importBlocks.push(
          `import * as ${info.namespace.str} from '${source}';`,
        );
      }
    }
  }

  // 5. Apply edits
  edits.sort((a, b) => b.start - a.start);

  let output = newFile;
  for (const edit of edits) {
    output = output.slice(0, edit.start) + edit.text + output.slice(edit.end);
  }

  // Prepend imports
  if (importBlocks.length > 0) {
    // Check for leading comments (e.g. // Docs: ...)
    // We want to insert imports AFTER the leading comments but BEFORE the first code/import

    // Simple heuristic: match consecutive lines at start that begin with //
    const commentMatch = output.match(/^(\s*\/\/.*(\r?\n|$))+/);

    if (commentMatch) {
      const commentEnd = commentMatch[0].length;
      output =
        output.slice(0, commentEnd) +
        '\n' +
        importBlocks.join('\n') +
        output.slice(commentEnd);
    } else {
      output = importBlocks.join('\n') + '\n' + output;
    }
  }

  return output;
}
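Taken together, mergeSchema lets the CLI regenerate a schema file without losing hand-written TypeScript annotations: it records every explicit type-argument list attached to an i.* call chain in the old file, keyed by its path inside the i.schema({...}) object, splices each one back into the matching call in the freshly generated file, and prepends whatever imports those type arguments referred to. A minimal usage sketch follows; the file contents, the Post type, and the regenerateSchemaSomehow helper are illustrative assumptions, not taken from the package:

import { readFileSync, writeFileSync } from 'node:fs';
import { mergeSchema } from './util/mergeSchema.js';

// Old file: the user added <Post> and its import by hand, e.g.
//   import type { Post } from './types';
//   ...
//   posts: i.entity<Post>({ title: i.string() }),
const oldFile = readFileSync('instant.schema.ts', 'utf8');

// New file: a regenerated schema in which that annotation is gone, e.g.
//   posts: i.entity({ title: i.string() }),
const newFile = regenerateSchemaSomehow(); // hypothetical helper

// Result: <Post> is re-attached right after `i.entity`, and the
// `import type { Post } from './types';` line is added if missing.
writeFileSync('instant.schema.ts', mergeSchema(oldFile, newFile));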