@tanstack/start-plugin-core 1.163.2 → 1.163.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47):
  1. package/dist/esm/constants.d.ts +1 -0
  2. package/dist/esm/constants.js +2 -0
  3. package/dist/esm/constants.js.map +1 -1
  4. package/dist/esm/import-protection-plugin/ast.d.ts +3 -0
  5. package/dist/esm/import-protection-plugin/ast.js +8 -0
  6. package/dist/esm/import-protection-plugin/ast.js.map +1 -0
  7. package/dist/esm/import-protection-plugin/constants.d.ts +6 -0
  8. package/dist/esm/import-protection-plugin/constants.js +24 -0
  9. package/dist/esm/import-protection-plugin/constants.js.map +1 -0
  10. package/dist/esm/import-protection-plugin/extensionlessAbsoluteIdResolver.d.ts +22 -0
  11. package/dist/esm/import-protection-plugin/extensionlessAbsoluteIdResolver.js +95 -0
  12. package/dist/esm/import-protection-plugin/extensionlessAbsoluteIdResolver.js.map +1 -0
  13. package/dist/esm/import-protection-plugin/plugin.d.ts +2 -13
  14. package/dist/esm/import-protection-plugin/plugin.js +684 -299
  15. package/dist/esm/import-protection-plugin/plugin.js.map +1 -1
  16. package/dist/esm/import-protection-plugin/postCompileUsage.js +4 -2
  17. package/dist/esm/import-protection-plugin/postCompileUsage.js.map +1 -1
  18. package/dist/esm/import-protection-plugin/rewriteDeniedImports.d.ts +4 -5
  19. package/dist/esm/import-protection-plugin/rewriteDeniedImports.js +225 -3
  20. package/dist/esm/import-protection-plugin/rewriteDeniedImports.js.map +1 -1
  21. package/dist/esm/import-protection-plugin/sourceLocation.d.ts +4 -7
  22. package/dist/esm/import-protection-plugin/sourceLocation.js +18 -73
  23. package/dist/esm/import-protection-plugin/sourceLocation.js.map +1 -1
  24. package/dist/esm/import-protection-plugin/types.d.ts +94 -0
  25. package/dist/esm/import-protection-plugin/utils.d.ts +33 -1
  26. package/dist/esm/import-protection-plugin/utils.js +69 -3
  27. package/dist/esm/import-protection-plugin/utils.js.map +1 -1
  28. package/dist/esm/import-protection-plugin/virtualModules.d.ts +30 -2
  29. package/dist/esm/import-protection-plugin/virtualModules.js +66 -23
  30. package/dist/esm/import-protection-plugin/virtualModules.js.map +1 -1
  31. package/dist/esm/start-compiler-plugin/plugin.d.ts +2 -1
  32. package/dist/esm/start-compiler-plugin/plugin.js +1 -2
  33. package/dist/esm/start-compiler-plugin/plugin.js.map +1 -1
  34. package/package.json +6 -6
  35. package/src/constants.ts +2 -0
  36. package/src/import-protection-plugin/INTERNALS.md +462 -60
  37. package/src/import-protection-plugin/ast.ts +7 -0
  38. package/src/import-protection-plugin/constants.ts +25 -0
  39. package/src/import-protection-plugin/extensionlessAbsoluteIdResolver.ts +121 -0
  40. package/src/import-protection-plugin/plugin.ts +1080 -597
  41. package/src/import-protection-plugin/postCompileUsage.ts +8 -2
  42. package/src/import-protection-plugin/rewriteDeniedImports.ts +141 -9
  43. package/src/import-protection-plugin/sourceLocation.ts +19 -89
  44. package/src/import-protection-plugin/types.ts +103 -0
  45. package/src/import-protection-plugin/utils.ts +123 -4
  46. package/src/import-protection-plugin/virtualModules.ts +117 -31
  47. package/src/start-compiler-plugin/plugin.ts +7 -2
@@ -1,6 +1,8 @@
1
1
  import * as t from "@babel/types";
2
- import { parseAst } from "@tanstack/router-utils";
2
+ import { generateFromAst } from "@tanstack/router-utils";
3
+ import { MOCK_MODULE_ID } from "./virtualModules.js";
3
4
  import { getOrCreate } from "./utils.js";
5
+ import { parseImportProtectionAst } from "./ast.js";
4
6
  function isValidExportName(name) {
5
7
  if (name === "default" || name.length === 0) return false;
6
8
  const first = name.charCodeAt(0);
@@ -14,8 +16,11 @@ function isValidExportName(name) {
14
16
  return true;
15
17
  }
16
18
  function collectMockExportNamesBySource(code) {
17
- const ast = parseAst({ code });
19
+ return collectMockExportNamesBySourceFromAst(parseImportProtectionAst(code));
20
+ }
21
+ function collectMockExportNamesBySourceFromAst(ast) {
18
22
  const namesBySource = /* @__PURE__ */ new Map();
23
+ const memberBindingToSource = /* @__PURE__ */ new Map();
19
24
  const add = (source, name) => {
20
25
  if (name === "default" || name.length === 0) return;
21
26
  getOrCreate(namesBySource, source, () => /* @__PURE__ */ new Set()).add(name);
@@ -25,6 +30,14 @@ function collectMockExportNamesBySource(code) {
25
30
  if (node.importKind === "type") continue;
26
31
  const source = node.source.value;
27
32
  for (const s of node.specifiers) {
33
+ if (t.isImportNamespaceSpecifier(s)) {
34
+ memberBindingToSource.set(s.local.name, source);
35
+ continue;
36
+ }
37
+ if (t.isImportDefaultSpecifier(s)) {
38
+ memberBindingToSource.set(s.local.name, source);
39
+ continue;
40
+ }
28
41
  if (!t.isImportSpecifier(s)) continue;
29
42
  if (s.importKind === "type") continue;
30
43
  const importedName = t.isIdentifier(s.imported) ? s.imported.name : s.imported.value;
@@ -42,14 +55,223 @@ function collectMockExportNamesBySource(code) {
42
55
  }
43
56
  }
44
57
  }
58
+ if (memberBindingToSource.size > 0) {
59
+ const visit = (node) => {
60
+ if (t.isMemberExpression(node)) {
61
+ const object = node.object;
62
+ if (t.isIdentifier(object)) {
63
+ const source = memberBindingToSource.get(object.name);
64
+ if (source) {
65
+ const property = node.property;
66
+ if (!node.computed && t.isIdentifier(property)) {
67
+ add(source, property.name);
68
+ } else if (node.computed && t.isStringLiteral(property)) {
69
+ add(source, property.value);
70
+ }
71
+ }
72
+ }
73
+ }
74
+ const keys = t.VISITOR_KEYS[node.type];
75
+ if (!keys) return;
76
+ for (const key of keys) {
77
+ const child = node[key];
78
+ if (Array.isArray(child)) {
79
+ for (const item of child) {
80
+ if (item && typeof item === "object" && "type" in item) {
81
+ visit(item);
82
+ }
83
+ }
84
+ } else if (child && typeof child === "object" && "type" in child) {
85
+ visit(child);
86
+ }
87
+ }
88
+ };
89
+ visit(ast.program);
90
+ }
45
91
  const out = /* @__PURE__ */ new Map();
46
92
  for (const [source, set] of namesBySource) {
47
93
  out.set(source, Array.from(set).sort());
48
94
  }
49
95
  return out;
50
96
  }
97
+ function collectNamedExports(code) {
98
+ return collectNamedExportsFromAst(parseImportProtectionAst(code));
99
+ }
100
+ function collectIdentifiersFromPattern(pattern, add) {
101
+ if (t.isIdentifier(pattern)) {
102
+ add(pattern.name);
103
+ } else if (t.isObjectPattern(pattern)) {
104
+ for (const prop of pattern.properties) {
105
+ if (t.isRestElement(prop)) {
106
+ collectIdentifiersFromPattern(prop.argument, add);
107
+ } else {
108
+ collectIdentifiersFromPattern(prop.value, add);
109
+ }
110
+ }
111
+ } else if (t.isArrayPattern(pattern)) {
112
+ for (const elem of pattern.elements) {
113
+ if (elem) collectIdentifiersFromPattern(elem, add);
114
+ }
115
+ } else if (t.isAssignmentPattern(pattern)) {
116
+ collectIdentifiersFromPattern(pattern.left, add);
117
+ } else if (t.isRestElement(pattern)) {
118
+ collectIdentifiersFromPattern(pattern.argument, add);
119
+ }
120
+ }
121
+ function collectNamedExportsFromAst(ast) {
122
+ const names = /* @__PURE__ */ new Set();
123
+ const add = (name) => {
124
+ if (isValidExportName(name)) names.add(name);
125
+ };
126
+ for (const node of ast.program.body) {
127
+ if (t.isExportNamedDeclaration(node)) {
128
+ if (node.exportKind === "type") continue;
129
+ if (node.declaration) {
130
+ const decl = node.declaration;
131
+ if (t.isFunctionDeclaration(decl) || t.isClassDeclaration(decl)) {
132
+ if (decl.id?.name) add(decl.id.name);
133
+ } else if (t.isVariableDeclaration(decl)) {
134
+ for (const d of decl.declarations) {
135
+ collectIdentifiersFromPattern(d.id, add);
136
+ }
137
+ }
138
+ }
139
+ for (const s of node.specifiers) {
140
+ if (!t.isExportSpecifier(s)) continue;
141
+ if (s.exportKind === "type") continue;
142
+ const exportedName = t.isIdentifier(s.exported) ? s.exported.name : s.exported.value;
143
+ add(exportedName);
144
+ }
145
+ }
146
+ }
147
+ return Array.from(names).sort();
148
+ }
149
+ function rewriteDeniedImports(code, id, deniedSources, getMockModuleId = () => MOCK_MODULE_ID) {
150
+ return rewriteDeniedImportsFromAst(
151
+ parseImportProtectionAst(code),
152
+ id,
153
+ deniedSources,
154
+ getMockModuleId
155
+ );
156
+ }
157
+ function rewriteDeniedImportsFromAst(ast, id, deniedSources, getMockModuleId = () => MOCK_MODULE_ID) {
158
+ let modified = false;
159
+ let mockCounter = 0;
160
+ for (let i = ast.program.body.length - 1; i >= 0; i--) {
161
+ const node = ast.program.body[i];
162
+ if (t.isImportDeclaration(node)) {
163
+ if (node.importKind === "type") continue;
164
+ if (!deniedSources.has(node.source.value)) continue;
165
+ const mockVar = `__tss_deny_${mockCounter++}`;
166
+ const replacements = [];
167
+ replacements.push(
168
+ t.importDeclaration(
169
+ [t.importDefaultSpecifier(t.identifier(mockVar))],
170
+ t.stringLiteral(getMockModuleId(node.source.value))
171
+ )
172
+ );
173
+ for (const specifier of node.specifiers) {
174
+ if (t.isImportDefaultSpecifier(specifier) || t.isImportNamespaceSpecifier(specifier)) {
175
+ replacements.push(
176
+ t.variableDeclaration("const", [
177
+ t.variableDeclarator(
178
+ t.identifier(specifier.local.name),
179
+ t.identifier(mockVar)
180
+ )
181
+ ])
182
+ );
183
+ } else if (t.isImportSpecifier(specifier)) {
184
+ if (specifier.importKind === "type") continue;
185
+ const importedName = t.isIdentifier(specifier.imported) ? specifier.imported.name : specifier.imported.value;
186
+ replacements.push(
187
+ t.variableDeclaration("const", [
188
+ t.variableDeclarator(
189
+ t.identifier(specifier.local.name),
190
+ t.memberExpression(
191
+ t.identifier(mockVar),
192
+ t.identifier(importedName)
193
+ )
194
+ )
195
+ ])
196
+ );
197
+ }
198
+ }
199
+ ast.program.body.splice(i, 1, ...replacements);
200
+ modified = true;
201
+ continue;
202
+ }
203
+ if (t.isExportNamedDeclaration(node) && node.source) {
204
+ if (node.exportKind === "type") continue;
205
+ if (!deniedSources.has(node.source.value)) continue;
206
+ const mockVar = `__tss_deny_${mockCounter++}`;
207
+ const replacements = [];
208
+ replacements.push(
209
+ t.importDeclaration(
210
+ [t.importDefaultSpecifier(t.identifier(mockVar))],
211
+ t.stringLiteral(getMockModuleId(node.source.value))
212
+ )
213
+ );
214
+ const exportSpecifiers = [];
215
+ for (const specifier of node.specifiers) {
216
+ if (t.isExportSpecifier(specifier)) {
217
+ if (specifier.exportKind === "type") continue;
218
+ const localName = specifier.local.name;
219
+ const exportedName = t.isIdentifier(specifier.exported) ? specifier.exported.name : specifier.exported.value;
220
+ const internalVar = `__tss_reexport_${localName}`;
221
+ replacements.push(
222
+ t.variableDeclaration("const", [
223
+ t.variableDeclarator(
224
+ t.identifier(internalVar),
225
+ t.memberExpression(
226
+ t.identifier(mockVar),
227
+ t.identifier(localName)
228
+ )
229
+ )
230
+ ])
231
+ );
232
+ exportSpecifiers.push({ localName: internalVar, exportedName });
233
+ }
234
+ }
235
+ if (exportSpecifiers.length > 0) {
236
+ replacements.push(
237
+ t.exportNamedDeclaration(
238
+ null,
239
+ exportSpecifiers.map(
240
+ (s) => t.exportSpecifier(
241
+ t.identifier(s.localName),
242
+ t.identifier(s.exportedName)
243
+ )
244
+ )
245
+ )
246
+ );
247
+ }
248
+ ast.program.body.splice(i, 1, ...replacements);
249
+ modified = true;
250
+ continue;
251
+ }
252
+ if (t.isExportAllDeclaration(node)) {
253
+ if (node.exportKind === "type") continue;
254
+ if (!deniedSources.has(node.source.value)) continue;
255
+ ast.program.body.splice(i, 1);
256
+ modified = true;
257
+ continue;
258
+ }
259
+ }
260
+ if (!modified) return void 0;
261
+ const result = generateFromAst(ast, {
262
+ sourceMaps: true,
263
+ sourceFileName: id,
264
+ filename: id
265
+ });
266
+ return {
267
+ code: result.code,
268
+ ...result.map ? { map: result.map } : {}
269
+ };
270
+ }
51
271
  export {
52
272
  collectMockExportNamesBySource,
53
- isValidExportName
273
+ collectNamedExports,
274
+ isValidExportName,
275
+ rewriteDeniedImports
54
276
  };
55
277
  //# sourceMappingURL=rewriteDeniedImports.js.map
@@ -1 +1 @@
1
- {"version":3,"file":"rewriteDeniedImports.js","sources":["../../../src/import-protection-plugin/rewriteDeniedImports.ts"],"sourcesContent":["import * as t from '@babel/types'\nimport { generateFromAst, parseAst } from '@tanstack/router-utils'\n\nimport { MOCK_MODULE_ID } from './virtualModules'\nimport { getOrCreate } from './utils'\n\nexport function isValidExportName(name: string): boolean {\n if (name === 'default' || name.length === 0) return false\n const first = name.charCodeAt(0)\n // First char: A-Z (65-90), a-z (97-122), _ (95), $ (36)\n if (\n !(\n (first >= 65 && first <= 90) ||\n (first >= 97 && first <= 122) ||\n first === 95 ||\n first === 36\n )\n )\n return false\n for (let i = 1; i < name.length; i++) {\n const ch = name.charCodeAt(i)\n // Subsequent: A-Z, a-z, 0-9 (48-57), _, $\n if (\n !(\n (ch >= 65 && ch <= 90) ||\n (ch >= 97 && ch <= 122) ||\n (ch >= 48 && ch <= 57) ||\n ch === 95 ||\n ch === 36\n )\n )\n return false\n }\n return true\n}\n\n/**\n * Best-effort static analysis of an importer's source to determine which\n * named exports are needed per specifier, to keep native ESM valid in dev.\n */\nexport function collectMockExportNamesBySource(\n code: string,\n): Map<string, Array<string>> {\n const ast = parseAst({ code })\n\n const namesBySource = new Map<string, Set<string>>()\n const add = (source: string, name: string) => {\n if (name === 'default' || name.length === 0) return\n getOrCreate(namesBySource, source, () => new Set<string>()).add(name)\n }\n\n for (const node of ast.program.body) {\n if (t.isImportDeclaration(node)) {\n if (node.importKind === 'type') continue\n const source = node.source.value\n for (const s of node.specifiers) {\n if (!t.isImportSpecifier(s)) continue\n if (s.importKind === 'type') continue\n const importedName = t.isIdentifier(s.imported)\n ? 
s.imported.name\n : s.imported.value\n // `import { default as x } from 'm'` only requires a default export.\n if (importedName === 'default') continue\n add(source, importedName)\n }\n }\n\n if (t.isExportNamedDeclaration(node) && node.source?.value) {\n if (node.exportKind === 'type') continue\n const source = node.source.value\n for (const s of node.specifiers) {\n if (!t.isExportSpecifier(s)) continue\n if (s.exportKind === 'type') continue\n add(source, s.local.name)\n }\n }\n }\n\n const out = new Map<string, Array<string>>()\n for (const [source, set] of namesBySource) {\n out.set(source, Array.from(set).sort())\n }\n return out\n}\n\n/**\n * Rewrite static imports/re-exports from denied sources using Babel AST transforms.\n *\n * Transforms:\n * import { a as b, c } from 'denied'\n * Into:\n * import __tss_deny_0 from 'tanstack-start-import-protection:mock'\n * const b = __tss_deny_0.a\n * const c = __tss_deny_0.c\n *\n * Also handles:\n * import def from 'denied' -> import def from mock\n * import * as ns from 'denied' -> import ns from mock\n * export { x } from 'denied' -> export const x = mock.x\n * export * from 'denied' -> removed\n * export { x as y } from 'denied' -> export const y = mock.x\n */\nexport function rewriteDeniedImports(\n code: string,\n id: string,\n deniedSources: Set<string>,\n getMockModuleId: (source: string) => string = () => MOCK_MODULE_ID,\n): { code: string; map?: object | null } | undefined {\n const ast = parseAst({ code })\n let modified = false\n let mockCounter = 0\n\n // Walk program body in reverse so splice indices stay valid\n for (let i = ast.program.body.length - 1; i >= 0; i--) {\n const node = ast.program.body[i]!\n\n if (t.isImportDeclaration(node)) {\n if (node.importKind === 'type') continue\n if (!deniedSources.has(node.source.value)) continue\n\n const mockVar = `__tss_deny_${mockCounter++}`\n const replacements: Array<t.Statement> = []\n\n replacements.push(\n t.importDeclaration(\n 
[t.importDefaultSpecifier(t.identifier(mockVar))],\n t.stringLiteral(getMockModuleId(node.source.value)),\n ),\n )\n\n for (const specifier of node.specifiers) {\n if (\n t.isImportDefaultSpecifier(specifier) ||\n t.isImportNamespaceSpecifier(specifier)\n ) {\n replacements.push(\n t.variableDeclaration('const', [\n t.variableDeclarator(\n t.identifier(specifier.local.name),\n t.identifier(mockVar),\n ),\n ]),\n )\n } else if (t.isImportSpecifier(specifier)) {\n if (specifier.importKind === 'type') continue\n const importedName = t.isIdentifier(specifier.imported)\n ? specifier.imported.name\n : specifier.imported.value\n replacements.push(\n t.variableDeclaration('const', [\n t.variableDeclarator(\n t.identifier(specifier.local.name),\n t.memberExpression(\n t.identifier(mockVar),\n t.identifier(importedName),\n ),\n ),\n ]),\n )\n }\n }\n\n ast.program.body.splice(i, 1, ...replacements)\n modified = true\n continue\n }\n\n if (t.isExportNamedDeclaration(node) && node.source) {\n if (node.exportKind === 'type') continue\n if (!deniedSources.has(node.source.value)) continue\n\n const mockVar = `__tss_deny_${mockCounter++}`\n const replacements: Array<t.Statement> = []\n\n replacements.push(\n t.importDeclaration(\n [t.importDefaultSpecifier(t.identifier(mockVar))],\n t.stringLiteral(getMockModuleId(node.source.value)),\n ),\n )\n const exportSpecifiers: Array<{\n localName: string\n exportedName: string\n }> = []\n for (const specifier of node.specifiers) {\n if (t.isExportSpecifier(specifier)) {\n if (specifier.exportKind === 'type') continue\n const localName = specifier.local.name\n const exportedName = t.isIdentifier(specifier.exported)\n ? 
specifier.exported.name\n : specifier.exported.value\n\n const internalVar = `__tss_reexport_${localName}`\n replacements.push(\n t.variableDeclaration('const', [\n t.variableDeclarator(\n t.identifier(internalVar),\n t.memberExpression(\n t.identifier(mockVar),\n t.identifier(localName),\n ),\n ),\n ]),\n )\n exportSpecifiers.push({ localName: internalVar, exportedName })\n }\n }\n\n if (exportSpecifiers.length > 0) {\n replacements.push(\n t.exportNamedDeclaration(\n null,\n exportSpecifiers.map((s) =>\n t.exportSpecifier(\n t.identifier(s.localName),\n t.identifier(s.exportedName),\n ),\n ),\n ),\n )\n }\n\n ast.program.body.splice(i, 1, ...replacements)\n modified = true\n continue\n }\n\n if (t.isExportAllDeclaration(node)) {\n if (node.exportKind === 'type') continue\n if (!deniedSources.has(node.source.value)) continue\n\n ast.program.body.splice(i, 1)\n modified = true\n continue\n }\n }\n\n if (!modified) return undefined\n\n const result = generateFromAst(ast, {\n sourceMaps: true,\n sourceFileName: id,\n filename: id,\n })\n\n return { code: result.code, map: result.map 
}\n}\n"],"names":[],"mappings":";;;AAMO,SAAS,kBAAkB,MAAuB;AACvD,MAAI,SAAS,aAAa,KAAK,WAAW,EAAG,QAAO;AACpD,QAAM,QAAQ,KAAK,WAAW,CAAC;AAE/B,MACE,EACG,SAAS,MAAM,SAAS,MACxB,SAAS,MAAM,SAAS,OACzB,UAAU,MACV,UAAU;AAGZ,WAAO;AACT,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,KAAK,KAAK,WAAW,CAAC;AAE5B,QACE,EACG,MAAM,MAAM,MAAM,MAClB,MAAM,MAAM,MAAM,OAClB,MAAM,MAAM,MAAM,MACnB,OAAO,MACP,OAAO;AAGT,aAAO;AAAA,EACX;AACA,SAAO;AACT;AAMO,SAAS,+BACd,MAC4B;AAC5B,QAAM,MAAM,SAAS,EAAE,MAAM;AAE7B,QAAM,oCAAoB,IAAA;AAC1B,QAAM,MAAM,CAAC,QAAgB,SAAiB;AAC5C,QAAI,SAAS,aAAa,KAAK,WAAW,EAAG;AAC7C,gBAAY,eAAe,QAAQ,MAAM,oBAAI,KAAa,EAAE,IAAI,IAAI;AAAA,EACtE;AAEA,aAAW,QAAQ,IAAI,QAAQ,MAAM;AACnC,QAAI,EAAE,oBAAoB,IAAI,GAAG;AAC/B,UAAI,KAAK,eAAe,OAAQ;AAChC,YAAM,SAAS,KAAK,OAAO;AAC3B,iBAAW,KAAK,KAAK,YAAY;AAC/B,YAAI,CAAC,EAAE,kBAAkB,CAAC,EAAG;AAC7B,YAAI,EAAE,eAAe,OAAQ;AAC7B,cAAM,eAAe,EAAE,aAAa,EAAE,QAAQ,IAC1C,EAAE,SAAS,OACX,EAAE,SAAS;AAEf,YAAI,iBAAiB,UAAW;AAChC,YAAI,QAAQ,YAAY;AAAA,MAC1B;AAAA,IACF;AAEA,QAAI,EAAE,yBAAyB,IAAI,KAAK,KAAK,QAAQ,OAAO;AAC1D,UAAI,KAAK,eAAe,OAAQ;AAChC,YAAM,SAAS,KAAK,OAAO;AAC3B,iBAAW,KAAK,KAAK,YAAY;AAC/B,YAAI,CAAC,EAAE,kBAAkB,CAAC,EAAG;AAC7B,YAAI,EAAE,eAAe,OAAQ;AAC7B,YAAI,QAAQ,EAAE,MAAM,IAAI;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAEA,QAAM,0BAAU,IAAA;AAChB,aAAW,CAAC,QAAQ,GAAG,KAAK,eAAe;AACzC,QAAI,IAAI,QAAQ,MAAM,KAAK,GAAG,EAAE,MAAM;AAAA,EACxC;AACA,SAAO;AACT;"}
1
+ {"version":3,"file":"rewriteDeniedImports.js","sources":["../../../src/import-protection-plugin/rewriteDeniedImports.ts"],"sourcesContent":["import * as t from '@babel/types'\nimport { generateFromAst } from '@tanstack/router-utils'\n\nimport { MOCK_MODULE_ID } from './virtualModules'\nimport { getOrCreate } from './utils'\nimport { parseImportProtectionAst } from './ast'\nimport type { SourceMapLike } from './sourceLocation'\nimport type { ParsedAst } from './ast'\n\nexport function isValidExportName(name: string): boolean {\n if (name === 'default' || name.length === 0) return false\n const first = name.charCodeAt(0)\n // First char: A-Z (65-90), a-z (97-122), _ (95), $ (36)\n if (\n !(\n (first >= 65 && first <= 90) ||\n (first >= 97 && first <= 122) ||\n first === 95 ||\n first === 36\n )\n )\n return false\n for (let i = 1; i < name.length; i++) {\n const ch = name.charCodeAt(i)\n // Subsequent: A-Z, a-z, 0-9 (48-57), _, $\n if (\n !(\n (ch >= 65 && ch <= 90) ||\n (ch >= 97 && ch <= 122) ||\n (ch >= 48 && ch <= 57) ||\n ch === 95 ||\n ch === 36\n )\n )\n return false\n }\n return true\n}\n\nexport function collectMockExportNamesBySource(\n code: string,\n): Map<string, Array<string>> {\n return collectMockExportNamesBySourceFromAst(parseImportProtectionAst(code))\n}\n\nfunction collectMockExportNamesBySourceFromAst(\n ast: ParsedAst,\n): Map<string, Array<string>> {\n const namesBySource = new Map<string, Set<string>>()\n const memberBindingToSource = new Map<string, string>()\n const add = (source: string, name: string) => {\n if (name === 'default' || name.length === 0) return\n getOrCreate(namesBySource, source, () => new Set<string>()).add(name)\n }\n\n for (const node of ast.program.body) {\n if (t.isImportDeclaration(node)) {\n if (node.importKind === 'type') continue\n const source = node.source.value\n for (const s of node.specifiers) {\n if (t.isImportNamespaceSpecifier(s)) {\n memberBindingToSource.set(s.local.name, source)\n continue\n }\n if 
(t.isImportDefaultSpecifier(s)) {\n memberBindingToSource.set(s.local.name, source)\n continue\n }\n if (!t.isImportSpecifier(s)) continue\n if (s.importKind === 'type') continue\n const importedName = t.isIdentifier(s.imported)\n ? s.imported.name\n : s.imported.value\n // `import { default as x } from 'm'` only requires a default export.\n if (importedName === 'default') continue\n add(source, importedName)\n }\n }\n\n if (t.isExportNamedDeclaration(node) && node.source?.value) {\n if (node.exportKind === 'type') continue\n const source = node.source.value\n for (const s of node.specifiers) {\n if (!t.isExportSpecifier(s)) continue\n if (s.exportKind === 'type') continue\n add(source, s.local.name)\n }\n }\n }\n\n // For namespace/default imports, collect property names used as\n // `binding.foo`/`binding?.foo` so mock-edge modules can expose explicit ESM\n // named exports required by Rolldown/native ESM.\n if (memberBindingToSource.size > 0) {\n const visit = (node: t.Node): void => {\n if (t.isMemberExpression(node)) {\n const object = node.object\n if (t.isIdentifier(object)) {\n const source = memberBindingToSource.get(object.name)\n if (source) {\n const property = node.property\n if (!node.computed && t.isIdentifier(property)) {\n add(source, property.name)\n } else if (node.computed && t.isStringLiteral(property)) {\n add(source, property.value)\n }\n }\n }\n }\n\n const keys = t.VISITOR_KEYS[node.type]\n if (!keys) return\n for (const key of keys) {\n const child = (node as unknown as Record<string, unknown>)[key]\n if (Array.isArray(child)) {\n for (const item of child) {\n if (item && typeof item === 'object' && 'type' in item) {\n visit(item as t.Node)\n }\n }\n } else if (child && typeof child === 'object' && 'type' in child) {\n visit(child as t.Node)\n }\n }\n }\n\n visit(ast.program)\n }\n\n const out = new Map<string, Array<string>>()\n for (const [source, set] of namesBySource) {\n out.set(source, Array.from(set).sort())\n }\n return 
out\n}\n\n/** Collect all valid named export identifiers from the given code. */\nexport function collectNamedExports(code: string): Array<string> {\n return collectNamedExportsFromAst(parseImportProtectionAst(code))\n}\n\nfunction collectIdentifiersFromPattern(\n pattern: t.LVal,\n add: (name: string) => void,\n): void {\n if (t.isIdentifier(pattern)) {\n add(pattern.name)\n } else if (t.isObjectPattern(pattern)) {\n for (const prop of pattern.properties) {\n if (t.isRestElement(prop)) {\n collectIdentifiersFromPattern(prop.argument as t.LVal, add)\n } else {\n collectIdentifiersFromPattern(prop.value as t.LVal, add)\n }\n }\n } else if (t.isArrayPattern(pattern)) {\n for (const elem of pattern.elements) {\n if (elem) collectIdentifiersFromPattern(elem as t.LVal, add)\n }\n } else if (t.isAssignmentPattern(pattern)) {\n collectIdentifiersFromPattern(pattern.left, add)\n } else if (t.isRestElement(pattern)) {\n collectIdentifiersFromPattern(pattern.argument as t.LVal, add)\n }\n}\n\nfunction collectNamedExportsFromAst(ast: ParsedAst): Array<string> {\n const names = new Set<string>()\n const add = (name: string) => {\n if (isValidExportName(name)) names.add(name)\n }\n\n for (const node of ast.program.body) {\n if (t.isExportNamedDeclaration(node)) {\n if (node.exportKind === 'type') continue\n\n if (node.declaration) {\n const decl = node.declaration\n if (t.isFunctionDeclaration(decl) || t.isClassDeclaration(decl)) {\n if (decl.id?.name) add(decl.id.name)\n } else if (t.isVariableDeclaration(decl)) {\n for (const d of decl.declarations) {\n collectIdentifiersFromPattern(d.id as t.LVal, add)\n }\n }\n }\n\n for (const s of node.specifiers) {\n if (!t.isExportSpecifier(s)) continue\n if (s.exportKind === 'type') continue\n const exportedName = t.isIdentifier(s.exported)\n ? 
s.exported.name\n : s.exported.value\n add(exportedName)\n }\n }\n }\n\n return Array.from(names).sort()\n}\n\n/**\n * Rewrite static imports/re-exports from denied sources using Babel AST transforms.\n *\n * Transforms:\n * import { a as b, c } from 'denied'\n * Into:\n * import __tss_deny_0 from 'tanstack-start-import-protection:mock'\n * const b = __tss_deny_0.a\n * const c = __tss_deny_0.c\n *\n * Also handles:\n * import def from 'denied' -> import def from mock\n * import * as ns from 'denied' -> import ns from mock\n * export { x } from 'denied' -> export const x = mock.x\n * export * from 'denied' -> removed\n * export { x as y } from 'denied' -> export const y = mock.x\n */\nexport function rewriteDeniedImports(\n code: string,\n id: string,\n deniedSources: Set<string>,\n getMockModuleId: (source: string) => string = () => MOCK_MODULE_ID,\n): { code: string; map?: SourceMapLike } | undefined {\n return rewriteDeniedImportsFromAst(\n parseImportProtectionAst(code),\n id,\n deniedSources,\n getMockModuleId,\n )\n}\n\nfunction rewriteDeniedImportsFromAst(\n ast: ParsedAst,\n id: string,\n deniedSources: Set<string>,\n getMockModuleId: (source: string) => string = () => MOCK_MODULE_ID,\n): { code: string; map?: SourceMapLike } | undefined {\n let modified = false\n let mockCounter = 0\n\n // Walk program body in reverse so splice indices stay valid\n for (let i = ast.program.body.length - 1; i >= 0; i--) {\n const node = ast.program.body[i]!\n\n if (t.isImportDeclaration(node)) {\n if (node.importKind === 'type') continue\n if (!deniedSources.has(node.source.value)) continue\n\n const mockVar = `__tss_deny_${mockCounter++}`\n const replacements: Array<t.Statement> = []\n\n replacements.push(\n t.importDeclaration(\n [t.importDefaultSpecifier(t.identifier(mockVar))],\n t.stringLiteral(getMockModuleId(node.source.value)),\n ),\n )\n\n for (const specifier of node.specifiers) {\n if (\n t.isImportDefaultSpecifier(specifier) ||\n 
t.isImportNamespaceSpecifier(specifier)\n ) {\n replacements.push(\n t.variableDeclaration('const', [\n t.variableDeclarator(\n t.identifier(specifier.local.name),\n t.identifier(mockVar),\n ),\n ]),\n )\n } else if (t.isImportSpecifier(specifier)) {\n if (specifier.importKind === 'type') continue\n const importedName = t.isIdentifier(specifier.imported)\n ? specifier.imported.name\n : specifier.imported.value\n replacements.push(\n t.variableDeclaration('const', [\n t.variableDeclarator(\n t.identifier(specifier.local.name),\n t.memberExpression(\n t.identifier(mockVar),\n t.identifier(importedName),\n ),\n ),\n ]),\n )\n }\n }\n\n ast.program.body.splice(i, 1, ...replacements)\n modified = true\n continue\n }\n\n if (t.isExportNamedDeclaration(node) && node.source) {\n if (node.exportKind === 'type') continue\n if (!deniedSources.has(node.source.value)) continue\n\n const mockVar = `__tss_deny_${mockCounter++}`\n const replacements: Array<t.Statement> = []\n\n replacements.push(\n t.importDeclaration(\n [t.importDefaultSpecifier(t.identifier(mockVar))],\n t.stringLiteral(getMockModuleId(node.source.value)),\n ),\n )\n const exportSpecifiers: Array<{\n localName: string\n exportedName: string\n }> = []\n for (const specifier of node.specifiers) {\n if (t.isExportSpecifier(specifier)) {\n if (specifier.exportKind === 'type') continue\n const localName = specifier.local.name\n const exportedName = t.isIdentifier(specifier.exported)\n ? 
specifier.exported.name\n : specifier.exported.value\n\n const internalVar = `__tss_reexport_${localName}`\n replacements.push(\n t.variableDeclaration('const', [\n t.variableDeclarator(\n t.identifier(internalVar),\n t.memberExpression(\n t.identifier(mockVar),\n t.identifier(localName),\n ),\n ),\n ]),\n )\n exportSpecifiers.push({ localName: internalVar, exportedName })\n }\n }\n\n if (exportSpecifiers.length > 0) {\n replacements.push(\n t.exportNamedDeclaration(\n null,\n exportSpecifiers.map((s) =>\n t.exportSpecifier(\n t.identifier(s.localName),\n t.identifier(s.exportedName),\n ),\n ),\n ),\n )\n }\n\n ast.program.body.splice(i, 1, ...replacements)\n modified = true\n continue\n }\n\n if (t.isExportAllDeclaration(node)) {\n if (node.exportKind === 'type') continue\n if (!deniedSources.has(node.source.value)) continue\n\n ast.program.body.splice(i, 1)\n modified = true\n continue\n }\n }\n\n if (!modified) return undefined\n\n const result = generateFromAst(ast, {\n sourceMaps: true,\n sourceFileName: id,\n filename: id,\n })\n\n return {\n code: result.code,\n ...(result.map ? 
{ map: result.map as SourceMapLike } : {}),\n }\n}\n"],"names":[],"mappings":";;;;;AASO,SAAS,kBAAkB,MAAuB;AACvD,MAAI,SAAS,aAAa,KAAK,WAAW,EAAG,QAAO;AACpD,QAAM,QAAQ,KAAK,WAAW,CAAC;AAE/B,MACE,EACG,SAAS,MAAM,SAAS,MACxB,SAAS,MAAM,SAAS,OACzB,UAAU,MACV,UAAU;AAGZ,WAAO;AACT,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,UAAM,KAAK,KAAK,WAAW,CAAC;AAE5B,QACE,EACG,MAAM,MAAM,MAAM,MAClB,MAAM,MAAM,MAAM,OAClB,MAAM,MAAM,MAAM,MACnB,OAAO,MACP,OAAO;AAGT,aAAO;AAAA,EACX;AACA,SAAO;AACT;AAEO,SAAS,+BACd,MAC4B;AAC5B,SAAO,sCAAsC,yBAAyB,IAAI,CAAC;AAC7E;AAEA,SAAS,sCACP,KAC4B;AAC5B,QAAM,oCAAoB,IAAA;AAC1B,QAAM,4CAA4B,IAAA;AAClC,QAAM,MAAM,CAAC,QAAgB,SAAiB;AAC5C,QAAI,SAAS,aAAa,KAAK,WAAW,EAAG;AAC7C,gBAAY,eAAe,QAAQ,MAAM,oBAAI,KAAa,EAAE,IAAI,IAAI;AAAA,EACtE;AAEA,aAAW,QAAQ,IAAI,QAAQ,MAAM;AACnC,QAAI,EAAE,oBAAoB,IAAI,GAAG;AAC/B,UAAI,KAAK,eAAe,OAAQ;AAChC,YAAM,SAAS,KAAK,OAAO;AAC3B,iBAAW,KAAK,KAAK,YAAY;AAC/B,YAAI,EAAE,2BAA2B,CAAC,GAAG;AACnC,gCAAsB,IAAI,EAAE,MAAM,MAAM,MAAM;AAC9C;AAAA,QACF;AACA,YAAI,EAAE,yBAAyB,CAAC,GAAG;AACjC,gCAAsB,IAAI,EAAE,MAAM,MAAM,MAAM;AAC9C;AAAA,QACF;AACA,YAAI,CAAC,EAAE,kBAAkB,CAAC,EAAG;AAC7B,YAAI,EAAE,eAAe,OAAQ;AAC7B,cAAM,eAAe,EAAE,aAAa,EAAE,QAAQ,IAC1C,EAAE,SAAS,OACX,EAAE,SAAS;AAEf,YAAI,iBAAiB,UAAW;AAChC,YAAI,QAAQ,YAAY;AAAA,MAC1B;AAAA,IACF;AAEA,QAAI,EAAE,yBAAyB,IAAI,KAAK,KAAK,QAAQ,OAAO;AAC1D,UAAI,KAAK,eAAe,OAAQ;AAChC,YAAM,SAAS,KAAK,OAAO;AAC3B,iBAAW,KAAK,KAAK,YAAY;AAC/B,YAAI,CAAC,EAAE,kBAAkB,CAAC,EAAG;AAC7B,YAAI,EAAE,eAAe,OAAQ;AAC7B,YAAI,QAAQ,EAAE,MAAM,IAAI;AAAA,MAC1B;AAAA,IACF;AAAA,EACF;AAKA,MAAI,sBAAsB,OAAO,GAAG;AAClC,UAAM,QAAQ,CAAC,SAAuB;AACpC,UAAI,EAAE,mBAAmB,IAAI,GAAG;AAC9B,cAAM,SAAS,KAAK;AACpB,YAAI,EAAE,aAAa,MAAM,GAAG;AAC1B,gBAAM,SAAS,sBAAsB,IAAI,OAAO,IAAI;AACpD,cAAI,QAAQ;AACV,kBAAM,WAAW,KAAK;AACtB,gBAAI,CAAC,KAAK,YAAY,EAAE,aAAa,QAAQ,GAAG;AAC9C,kBAAI,QAAQ,SAAS,IAAI;AAAA,YAC3B,WAAW,KAAK,YAAY,EAAE,gBAAgB,QAAQ,GAAG;AACvD,kBAAI,QAAQ,SAAS,KAAK;AAAA,YAC5B;AAAA,UACF;AAAA,QACF;AAAA,MACF;AAEA,YAAM,OAAO,EAAE,aAAa,KAAK,IAAI;AACrC,UAAI,CAAC,KAAM;AACX,iBAAW,OAAO,MAAM;AACtB,cAAM,QAAS,KAA4C,GAAG;AAC9D
,YAAI,MAAM,QAAQ,KAAK,GAAG;AACxB,qBAAW,QAAQ,OAAO;AACxB,gBAAI,QAAQ,OAAO,SAAS,YAAY,UAAU,MAAM;AACtD,oBAAM,IAAc;AAAA,YACtB;AAAA,UACF;AAAA,QACF,WAAW,SAAS,OAAO,UAAU,YAAY,UAAU,OAAO;AAChE,gBAAM,KAAe;AAAA,QACvB;AAAA,MACF;AAAA,IACF;AAEA,UAAM,IAAI,OAAO;AAAA,EACnB;AAEA,QAAM,0BAAU,IAAA;AAChB,aAAW,CAAC,QAAQ,GAAG,KAAK,eAAe;AACzC,QAAI,IAAI,QAAQ,MAAM,KAAK,GAAG,EAAE,MAAM;AAAA,EACxC;AACA,SAAO;AACT;AAGO,SAAS,oBAAoB,MAA6B;AAC/D,SAAO,2BAA2B,yBAAyB,IAAI,CAAC;AAClE;AAEA,SAAS,8BACP,SACA,KACM;AACN,MAAI,EAAE,aAAa,OAAO,GAAG;AAC3B,QAAI,QAAQ,IAAI;AAAA,EAClB,WAAW,EAAE,gBAAgB,OAAO,GAAG;AACrC,eAAW,QAAQ,QAAQ,YAAY;AACrC,UAAI,EAAE,cAAc,IAAI,GAAG;AACzB,sCAA8B,KAAK,UAAoB,GAAG;AAAA,MAC5D,OAAO;AACL,sCAA8B,KAAK,OAAiB,GAAG;AAAA,MACzD;AAAA,IACF;AAAA,EACF,WAAW,EAAE,eAAe,OAAO,GAAG;AACpC,eAAW,QAAQ,QAAQ,UAAU;AACnC,UAAI,KAAM,+BAA8B,MAAgB,GAAG;AAAA,IAC7D;AAAA,EACF,WAAW,EAAE,oBAAoB,OAAO,GAAG;AACzC,kCAA8B,QAAQ,MAAM,GAAG;AAAA,EACjD,WAAW,EAAE,cAAc,OAAO,GAAG;AACnC,kCAA8B,QAAQ,UAAoB,GAAG;AAAA,EAC/D;AACF;AAEA,SAAS,2BAA2B,KAA+B;AACjE,QAAM,4BAAY,IAAA;AAClB,QAAM,MAAM,CAAC,SAAiB;AAC5B,QAAI,kBAAkB,IAAI,EAAG,OAAM,IAAI,IAAI;AAAA,EAC7C;AAEA,aAAW,QAAQ,IAAI,QAAQ,MAAM;AACnC,QAAI,EAAE,yBAAyB,IAAI,GAAG;AACpC,UAAI,KAAK,eAAe,OAAQ;AAEhC,UAAI,KAAK,aAAa;AACpB,cAAM,OAAO,KAAK;AAClB,YAAI,EAAE,sBAAsB,IAAI,KAAK,EAAE,mBAAmB,IAAI,GAAG;AAC/D,cAAI,KAAK,IAAI,KAAM,KAAI,KAAK,GAAG,IAAI;AAAA,QACrC,WAAW,EAAE,sBAAsB,IAAI,GAAG;AACxC,qBAAW,KAAK,KAAK,cAAc;AACjC,0CAA8B,EAAE,IAAc,GAAG;AAAA,UACnD;AAAA,QACF;AAAA,MACF;AAEA,iBAAW,KAAK,KAAK,YAAY;AAC/B,YAAI,CAAC,EAAE,kBAAkB,CAAC,EAAG;AAC7B,YAAI,EAAE,eAAe,OAAQ;AAC7B,cAAM,eAAe,EAAE,aAAa,EAAE,QAAQ,IAC1C,EAAE,SAAS,OACX,EAAE,SAAS;AACf,YAAI,YAAY;AAAA,MAClB;AAAA,IACF;AAAA,EACF;AAEA,SAAO,MAAM,KAAK,KAAK,EAAE,KAAA;AAC3B;AAmBO,SAAS,qBACd,MACA,IACA,eACA,kBAA8C,MAAM,gBACD;AACnD,SAAO;AAAA,IACL,yBAAyB,IAAI;AAAA,IAC7B;AAAA,IACA;AAAA,IACA;AAAA,EAAA;AAEJ;AAEA,SAAS,4BACP,KACA,IACA,eACA,kBAA8C,MAAM,gBACD;AACnD,MAAI,WAAW;AACf,MAAI,cAAc;AAGlB,WAAS,IAAI,IAAI,QAAQ,KAAK,SAAS,GAAG,KAAK,GAAG,KAAK;AACrD,UAAM,OAAO,IAAI,QAAQ,KAAK,CAAC;AAE/B,QAAI
,EAAE,oBAAoB,IAAI,GAAG;AAC/B,UAAI,KAAK,eAAe,OAAQ;AAChC,UAAI,CAAC,cAAc,IAAI,KAAK,OAAO,KAAK,EAAG;AAE3C,YAAM,UAAU,cAAc,aAAa;AAC3C,YAAM,eAAmC,CAAA;AAEzC,mBAAa;AAAA,QACX,EAAE;AAAA,UACA,CAAC,EAAE,uBAAuB,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UAChD,EAAE,cAAc,gBAAgB,KAAK,OAAO,KAAK,CAAC;AAAA,QAAA;AAAA,MACpD;AAGF,iBAAW,aAAa,KAAK,YAAY;AACvC,YACE,EAAE,yBAAyB,SAAS,KACpC,EAAE,2BAA2B,SAAS,GACtC;AACA,uBAAa;AAAA,YACX,EAAE,oBAAoB,SAAS;AAAA,cAC7B,EAAE;AAAA,gBACA,EAAE,WAAW,UAAU,MAAM,IAAI;AAAA,gBACjC,EAAE,WAAW,OAAO;AAAA,cAAA;AAAA,YACtB,CACD;AAAA,UAAA;AAAA,QAEL,WAAW,EAAE,kBAAkB,SAAS,GAAG;AACzC,cAAI,UAAU,eAAe,OAAQ;AACrC,gBAAM,eAAe,EAAE,aAAa,UAAU,QAAQ,IAClD,UAAU,SAAS,OACnB,UAAU,SAAS;AACvB,uBAAa;AAAA,YACX,EAAE,oBAAoB,SAAS;AAAA,cAC7B,EAAE;AAAA,gBACA,EAAE,WAAW,UAAU,MAAM,IAAI;AAAA,gBACjC,EAAE;AAAA,kBACA,EAAE,WAAW,OAAO;AAAA,kBACpB,EAAE,WAAW,YAAY;AAAA,gBAAA;AAAA,cAC3B;AAAA,YACF,CACD;AAAA,UAAA;AAAA,QAEL;AAAA,MACF;AAEA,UAAI,QAAQ,KAAK,OAAO,GAAG,GAAG,GAAG,YAAY;AAC7C,iBAAW;AACX;AAAA,IACF;AAEA,QAAI,EAAE,yBAAyB,IAAI,KAAK,KAAK,QAAQ;AACnD,UAAI,KAAK,eAAe,OAAQ;AAChC,UAAI,CAAC,cAAc,IAAI,KAAK,OAAO,KAAK,EAAG;AAE3C,YAAM,UAAU,cAAc,aAAa;AAC3C,YAAM,eAAmC,CAAA;AAEzC,mBAAa;AAAA,QACX,EAAE;AAAA,UACA,CAAC,EAAE,uBAAuB,EAAE,WAAW,OAAO,CAAC,CAAC;AAAA,UAChD,EAAE,cAAc,gBAAgB,KAAK,OAAO,KAAK,CAAC;AAAA,QAAA;AAAA,MACpD;AAEF,YAAM,mBAGD,CAAA;AACL,iBAAW,aAAa,KAAK,YAAY;AACvC,YAAI,EAAE,kBAAkB,SAAS,GAAG;AAClC,cAAI,UAAU,eAAe,OAAQ;AACrC,gBAAM,YAAY,UAAU,MAAM;AAClC,gBAAM,eAAe,EAAE,aAAa,UAAU,QAAQ,IAClD,UAAU,SAAS,OACnB,UAAU,SAAS;AAEvB,gBAAM,cAAc,kBAAkB,SAAS;AAC/C,uBAAa;AAAA,YACX,EAAE,oBAAoB,SAAS;AAAA,cAC7B,EAAE;AAAA,gBACA,EAAE,WAAW,WAAW;AAAA,gBACxB,EAAE;AAAA,kBACA,EAAE,WAAW,OAAO;AAAA,kBACpB,EAAE,WAAW,SAAS;AAAA,gBAAA;AAAA,cACxB;AAAA,YACF,CACD;AAAA,UAAA;AAEH,2BAAiB,KAAK,EAAE,WAAW,aAAa,cAAc;AAAA,QAChE;AAAA,MACF;AAEA,UAAI,iBAAiB,SAAS,GAAG;AAC/B,qBAAa;AAAA,UACX,EAAE;AAAA,YACA;AAAA,YACA,iBAAiB;AAAA,cAAI,CAAC,MACpB,EAAE;AAAA,gBACA,EAAE,WAAW,EAAE,SAAS;AAAA,gBACxB,EAAE,WAAW,EAAE,YAAY;AAAA,cAAA;AAAA,YAC7B;AAAA,UACF;AAAA,QACF;AAAA,MAEJ;AAEA,UAAI,QAAQ,KAAK,OAA
O,GAAG,GAAG,GAAG,YAAY;AAC7C,iBAAW;AACX;AAAA,IACF;AAEA,QAAI,EAAE,uBAAuB,IAAI,GAAG;AAClC,UAAI,KAAK,eAAe,OAAQ;AAChC,UAAI,CAAC,cAAc,IAAI,KAAK,OAAO,KAAK,EAAG;AAE3C,UAAI,QAAQ,KAAK,OAAO,GAAG,CAAC;AAC5B,iBAAW;AACX;AAAA,IACF;AAAA,EACF;AAEA,MAAI,CAAC,SAAU,QAAO;AAEtB,QAAM,SAAS,gBAAgB,KAAK;AAAA,IAClC,YAAY;AAAA,IACZ,gBAAgB;AAAA,IAChB,UAAU;AAAA,EAAA,CACX;AAED,SAAO;AAAA,IACL,MAAM,OAAO;AAAA,IACb,GAAI,OAAO,MAAM,EAAE,KAAK,OAAO,IAAA,IAAyB,CAAA;AAAA,EAAC;AAE7D;"}
@@ -55,22 +55,19 @@ export declare class ImportLocCache {
55
55
  /** Remove all cache entries where the importer matches `file`. */
56
56
  deleteByFile(file: string): void;
57
57
  }
58
- export declare function clearImportPatternCache(): void;
58
+ export type FindImportSpecifierIndex = (code: string, source: string) => number;
59
59
  /**
60
60
  * Find the location of an import statement in a transformed module
61
61
  * by searching the post-transform code and mapping back via sourcemap.
62
62
  * Results are cached in `importLocCache`.
63
63
  */
64
- export declare function findImportStatementLocationFromTransformed(provider: TransformResultProvider, importerId: string, source: string, importLocCache: ImportLocCache): Promise<Loc | undefined>;
64
+ export declare function findImportStatementLocationFromTransformed(provider: TransformResultProvider, importerId: string, source: string, importLocCache: ImportLocCache, findImportSpecifierIndex: FindImportSpecifierIndex): Promise<Loc | undefined>;
65
65
  /**
66
66
  * Find the first post-compile usage location for a denied import specifier.
67
67
  * Best-effort: searches transformed code for non-import uses of imported
68
68
  * bindings and maps back to original source via sourcemap.
69
69
  */
70
- export declare function findPostCompileUsageLocation(provider: TransformResultProvider, importerId: string, source: string, findPostCompileUsagePos: (code: string, source: string) => {
71
- line: number;
72
- column0: number;
73
- } | undefined): Promise<Loc | undefined>;
70
+ export declare function findPostCompileUsageLocation(provider: TransformResultProvider, importerId: string, source: string): Promise<Loc | undefined>;
74
71
  /**
75
72
  * Annotate each trace hop with the location of the import that created the
76
73
  * edge (file:line:col). Skips steps that already have a location.
@@ -80,7 +77,7 @@ export declare function addTraceImportLocations(provider: TransformResultProvide
80
77
  specifier?: string;
81
78
  line?: number;
82
79
  column?: number;
83
- }>, importLocCache: ImportLocCache): Promise<void>;
80
+ }>, importLocCache: ImportLocCache, findImportSpecifierIndex: FindImportSpecifierIndex): Promise<void>;
84
81
  export interface CodeSnippet {
85
82
  /** Source lines with line numbers, e.g. `[" 6 | import { getSecret } from './secret.server'", ...]` */
86
83
  lines: Array<string>;
@@ -1,6 +1,7 @@
1
1
  import { SourceMapConsumer } from "source-map";
2
2
  import * as path from "pathe";
3
- import { normalizeFilePath, getOrCreate, escapeRegExp } from "./utils.js";
3
+ import { findPostCompileUsagePos } from "./postCompileUsage.js";
4
+ import { normalizeFilePath, getOrCreate } from "./utils.js";
4
5
  function buildLineIndex(code) {
5
6
  const offsets = [0];
6
7
  for (let i = 0; i < code.length; i++) {
@@ -159,32 +160,7 @@ class ImportLocCache {
159
160
  }
160
161
  }
161
162
  }
162
- const importPatternCache = /* @__PURE__ */ new Map();
163
- function clearImportPatternCache() {
164
- importPatternCache.clear();
165
- }
166
- function findFirstImportSpecifierIndex(code, source) {
167
- let patterns = importPatternCache.get(source);
168
- if (!patterns) {
169
- const escaped = escapeRegExp(source);
170
- patterns = [
171
- new RegExp(`\\bimport\\s+(['"])${escaped}\\1`),
172
- new RegExp(`\\bfrom\\s+(['"])${escaped}\\1`),
173
- new RegExp(`\\bimport\\s*\\(\\s*(['"])${escaped}\\1\\s*\\)`)
174
- ];
175
- importPatternCache.set(source, patterns);
176
- }
177
- let best = -1;
178
- for (const re of patterns) {
179
- const m = re.exec(code);
180
- if (!m) continue;
181
- const idx = m.index + m[0].indexOf(source);
182
- if (idx === -1) continue;
183
- if (best === -1 || idx < best) best = idx;
184
- }
185
- return best;
186
- }
187
- async function findImportStatementLocationFromTransformed(provider, importerId, source, importLocCache) {
163
+ async function findImportStatementLocationFromTransformed(provider, importerId, source, importLocCache, findImportSpecifierIndex) {
188
164
  const importerFile = normalizeFilePath(importerId);
189
165
  const cacheKey = `${importerFile}::${source}`;
190
166
  if (importLocCache.has(cacheKey)) {
@@ -198,7 +174,7 @@ async function findImportStatementLocationFromTransformed(provider, importerId,
198
174
  }
199
175
  const { code, map } = res;
200
176
  const lineIndex = res.lineIndex ?? buildLineIndex(code);
201
- const idx = findFirstImportSpecifierIndex(code, source);
177
+ const idx = findImportSpecifierIndex(code, source);
202
178
  if (idx === -1) {
203
179
  importLocCache.set(cacheKey, null);
204
180
  return void 0;
@@ -212,7 +188,7 @@ async function findImportStatementLocationFromTransformed(provider, importerId,
212
188
  return void 0;
213
189
  }
214
190
  }
215
- async function findPostCompileUsageLocation(provider, importerId, source, findPostCompileUsagePos) {
191
+ async function findPostCompileUsageLocation(provider, importerId, source) {
216
192
  try {
217
193
  const importerFile = normalizeFilePath(importerId);
218
194
  const res = provider.getTransformResult(importerId);
@@ -228,7 +204,7 @@ async function findPostCompileUsageLocation(provider, importerId, source, findPo
228
204
  return void 0;
229
205
  }
230
206
  }
231
- async function addTraceImportLocations(provider, trace, importLocCache) {
207
+ async function addTraceImportLocations(provider, trace, importLocCache, findImportSpecifierIndex) {
232
208
  for (const step of trace) {
233
209
  if (!step.specifier) continue;
234
210
  if (step.line != null && step.column != null) continue;
@@ -236,7 +212,8 @@ async function addTraceImportLocations(provider, trace, importLocCache) {
236
212
  provider,
237
213
  step.file,
238
214
  step.specifier,
239
- importLocCache
215
+ importLocCache,
216
+ findImportSpecifierIndex
240
217
  );
241
218
  if (!loc) continue;
242
219
  step.line = loc.line;
@@ -248,51 +225,20 @@ function buildCodeSnippet(provider, moduleId, loc, contextLines = 2) {
248
225
  const importerFile = normalizeFilePath(moduleId);
249
226
  const res = provider.getTransformResult(moduleId);
250
227
  if (!res) return void 0;
251
- const { code: transformedCode, originalCode } = res;
252
- const sourceCode = originalCode ?? transformedCode;
228
+ const sourceCode = res.originalCode ?? res.code;
253
229
  const targetLine = loc.line;
254
230
  const targetCol = loc.column;
255
231
  if (targetLine < 1) return void 0;
256
- const wantStart = Math.max(1, targetLine - contextLines);
257
- const wantEnd = targetLine + contextLines;
258
- let lineNum = 1;
259
- let pos = 0;
260
- while (lineNum < wantStart && pos < sourceCode.length) {
261
- const ch = sourceCode.charCodeAt(pos);
262
- if (ch === 10) {
263
- lineNum++;
264
- } else if (ch === 13) {
265
- lineNum++;
266
- if (pos + 1 < sourceCode.length && sourceCode.charCodeAt(pos + 1) === 10)
267
- pos++;
268
- }
269
- pos++;
232
+ const allLines = sourceCode.split("\n");
233
+ for (let i = 0; i < allLines.length; i++) {
234
+ const line = allLines[i];
235
+ if (line.endsWith("\r")) allLines[i] = line.slice(0, -1);
270
236
  }
271
- if (lineNum < wantStart) return void 0;
272
- const lines = [];
273
- let curLine = wantStart;
274
- while (curLine <= wantEnd && pos <= sourceCode.length) {
275
- let eol = pos;
276
- while (eol < sourceCode.length) {
277
- const ch = sourceCode.charCodeAt(eol);
278
- if (ch === 10 || ch === 13) break;
279
- eol++;
280
- }
281
- lines.push(sourceCode.slice(pos, eol));
282
- curLine++;
283
- if (eol < sourceCode.length) {
284
- if (sourceCode.charCodeAt(eol) === 13 && eol + 1 < sourceCode.length && sourceCode.charCodeAt(eol + 1) === 10) {
285
- pos = eol + 2;
286
- } else {
287
- pos = eol + 1;
288
- }
289
- } else {
290
- pos = eol + 1;
291
- }
292
- }
293
- if (targetLine > wantStart + lines.length - 1) return void 0;
294
- const actualEnd = wantStart + lines.length - 1;
295
- const gutterWidth = String(actualEnd).length;
237
+ const wantStart = Math.max(1, targetLine - contextLines);
238
+ const wantEnd = Math.min(allLines.length, targetLine + contextLines);
239
+ if (targetLine > allLines.length) return void 0;
240
+ const lines = allLines.slice(wantStart - 1, wantEnd);
241
+ const gutterWidth = String(wantEnd).length;
296
242
  const sourceFile = loc.file ?? importerFile;
297
243
  const snippetLines = [];
298
244
  for (let i = 0; i < lines.length; i++) {
@@ -320,7 +266,6 @@ export {
320
266
  addTraceImportLocations,
321
267
  buildCodeSnippet,
322
268
  buildLineIndex,
323
- clearImportPatternCache,
324
269
  findImportStatementLocationFromTransformed,
325
270
  findPostCompileUsageLocation,
326
271
  pickOriginalCodeFromSourcesContent
@@ -1 +1 @@
1
- {"version":3,"file":"sourceLocation.js","sources":["../../../src/import-protection-plugin/sourceLocation.ts"],"sourcesContent":["import { SourceMapConsumer } from 'source-map'\nimport * as path from 'pathe'\n\nimport { escapeRegExp, getOrCreate, normalizeFilePath } from './utils'\nimport type { Loc } from './trace'\nimport type { RawSourceMap } from 'source-map'\n\n// Source-map type compatible with both Rollup's SourceMap and source-map's\n// RawSourceMap. Structural type avoids version: number vs string mismatch.\n\n/**\n * Minimal source-map shape used throughout the import-protection plugin.\n */\nexport interface SourceMapLike {\n file?: string\n sourceRoot?: string\n version: number | string\n sources: Array<string>\n names: Array<string>\n sourcesContent?: Array<string | null>\n mappings: string\n}\n\n// Transform result provider (replaces ctx.load() which doesn't work in dev)\nexport interface TransformResult {\n code: string\n map: SourceMapLike | undefined\n originalCode: string | undefined\n /** Precomputed line index for `code` (index → line/col). */\n lineIndex?: LineIndex\n}\n\n/**\n * Provides the transformed code and composed sourcemap for a module.\n *\n * Populated from a late-running transform hook. By the time `resolveId`\n * fires for an import, the importer has already been fully transformed.\n */\nexport interface TransformResultProvider {\n getTransformResult: (id: string) => TransformResult | undefined\n}\n\n// Index → line/column conversion\n\nexport type LineIndex = {\n offsets: Array<number>\n}\n\nexport function buildLineIndex(code: string): LineIndex {\n const offsets: Array<number> = [0]\n for (let i = 0; i < code.length; i++) {\n if (code.charCodeAt(i) === 10) {\n offsets.push(i + 1)\n }\n }\n return { offsets }\n}\n\nfunction upperBound(values: Array<number>, x: number): number {\n let lo = 0\n let hi = values.length\n while (lo < hi) {\n const mid = (lo + hi) >> 1\n if (values[mid]! 
<= x) lo = mid + 1\n else hi = mid\n }\n return lo\n}\n\nfunction indexToLineColWithIndex(\n lineIndex: LineIndex,\n idx: number,\n): { line: number; column0: number } {\n const offsets = lineIndex.offsets\n const ub = upperBound(offsets, idx)\n const lineIdx = Math.max(0, ub - 1)\n const line = lineIdx + 1\n\n const lineStart = offsets[lineIdx] ?? 0\n return { line, column0: Math.max(0, idx - lineStart) }\n}\n\n/**\n * Pick the most-likely original source text for `importerFile` from\n * a sourcemap that may contain multiple sources.\n */\nexport function pickOriginalCodeFromSourcesContent(\n map: SourceMapLike | undefined,\n importerFile: string,\n root: string,\n): string | undefined {\n if (!map?.sourcesContent || map.sources.length === 0) {\n return undefined\n }\n\n const file = normalizeFilePath(importerFile)\n const sourceRoot = map.sourceRoot\n const fileSeg = file.split('/').filter(Boolean)\n\n const resolveBase = sourceRoot ? path.resolve(root, sourceRoot) : root\n\n let bestIdx = -1\n let bestScore = -1\n\n for (let i = 0; i < map.sources.length; i++) {\n const content = map.sourcesContent[i]\n if (typeof content !== 'string') continue\n\n const src = map.sources[i] ?? ''\n\n const normalizedSrc = normalizeFilePath(src)\n if (normalizedSrc === file) {\n return content\n }\n\n let resolved: string\n if (!src) {\n resolved = ''\n } else if (path.isAbsolute(src)) {\n resolved = normalizeFilePath(src)\n } else {\n resolved = normalizeFilePath(path.resolve(resolveBase, src))\n }\n if (resolved === file) {\n return content\n }\n\n // Count matching path segments from the end.\n const normalizedSrcSeg = normalizedSrc.split('/').filter(Boolean)\n const resolvedSeg =\n resolved !== normalizedSrc\n ? 
resolved.split('/').filter(Boolean)\n : normalizedSrcSeg\n const score = Math.max(\n segmentSuffixScore(normalizedSrcSeg, fileSeg),\n segmentSuffixScore(resolvedSeg, fileSeg),\n )\n\n if (score > bestScore) {\n bestScore = score\n bestIdx = i\n }\n }\n\n if (bestIdx !== -1 && bestScore >= 1) {\n return map.sourcesContent[bestIdx] ?? undefined\n }\n\n return map.sourcesContent[0] ?? undefined\n}\n\n/** Count matching path segments from the end of `aSeg` against `bSeg`. */\nfunction segmentSuffixScore(aSeg: Array<string>, bSeg: Array<string>): number {\n let score = 0\n for (\n let i = aSeg.length - 1, j = bSeg.length - 1;\n i >= 0 && j >= 0;\n i--, j--\n ) {\n if (aSeg[i] !== bSeg[j]) break\n score++\n }\n return score\n}\n\nasync function mapGeneratedToOriginal(\n map: SourceMapLike | undefined,\n generated: { line: number; column0: number },\n fallbackFile: string,\n): Promise<Loc> {\n const fallback: Loc = {\n file: fallbackFile,\n line: generated.line,\n column: generated.column0 + 1,\n }\n\n if (!map) {\n return fallback\n }\n\n const consumer = await getSourceMapConsumer(map)\n if (!consumer) return fallback\n\n try {\n const orig = consumer.originalPositionFor({\n line: generated.line,\n column: generated.column0,\n })\n if (orig.line != null && orig.column != null) {\n return {\n file: orig.source ? normalizeFilePath(orig.source) : fallbackFile,\n line: orig.line,\n column: orig.column + 1,\n }\n }\n } catch {\n // Malformed sourcemap\n }\n\n return fallback\n}\n\nconst consumerCache = new WeakMap<object, Promise<SourceMapConsumer | null>>()\n\nfunction toRawSourceMap(map: SourceMapLike): RawSourceMap {\n return {\n ...map,\n file: map.file ?? '',\n version: Number(map.version),\n sourcesContent: map.sourcesContent?.map((s) => s ?? '') ?? 
[],\n }\n}\n\nasync function getSourceMapConsumer(\n map: SourceMapLike,\n): Promise<SourceMapConsumer | null> {\n const cached = consumerCache.get(map)\n if (cached) return cached\n\n const promise = (async () => {\n try {\n return await new SourceMapConsumer(toRawSourceMap(map))\n } catch {\n return null\n }\n })()\n\n consumerCache.set(map, promise)\n return promise\n}\n\nexport type ImportLocEntry = { file?: string; line: number; column: number }\n\n/**\n * Cache for import statement locations with reverse index for O(1)\n * invalidation by file. Keys: `${importerFile}::${source}`.\n */\nexport class ImportLocCache {\n private cache = new Map<string, ImportLocEntry | null>()\n private reverseIndex = new Map<string, Set<string>>()\n\n has(key: string): boolean {\n return this.cache.has(key)\n }\n\n get(key: string): ImportLocEntry | null | undefined {\n return this.cache.get(key)\n }\n\n set(key: string, value: ImportLocEntry | null): void {\n this.cache.set(key, value)\n const file = key.slice(0, key.indexOf('::'))\n getOrCreate(this.reverseIndex, file, () => new Set()).add(key)\n }\n\n clear(): void {\n this.cache.clear()\n this.reverseIndex.clear()\n }\n\n /** Remove all cache entries where the importer matches `file`. 
*/\n deleteByFile(file: string): void {\n const keys = this.reverseIndex.get(file)\n if (keys) {\n for (const key of keys) {\n this.cache.delete(key)\n }\n this.reverseIndex.delete(file)\n }\n }\n}\n\n// Import specifier search (regex-based)\n\nconst importPatternCache = new Map<string, Array<RegExp>>()\n\nexport function clearImportPatternCache(): void {\n importPatternCache.clear()\n}\n\nfunction findFirstImportSpecifierIndex(code: string, source: string): number {\n let patterns = importPatternCache.get(source)\n if (!patterns) {\n const escaped = escapeRegExp(source)\n patterns = [\n new RegExp(`\\\\bimport\\\\s+(['\"])${escaped}\\\\1`),\n new RegExp(`\\\\bfrom\\\\s+(['\"])${escaped}\\\\1`),\n new RegExp(`\\\\bimport\\\\s*\\\\(\\\\s*(['\"])${escaped}\\\\1\\\\s*\\\\)`),\n ]\n importPatternCache.set(source, patterns)\n }\n\n let best = -1\n for (const re of patterns) {\n const m = re.exec(code)\n if (!m) continue\n const idx = m.index + m[0].indexOf(source)\n if (idx === -1) continue\n if (best === -1 || idx < best) best = idx\n }\n return best\n}\n\n/**\n * Find the location of an import statement in a transformed module\n * by searching the post-transform code and mapping back via sourcemap.\n * Results are cached in `importLocCache`.\n */\nexport async function findImportStatementLocationFromTransformed(\n provider: TransformResultProvider,\n importerId: string,\n source: string,\n importLocCache: ImportLocCache,\n): Promise<Loc | undefined> {\n const importerFile = normalizeFilePath(importerId)\n const cacheKey = `${importerFile}::${source}`\n if (importLocCache.has(cacheKey)) {\n return importLocCache.get(cacheKey) ?? undefined\n }\n\n try {\n const res = provider.getTransformResult(importerId)\n if (!res) {\n importLocCache.set(cacheKey, null)\n return undefined\n }\n\n const { code, map } = res\n\n const lineIndex = res.lineIndex ?? 
buildLineIndex(code)\n\n const idx = findFirstImportSpecifierIndex(code, source)\n if (idx === -1) {\n importLocCache.set(cacheKey, null)\n return undefined\n }\n\n const generated = indexToLineColWithIndex(lineIndex, idx)\n const loc = await mapGeneratedToOriginal(map, generated, importerFile)\n importLocCache.set(cacheKey, loc)\n return loc\n } catch {\n importLocCache.set(cacheKey, null)\n return undefined\n }\n}\n\n/**\n * Find the first post-compile usage location for a denied import specifier.\n * Best-effort: searches transformed code for non-import uses of imported\n * bindings and maps back to original source via sourcemap.\n */\nexport async function findPostCompileUsageLocation(\n provider: TransformResultProvider,\n importerId: string,\n source: string,\n findPostCompileUsagePos: (\n code: string,\n source: string,\n ) => { line: number; column0: number } | undefined,\n): Promise<Loc | undefined> {\n try {\n const importerFile = normalizeFilePath(importerId)\n const res = provider.getTransformResult(importerId)\n if (!res) return undefined\n const { code, map } = res\n\n if (!res.lineIndex) {\n res.lineIndex = buildLineIndex(code)\n }\n\n const pos = findPostCompileUsagePos(code, source)\n if (!pos) return undefined\n\n return await mapGeneratedToOriginal(map, pos, importerFile)\n } catch {\n return undefined\n }\n}\n\n/**\n * Annotate each trace hop with the location of the import that created the\n * edge (file:line:col). 
Skips steps that already have a location.\n */\nexport async function addTraceImportLocations(\n provider: TransformResultProvider,\n trace: Array<{\n file: string\n specifier?: string\n line?: number\n column?: number\n }>,\n importLocCache: ImportLocCache,\n): Promise<void> {\n for (const step of trace) {\n if (!step.specifier) continue\n if (step.line != null && step.column != null) continue\n const loc = await findImportStatementLocationFromTransformed(\n provider,\n step.file,\n step.specifier,\n importLocCache,\n )\n if (!loc) continue\n step.line = loc.line\n step.column = loc.column\n }\n}\n\n// Code snippet extraction (vitest-style context around a location)\n\nexport interface CodeSnippet {\n /** Source lines with line numbers, e.g. `[\" 6 | import { getSecret } from './secret.server'\", ...]` */\n lines: Array<string>\n /** The highlighted line (1-indexed original line number) */\n highlightLine: number\n /** Clickable file:line reference */\n location: string\n}\n\n/**\n * Build a vitest-style code snippet showing lines surrounding a location.\n *\n * Prefers `originalCode` from the sourcemap's sourcesContent; falls back\n * to transformed code when unavailable.\n */\nexport function buildCodeSnippet(\n provider: TransformResultProvider,\n moduleId: string,\n loc: Loc,\n contextLines: number = 2,\n): CodeSnippet | undefined {\n try {\n const importerFile = normalizeFilePath(moduleId)\n const res = provider.getTransformResult(moduleId)\n if (!res) return undefined\n\n const { code: transformedCode, originalCode } = res\n\n const sourceCode = originalCode ?? 
transformedCode\n const targetLine = loc.line // 1-indexed\n const targetCol = loc.column // 1-indexed\n\n if (targetLine < 1) return undefined\n\n const wantStart = Math.max(1, targetLine - contextLines)\n const wantEnd = targetLine + contextLines\n\n // Advance to wantStart\n let lineNum = 1\n let pos = 0\n while (lineNum < wantStart && pos < sourceCode.length) {\n const ch = sourceCode.charCodeAt(pos)\n if (ch === 10) {\n lineNum++\n } else if (ch === 13) {\n lineNum++\n if (\n pos + 1 < sourceCode.length &&\n sourceCode.charCodeAt(pos + 1) === 10\n )\n pos++\n }\n pos++\n }\n if (lineNum < wantStart) return undefined\n\n const lines: Array<string> = []\n let curLine = wantStart\n while (curLine <= wantEnd && pos <= sourceCode.length) {\n // Find end of current line\n let eol = pos\n while (eol < sourceCode.length) {\n const ch = sourceCode.charCodeAt(eol)\n if (ch === 10 || ch === 13) break\n eol++\n }\n lines.push(sourceCode.slice(pos, eol))\n curLine++\n if (eol < sourceCode.length) {\n if (\n sourceCode.charCodeAt(eol) === 13 &&\n eol + 1 < sourceCode.length &&\n sourceCode.charCodeAt(eol + 1) === 10\n ) {\n pos = eol + 2\n } else {\n pos = eol + 1\n }\n } else {\n pos = eol + 1\n }\n }\n\n if (targetLine > wantStart + lines.length - 1) return undefined\n\n const actualEnd = wantStart + lines.length - 1\n const gutterWidth = String(actualEnd).length\n\n const sourceFile = loc.file ?? importerFile\n const snippetLines: Array<string> = []\n for (let i = 0; i < lines.length; i++) {\n const ln = wantStart + i\n const lineContent = lines[i]!\n const lineNumStr = String(ln).padStart(gutterWidth, ' ')\n const marker = ln === targetLine ? 
'>' : ' '\n snippetLines.push(` ${marker} ${lineNumStr} | ${lineContent}`)\n\n if (ln === targetLine && targetCol > 0) {\n const padding = ' '.repeat(targetCol - 1)\n snippetLines.push(` ${' '.repeat(gutterWidth)} | ${padding}^`)\n }\n }\n\n return {\n lines: snippetLines,\n highlightLine: targetLine,\n location: `${sourceFile}:${targetLine}:${targetCol}`,\n }\n } catch {\n return undefined\n }\n}\n"],"names":[],"mappings":";;;AAgDO,SAAS,eAAe,MAAyB;AACtD,QAAM,UAAyB,CAAC,CAAC;AACjC,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,WAAW,CAAC,MAAM,IAAI;AAC7B,cAAQ,KAAK,IAAI,CAAC;AAAA,IACpB;AAAA,EACF;AACA,SAAO,EAAE,QAAA;AACX;AAEA,SAAS,WAAW,QAAuB,GAAmB;AAC5D,MAAI,KAAK;AACT,MAAI,KAAK,OAAO;AAChB,SAAO,KAAK,IAAI;AACd,UAAM,MAAO,KAAK,MAAO;AACzB,QAAI,OAAO,GAAG,KAAM,QAAQ,MAAM;AAAA,QAC7B,MAAK;AAAA,EACZ;AACA,SAAO;AACT;AAEA,SAAS,wBACP,WACA,KACmC;AACnC,QAAM,UAAU,UAAU;AAC1B,QAAM,KAAK,WAAW,SAAS,GAAG;AAClC,QAAM,UAAU,KAAK,IAAI,GAAG,KAAK,CAAC;AAClC,QAAM,OAAO,UAAU;AAEvB,QAAM,YAAY,QAAQ,OAAO,KAAK;AACtC,SAAO,EAAE,MAAM,SAAS,KAAK,IAAI,GAAG,MAAM,SAAS,EAAA;AACrD;AAMO,SAAS,mCACd,KACA,cACA,MACoB;AACpB,MAAI,CAAC,KAAK,kBAAkB,IAAI,QAAQ,WAAW,GAAG;AACpD,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,kBAAkB,YAAY;AAC3C,QAAM,aAAa,IAAI;AACvB,QAAM,UAAU,KAAK,MAAM,GAAG,EAAE,OAAO,OAAO;AAE9C,QAAM,cAAc,aAAa,KAAK,QAAQ,MAAM,UAAU,IAAI;AAElE,MAAI,UAAU;AACd,MAAI,YAAY;AAEhB,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,QAAQ,KAAK;AAC3C,UAAM,UAAU,IAAI,eAAe,CAAC;AACpC,QAAI,OAAO,YAAY,SAAU;AAEjC,UAAM,MAAM,IAAI,QAAQ,CAAC,KAAK;AAE9B,UAAM,gBAAgB,kBAAkB,GAAG;AAC3C,QAAI,kBAAkB,MAAM;AAC1B,aAAO;AAAA,IACT;AAEA,QAAI;AACJ,QAAI,CAAC,KAAK;AACR,iBAAW;AAAA,IACb,WAAW,KAAK,WAAW,GAAG,GAAG;AAC/B,iBAAW,kBAAkB,GAAG;AAAA,IAClC,OAAO;AACL,iBAAW,kBAAkB,KAAK,QAAQ,aAAa,GAAG,CAAC;AAAA,IAC7D;AACA,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,cAAc,MAAM,GAAG,EAAE,OAAO,OAAO;AAChE,UAAM,cACJ,aAAa,gBACT,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO,IAClC;AACN,UAAM,QAAQ,KAAK;AAAA,MACjB,mBAAmB,kBAAkB,OAAO;AAAA,MAC5C,mBAAmB,aAAa,OAAO;AAAA,IAAA;AAGzC,QAAI,QAAQ,WAAW;AACrB,kBAAY;AACZ,gBAAU;AAAA,IACZ;AAA
A,EACF;AAEA,MAAI,YAAY,MAAM,aAAa,GAAG;AACpC,WAAO,IAAI,eAAe,OAAO,KAAK;AAAA,EACxC;AAEA,SAAO,IAAI,eAAe,CAAC,KAAK;AAClC;AAGA,SAAS,mBAAmB,MAAqB,MAA6B;AAC5E,MAAI,QAAQ;AACZ,WACM,IAAI,KAAK,SAAS,GAAG,IAAI,KAAK,SAAS,GAC3C,KAAK,KAAK,KAAK,GACf,KAAK,KACL;AACA,QAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAG;AACzB;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAe,uBACb,KACA,WACA,cACc;AACd,QAAM,WAAgB;AAAA,IACpB,MAAM;AAAA,IACN,MAAM,UAAU;AAAA,IAChB,QAAQ,UAAU,UAAU;AAAA,EAAA;AAG9B,MAAI,CAAC,KAAK;AACR,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,MAAI,CAAC,SAAU,QAAO;AAEtB,MAAI;AACF,UAAM,OAAO,SAAS,oBAAoB;AAAA,MACxC,MAAM,UAAU;AAAA,MAChB,QAAQ,UAAU;AAAA,IAAA,CACnB;AACD,QAAI,KAAK,QAAQ,QAAQ,KAAK,UAAU,MAAM;AAC5C,aAAO;AAAA,QACL,MAAM,KAAK,SAAS,kBAAkB,KAAK,MAAM,IAAI;AAAA,QACrD,MAAM,KAAK;AAAA,QACX,QAAQ,KAAK,SAAS;AAAA,MAAA;AAAA,IAE1B;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEA,MAAM,oCAAoB,QAAA;AAE1B,SAAS,eAAe,KAAkC;AACxD,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,IAAI,QAAQ;AAAA,IAClB,SAAS,OAAO,IAAI,OAAO;AAAA,IAC3B,gBAAgB,IAAI,gBAAgB,IAAI,CAAC,MAAM,KAAK,EAAE,KAAK,CAAA;AAAA,EAAC;AAEhE;AAEA,eAAe,qBACb,KACmC;AACnC,QAAM,SAAS,cAAc,IAAI,GAAG;AACpC,MAAI,OAAQ,QAAO;AAEnB,QAAM,WAAW,YAAY;AAC3B,QAAI;AACF,aAAO,MAAM,IAAI,kBAAkB,eAAe,GAAG,CAAC;AAAA,IACxD,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF,GAAA;AAEA,gBAAc,IAAI,KAAK,OAAO;AAC9B,SAAO;AACT;AAQO,MAAM,eAAe;AAAA,EAClB,4BAAY,IAAA;AAAA,EACZ,mCAAmB,IAAA;AAAA,EAE3B,IAAI,KAAsB;AACxB,WAAO,KAAK,MAAM,IAAI,GAAG;AAAA,EAC3B;AAAA,EAEA,IAAI,KAAgD;AAClD,WAAO,KAAK,MAAM,IAAI,GAAG;AAAA,EAC3B;AAAA,EAEA,IAAI,KAAa,OAAoC;AACnD,SAAK,MAAM,IAAI,KAAK,KAAK;AACzB,UAAM,OAAO,IAAI,MAAM,GAAG,IAAI,QAAQ,IAAI,CAAC;AAC3C,gBAAY,KAAK,cAAc,MAAM,0BAAU,IAAA,CAAK,EAAE,IAAI,GAAG;AAAA,EAC/D;AAAA,EAEA,QAAc;AACZ,SAAK,MAAM,MAAA;AACX,SAAK,aAAa,MAAA;AAAA,EACpB;AAAA;AAAA,EAGA,aAAa,MAAoB;AAC/B,UAAM,OAAO,KAAK,aAAa,IAAI,IAAI;AACvC,QAAI,MAAM;AACR,iBAAW,OAAO,MAAM;AACtB,aAAK,MAAM,OAAO,GAAG;AAAA,MACvB;AACA,WAAK,aAAa,OAAO,IAAI;AAAA,IAC/B;AAAA,EACF;AACF;AAIA,MAAM,yCAAyB,IAAA;AAExB,SAAS,0BAAgC;AAC9C,qBAAmB,MAAA;AACrB;AAEA,SAAS,8BAA8B,MAAc,QAAwB;AAC3E,MAAI,WA
AW,mBAAmB,IAAI,MAAM;AAC5C,MAAI,CAAC,UAAU;AACb,UAAM,UAAU,aAAa,MAAM;AACnC,eAAW;AAAA,MACT,IAAI,OAAO,sBAAsB,OAAO,KAAK;AAAA,MAC7C,IAAI,OAAO,oBAAoB,OAAO,KAAK;AAAA,MAC3C,IAAI,OAAO,6BAA6B,OAAO,YAAY;AAAA,IAAA;AAE7D,uBAAmB,IAAI,QAAQ,QAAQ;AAAA,EACzC;AAEA,MAAI,OAAO;AACX,aAAW,MAAM,UAAU;AACzB,UAAM,IAAI,GAAG,KAAK,IAAI;AACtB,QAAI,CAAC,EAAG;AACR,UAAM,MAAM,EAAE,QAAQ,EAAE,CAAC,EAAE,QAAQ,MAAM;AACzC,QAAI,QAAQ,GAAI;AAChB,QAAI,SAAS,MAAM,MAAM,KAAM,QAAO;AAAA,EACxC;AACA,SAAO;AACT;AAOA,eAAsB,2CACpB,UACA,YACA,QACA,gBAC0B;AAC1B,QAAM,eAAe,kBAAkB,UAAU;AACjD,QAAM,WAAW,GAAG,YAAY,KAAK,MAAM;AAC3C,MAAI,eAAe,IAAI,QAAQ,GAAG;AAChC,WAAO,eAAe,IAAI,QAAQ,KAAK;AAAA,EACzC;AAEA,MAAI;AACF,UAAM,MAAM,SAAS,mBAAmB,UAAU;AAClD,QAAI,CAAC,KAAK;AACR,qBAAe,IAAI,UAAU,IAAI;AACjC,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,MAAM,IAAA,IAAQ;AAEtB,UAAM,YAAY,IAAI,aAAa,eAAe,IAAI;AAEtD,UAAM,MAAM,8BAA8B,MAAM,MAAM;AACtD,QAAI,QAAQ,IAAI;AACd,qBAAe,IAAI,UAAU,IAAI;AACjC,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,wBAAwB,WAAW,GAAG;AACxD,UAAM,MAAM,MAAM,uBAAuB,KAAK,WAAW,YAAY;AACrE,mBAAe,IAAI,UAAU,GAAG;AAChC,WAAO;AAAA,EACT,QAAQ;AACN,mBAAe,IAAI,UAAU,IAAI;AACjC,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,6BACpB,UACA,YACA,QACA,yBAI0B;AAC1B,MAAI;AACF,UAAM,eAAe,kBAAkB,UAAU;AACjD,UAAM,MAAM,SAAS,mBAAmB,UAAU;AAClD,QAAI,CAAC,IAAK,QAAO;AACjB,UAAM,EAAE,MAAM,IAAA,IAAQ;AAEtB,QAAI,CAAC,IAAI,WAAW;AAClB,UAAI,YAAY,eAAe,IAAI;AAAA,IACrC;AAEA,UAAM,MAAM,wBAAwB,MAAM,MAAM;AAChD,QAAI,CAAC,IAAK,QAAO;AAEjB,WAAO,MAAM,uBAAuB,KAAK,KAAK,YAAY;AAAA,EAC5D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAMA,eAAsB,wBACpB,UACA,OAMA,gBACe;AACf,aAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,UAAW;AACrB,QAAI,KAAK,QAAQ,QAAQ,KAAK,UAAU,KAAM;AAC9C,UAAM,MAAM,MAAM;AAAA,MAChB;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,IAAA;AAEF,QAAI,CAAC,IAAK;AACV,SAAK,OAAO,IAAI;AAChB,SAAK,SAAS,IAAI;AAAA,EACpB;AACF;AAmBO,SAAS,iBACd,UACA,UACA,KACA,eAAuB,GACE;AACzB,MAAI;AACF,UAAM,eAAe,kBAAkB,QAAQ;AAC/C,UAAM,MAAM,SAAS,mBAAmB,QAAQ;AAChD,QAAI,CAAC,IAAK,QAAO;AAEjB,UAAM,EAAE,MAAM,iBAAiB,aAAA,IAAiB;AAEhD,UAAM,aAAa,gBAAgB;AACnC,UAAM,aAAa,IAAI;AACvB,UAAM,YAAY,IAAI;AAEtB,QAAI,aAAa
,EAAG,QAAO;AAE3B,UAAM,YAAY,KAAK,IAAI,GAAG,aAAa,YAAY;AACvD,UAAM,UAAU,aAAa;AAG7B,QAAI,UAAU;AACd,QAAI,MAAM;AACV,WAAO,UAAU,aAAa,MAAM,WAAW,QAAQ;AACrD,YAAM,KAAK,WAAW,WAAW,GAAG;AACpC,UAAI,OAAO,IAAI;AACb;AAAA,MACF,WAAW,OAAO,IAAI;AACpB;AACA,YACE,MAAM,IAAI,WAAW,UACrB,WAAW,WAAW,MAAM,CAAC,MAAM;AAEnC;AAAA,MACJ;AACA;AAAA,IACF;AACA,QAAI,UAAU,UAAW,QAAO;AAEhC,UAAM,QAAuB,CAAA;AAC7B,QAAI,UAAU;AACd,WAAO,WAAW,WAAW,OAAO,WAAW,QAAQ;AAErD,UAAI,MAAM;AACV,aAAO,MAAM,WAAW,QAAQ;AAC9B,cAAM,KAAK,WAAW,WAAW,GAAG;AACpC,YAAI,OAAO,MAAM,OAAO,GAAI;AAC5B;AAAA,MACF;AACA,YAAM,KAAK,WAAW,MAAM,KAAK,GAAG,CAAC;AACrC;AACA,UAAI,MAAM,WAAW,QAAQ;AAC3B,YACE,WAAW,WAAW,GAAG,MAAM,MAC/B,MAAM,IAAI,WAAW,UACrB,WAAW,WAAW,MAAM,CAAC,MAAM,IACnC;AACA,gBAAM,MAAM;AAAA,QACd,OAAO;AACL,gBAAM,MAAM;AAAA,QACd;AAAA,MACF,OAAO;AACL,cAAM,MAAM;AAAA,MACd;AAAA,IACF;AAEA,QAAI,aAAa,YAAY,MAAM,SAAS,EAAG,QAAO;AAEtD,UAAM,YAAY,YAAY,MAAM,SAAS;AAC7C,UAAM,cAAc,OAAO,SAAS,EAAE;AAEtC,UAAM,aAAa,IAAI,QAAQ;AAC/B,UAAM,eAA8B,CAAA;AACpC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,KAAK,YAAY;AACvB,YAAM,cAAc,MAAM,CAAC;AAC3B,YAAM,aAAa,OAAO,EAAE,EAAE,SAAS,aAAa,GAAG;AACvD,YAAM,SAAS,OAAO,aAAa,MAAM;AACzC,mBAAa,KAAK,KAAK,MAAM,IAAI,UAAU,MAAM,WAAW,EAAE;AAE9D,UAAI,OAAO,cAAc,YAAY,GAAG;AACtC,cAAM,UAAU,IAAI,OAAO,YAAY,CAAC;AACxC,qBAAa,KAAK,OAAO,IAAI,OAAO,WAAW,CAAC,MAAM,OAAO,GAAG;AAAA,MAClE;AAAA,IACF;AAEA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,eAAe;AAAA,MACf,UAAU,GAAG,UAAU,IAAI,UAAU,IAAI,SAAS;AAAA,IAAA;AAAA,EAEtD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;"}
1
+ {"version":3,"file":"sourceLocation.js","sources":["../../../src/import-protection-plugin/sourceLocation.ts"],"sourcesContent":["import { SourceMapConsumer } from 'source-map'\nimport * as path from 'pathe'\n\nimport { findPostCompileUsagePos } from './postCompileUsage'\nimport { getOrCreate, normalizeFilePath } from './utils'\nimport type { Loc } from './trace'\nimport type { RawSourceMap } from 'source-map'\n\n// Source-map type compatible with both Rollup's SourceMap and source-map's\n// RawSourceMap. Structural type avoids version: number vs string mismatch.\n\n/**\n * Minimal source-map shape used throughout the import-protection plugin.\n */\nexport interface SourceMapLike {\n file?: string\n sourceRoot?: string\n version: number | string\n sources: Array<string>\n names: Array<string>\n sourcesContent?: Array<string | null>\n mappings: string\n}\n\n// Transform result provider (replaces ctx.load() which doesn't work in dev)\nexport interface TransformResult {\n code: string\n map: SourceMapLike | undefined\n originalCode: string | undefined\n /** Precomputed line index for `code` (index → line/col). */\n lineIndex?: LineIndex\n}\n\n/**\n * Provides the transformed code and composed sourcemap for a module.\n *\n * Populated from a late-running transform hook. By the time `resolveId`\n * fires for an import, the importer has already been fully transformed.\n */\nexport interface TransformResultProvider {\n getTransformResult: (id: string) => TransformResult | undefined\n}\n\n// Index → line/column conversion\n\nexport type LineIndex = {\n offsets: Array<number>\n}\n\nexport function buildLineIndex(code: string): LineIndex {\n const offsets: Array<number> = [0]\n for (let i = 0; i < code.length; i++) {\n if (code.charCodeAt(i) === 10) {\n offsets.push(i + 1)\n }\n }\n return { offsets }\n}\n\nfunction upperBound(values: Array<number>, x: number): number {\n let lo = 0\n let hi = values.length\n while (lo < hi) {\n const mid = (lo + hi) >> 1\n if (values[mid]! 
<= x) lo = mid + 1\n else hi = mid\n }\n return lo\n}\n\nfunction indexToLineColWithIndex(\n lineIndex: LineIndex,\n idx: number,\n): { line: number; column0: number } {\n const offsets = lineIndex.offsets\n const ub = upperBound(offsets, idx)\n const lineIdx = Math.max(0, ub - 1)\n const line = lineIdx + 1\n\n const lineStart = offsets[lineIdx] ?? 0\n return { line, column0: Math.max(0, idx - lineStart) }\n}\n\n/**\n * Pick the most-likely original source text for `importerFile` from\n * a sourcemap that may contain multiple sources.\n */\nexport function pickOriginalCodeFromSourcesContent(\n map: SourceMapLike | undefined,\n importerFile: string,\n root: string,\n): string | undefined {\n if (!map?.sourcesContent || map.sources.length === 0) {\n return undefined\n }\n\n const file = normalizeFilePath(importerFile)\n const sourceRoot = map.sourceRoot\n const fileSeg = file.split('/').filter(Boolean)\n\n const resolveBase = sourceRoot ? path.resolve(root, sourceRoot) : root\n\n let bestIdx = -1\n let bestScore = -1\n\n for (let i = 0; i < map.sources.length; i++) {\n const content = map.sourcesContent[i]\n if (typeof content !== 'string') continue\n\n const src = map.sources[i] ?? ''\n\n const normalizedSrc = normalizeFilePath(src)\n if (normalizedSrc === file) {\n return content\n }\n\n let resolved: string\n if (!src) {\n resolved = ''\n } else if (path.isAbsolute(src)) {\n resolved = normalizeFilePath(src)\n } else {\n resolved = normalizeFilePath(path.resolve(resolveBase, src))\n }\n if (resolved === file) {\n return content\n }\n\n // Count matching path segments from the end.\n const normalizedSrcSeg = normalizedSrc.split('/').filter(Boolean)\n const resolvedSeg =\n resolved !== normalizedSrc\n ? 
resolved.split('/').filter(Boolean)\n : normalizedSrcSeg\n const score = Math.max(\n segmentSuffixScore(normalizedSrcSeg, fileSeg),\n segmentSuffixScore(resolvedSeg, fileSeg),\n )\n\n if (score > bestScore) {\n bestScore = score\n bestIdx = i\n }\n }\n\n if (bestIdx !== -1 && bestScore >= 1) {\n return map.sourcesContent[bestIdx] ?? undefined\n }\n\n return map.sourcesContent[0] ?? undefined\n}\n\n/** Count matching path segments from the end of `aSeg` against `bSeg`. */\nfunction segmentSuffixScore(aSeg: Array<string>, bSeg: Array<string>): number {\n let score = 0\n for (\n let i = aSeg.length - 1, j = bSeg.length - 1;\n i >= 0 && j >= 0;\n i--, j--\n ) {\n if (aSeg[i] !== bSeg[j]) break\n score++\n }\n return score\n}\n\nasync function mapGeneratedToOriginal(\n map: SourceMapLike | undefined,\n generated: { line: number; column0: number },\n fallbackFile: string,\n): Promise<Loc> {\n const fallback: Loc = {\n file: fallbackFile,\n line: generated.line,\n column: generated.column0 + 1,\n }\n\n if (!map) {\n return fallback\n }\n\n const consumer = await getSourceMapConsumer(map)\n if (!consumer) return fallback\n\n try {\n const orig = consumer.originalPositionFor({\n line: generated.line,\n column: generated.column0,\n })\n if (orig.line != null && orig.column != null) {\n return {\n file: orig.source ? normalizeFilePath(orig.source) : fallbackFile,\n line: orig.line,\n column: orig.column + 1,\n }\n }\n } catch {\n // Malformed sourcemap\n }\n\n return fallback\n}\n\nconst consumerCache = new WeakMap<object, Promise<SourceMapConsumer | null>>()\n\nfunction toRawSourceMap(map: SourceMapLike): RawSourceMap {\n return {\n ...map,\n file: map.file ?? '',\n version: Number(map.version),\n sourcesContent: map.sourcesContent?.map((s) => s ?? '') ?? 
[],\n }\n}\n\nasync function getSourceMapConsumer(\n map: SourceMapLike,\n): Promise<SourceMapConsumer | null> {\n const cached = consumerCache.get(map)\n if (cached) return cached\n\n const promise = (async () => {\n try {\n return await new SourceMapConsumer(toRawSourceMap(map))\n } catch {\n return null\n }\n })()\n\n consumerCache.set(map, promise)\n return promise\n}\n\nexport type ImportLocEntry = { file?: string; line: number; column: number }\n\n/**\n * Cache for import statement locations with reverse index for O(1)\n * invalidation by file. Keys: `${importerFile}::${source}`.\n */\nexport class ImportLocCache {\n private cache = new Map<string, ImportLocEntry | null>()\n private reverseIndex = new Map<string, Set<string>>()\n\n has(key: string): boolean {\n return this.cache.has(key)\n }\n\n get(key: string): ImportLocEntry | null | undefined {\n return this.cache.get(key)\n }\n\n set(key: string, value: ImportLocEntry | null): void {\n this.cache.set(key, value)\n const file = key.slice(0, key.indexOf('::'))\n getOrCreate(this.reverseIndex, file, () => new Set()).add(key)\n }\n\n clear(): void {\n this.cache.clear()\n this.reverseIndex.clear()\n }\n\n /** Remove all cache entries where the importer matches `file`. 
*/\n deleteByFile(file: string): void {\n const keys = this.reverseIndex.get(file)\n if (keys) {\n for (const key of keys) {\n this.cache.delete(key)\n }\n this.reverseIndex.delete(file)\n }\n }\n}\n\nexport type FindImportSpecifierIndex = (code: string, source: string) => number\n\n/**\n * Find the location of an import statement in a transformed module\n * by searching the post-transform code and mapping back via sourcemap.\n * Results are cached in `importLocCache`.\n */\nexport async function findImportStatementLocationFromTransformed(\n provider: TransformResultProvider,\n importerId: string,\n source: string,\n importLocCache: ImportLocCache,\n findImportSpecifierIndex: FindImportSpecifierIndex,\n): Promise<Loc | undefined> {\n const importerFile = normalizeFilePath(importerId)\n const cacheKey = `${importerFile}::${source}`\n if (importLocCache.has(cacheKey)) {\n return importLocCache.get(cacheKey) ?? undefined\n }\n\n try {\n const res = provider.getTransformResult(importerId)\n if (!res) {\n importLocCache.set(cacheKey, null)\n return undefined\n }\n\n const { code, map } = res\n\n const lineIndex = res.lineIndex ?? 
buildLineIndex(code)\n\n const idx = findImportSpecifierIndex(code, source)\n if (idx === -1) {\n importLocCache.set(cacheKey, null)\n return undefined\n }\n\n const generated = indexToLineColWithIndex(lineIndex, idx)\n const loc = await mapGeneratedToOriginal(map, generated, importerFile)\n importLocCache.set(cacheKey, loc)\n return loc\n } catch {\n importLocCache.set(cacheKey, null)\n return undefined\n }\n}\n\n/**\n * Find the first post-compile usage location for a denied import specifier.\n * Best-effort: searches transformed code for non-import uses of imported\n * bindings and maps back to original source via sourcemap.\n */\nexport async function findPostCompileUsageLocation(\n provider: TransformResultProvider,\n importerId: string,\n source: string,\n): Promise<Loc | undefined> {\n try {\n const importerFile = normalizeFilePath(importerId)\n const res = provider.getTransformResult(importerId)\n if (!res) return undefined\n const { code, map } = res\n\n if (!res.lineIndex) {\n res.lineIndex = buildLineIndex(code)\n }\n\n const pos = findPostCompileUsagePos(code, source)\n if (!pos) return undefined\n\n return await mapGeneratedToOriginal(map, pos, importerFile)\n } catch {\n return undefined\n }\n}\n\n/**\n * Annotate each trace hop with the location of the import that created the\n * edge (file:line:col). 
Skips steps that already have a location.\n */\nexport async function addTraceImportLocations(\n provider: TransformResultProvider,\n trace: Array<{\n file: string\n specifier?: string\n line?: number\n column?: number\n }>,\n importLocCache: ImportLocCache,\n findImportSpecifierIndex: FindImportSpecifierIndex,\n): Promise<void> {\n for (const step of trace) {\n if (!step.specifier) continue\n if (step.line != null && step.column != null) continue\n const loc = await findImportStatementLocationFromTransformed(\n provider,\n step.file,\n step.specifier,\n importLocCache,\n findImportSpecifierIndex,\n )\n if (!loc) continue\n step.line = loc.line\n step.column = loc.column\n }\n}\n\n// Code snippet extraction (vitest-style context around a location)\n\nexport interface CodeSnippet {\n /** Source lines with line numbers, e.g. `[\" 6 | import { getSecret } from './secret.server'\", ...]` */\n lines: Array<string>\n /** The highlighted line (1-indexed original line number) */\n highlightLine: number\n /** Clickable file:line reference */\n location: string\n}\n\n/**\n * Build a vitest-style code snippet showing lines surrounding a location.\n *\n * Prefers `originalCode` from the sourcemap's sourcesContent; falls back\n * to transformed code when unavailable.\n */\nexport function buildCodeSnippet(\n provider: TransformResultProvider,\n moduleId: string,\n loc: Loc,\n contextLines: number = 2,\n): CodeSnippet | undefined {\n try {\n const importerFile = normalizeFilePath(moduleId)\n const res = provider.getTransformResult(moduleId)\n if (!res) return undefined\n\n const sourceCode = res.originalCode ?? 
res.code\n const targetLine = loc.line // 1-indexed\n const targetCol = loc.column // 1-indexed\n\n if (targetLine < 1) return undefined\n\n const allLines = sourceCode.split('\\n')\n // Strip trailing \\r from \\r\\n line endings\n for (let i = 0; i < allLines.length; i++) {\n const line = allLines[i]!\n if (line.endsWith('\\r')) allLines[i] = line.slice(0, -1)\n }\n\n const wantStart = Math.max(1, targetLine - contextLines)\n const wantEnd = Math.min(allLines.length, targetLine + contextLines)\n\n if (targetLine > allLines.length) return undefined\n\n const lines = allLines.slice(wantStart - 1, wantEnd)\n const gutterWidth = String(wantEnd).length\n\n const sourceFile = loc.file ?? importerFile\n const snippetLines: Array<string> = []\n for (let i = 0; i < lines.length; i++) {\n const ln = wantStart + i\n const lineContent = lines[i]!\n const lineNumStr = String(ln).padStart(gutterWidth, ' ')\n const marker = ln === targetLine ? '>' : ' '\n snippetLines.push(` ${marker} ${lineNumStr} | ${lineContent}`)\n\n if (ln === targetLine && targetCol > 0) {\n const padding = ' '.repeat(targetCol - 1)\n snippetLines.push(` ${' '.repeat(gutterWidth)} | ${padding}^`)\n }\n }\n\n return {\n lines: snippetLines,\n highlightLine: targetLine,\n location: `${sourceFile}:${targetLine}:${targetCol}`,\n }\n } catch {\n return undefined\n 
}\n}\n"],"names":[],"mappings":";;;;AAiDO,SAAS,eAAe,MAAyB;AACtD,QAAM,UAAyB,CAAC,CAAC;AACjC,WAAS,IAAI,GAAG,IAAI,KAAK,QAAQ,KAAK;AACpC,QAAI,KAAK,WAAW,CAAC,MAAM,IAAI;AAC7B,cAAQ,KAAK,IAAI,CAAC;AAAA,IACpB;AAAA,EACF;AACA,SAAO,EAAE,QAAA;AACX;AAEA,SAAS,WAAW,QAAuB,GAAmB;AAC5D,MAAI,KAAK;AACT,MAAI,KAAK,OAAO;AAChB,SAAO,KAAK,IAAI;AACd,UAAM,MAAO,KAAK,MAAO;AACzB,QAAI,OAAO,GAAG,KAAM,QAAQ,MAAM;AAAA,QAC7B,MAAK;AAAA,EACZ;AACA,SAAO;AACT;AAEA,SAAS,wBACP,WACA,KACmC;AACnC,QAAM,UAAU,UAAU;AAC1B,QAAM,KAAK,WAAW,SAAS,GAAG;AAClC,QAAM,UAAU,KAAK,IAAI,GAAG,KAAK,CAAC;AAClC,QAAM,OAAO,UAAU;AAEvB,QAAM,YAAY,QAAQ,OAAO,KAAK;AACtC,SAAO,EAAE,MAAM,SAAS,KAAK,IAAI,GAAG,MAAM,SAAS,EAAA;AACrD;AAMO,SAAS,mCACd,KACA,cACA,MACoB;AACpB,MAAI,CAAC,KAAK,kBAAkB,IAAI,QAAQ,WAAW,GAAG;AACpD,WAAO;AAAA,EACT;AAEA,QAAM,OAAO,kBAAkB,YAAY;AAC3C,QAAM,aAAa,IAAI;AACvB,QAAM,UAAU,KAAK,MAAM,GAAG,EAAE,OAAO,OAAO;AAE9C,QAAM,cAAc,aAAa,KAAK,QAAQ,MAAM,UAAU,IAAI;AAElE,MAAI,UAAU;AACd,MAAI,YAAY;AAEhB,WAAS,IAAI,GAAG,IAAI,IAAI,QAAQ,QAAQ,KAAK;AAC3C,UAAM,UAAU,IAAI,eAAe,CAAC;AACpC,QAAI,OAAO,YAAY,SAAU;AAEjC,UAAM,MAAM,IAAI,QAAQ,CAAC,KAAK;AAE9B,UAAM,gBAAgB,kBAAkB,GAAG;AAC3C,QAAI,kBAAkB,MAAM;AAC1B,aAAO;AAAA,IACT;AAEA,QAAI;AACJ,QAAI,CAAC,KAAK;AACR,iBAAW;AAAA,IACb,WAAW,KAAK,WAAW,GAAG,GAAG;AAC/B,iBAAW,kBAAkB,GAAG;AAAA,IAClC,OAAO;AACL,iBAAW,kBAAkB,KAAK,QAAQ,aAAa,GAAG,CAAC;AAAA,IAC7D;AACA,QAAI,aAAa,MAAM;AACrB,aAAO;AAAA,IACT;AAGA,UAAM,mBAAmB,cAAc,MAAM,GAAG,EAAE,OAAO,OAAO;AAChE,UAAM,cACJ,aAAa,gBACT,SAAS,MAAM,GAAG,EAAE,OAAO,OAAO,IAClC;AACN,UAAM,QAAQ,KAAK;AAAA,MACjB,mBAAmB,kBAAkB,OAAO;AAAA,MAC5C,mBAAmB,aAAa,OAAO;AAAA,IAAA;AAGzC,QAAI,QAAQ,WAAW;AACrB,kBAAY;AACZ,gBAAU;AAAA,IACZ;AAAA,EACF;AAEA,MAAI,YAAY,MAAM,aAAa,GAAG;AACpC,WAAO,IAAI,eAAe,OAAO,KAAK;AAAA,EACxC;AAEA,SAAO,IAAI,eAAe,CAAC,KAAK;AAClC;AAGA,SAAS,mBAAmB,MAAqB,MAA6B;AAC5E,MAAI,QAAQ;AACZ,WACM,IAAI,KAAK,SAAS,GAAG,IAAI,KAAK,SAAS,GAC3C,KAAK,KAAK,KAAK,GACf,KAAK,KACL;AACA,QAAI,KAAK,CAAC,MAAM,KAAK,CAAC,EAAG;AACzB;AAAA,EACF;AACA,SAAO;AACT;AAEA,eAAe,uBACb,KACA,WACA,cACc;AACd,QAAM,WAAgB;AAAA,IACpB,MAAM;AAAA,IACN,MAAM,UAAU;
AAAA,IAChB,QAAQ,UAAU,UAAU;AAAA,EAAA;AAG9B,MAAI,CAAC,KAAK;AACR,WAAO;AAAA,EACT;AAEA,QAAM,WAAW,MAAM,qBAAqB,GAAG;AAC/C,MAAI,CAAC,SAAU,QAAO;AAEtB,MAAI;AACF,UAAM,OAAO,SAAS,oBAAoB;AAAA,MACxC,MAAM,UAAU;AAAA,MAChB,QAAQ,UAAU;AAAA,IAAA,CACnB;AACD,QAAI,KAAK,QAAQ,QAAQ,KAAK,UAAU,MAAM;AAC5C,aAAO;AAAA,QACL,MAAM,KAAK,SAAS,kBAAkB,KAAK,MAAM,IAAI;AAAA,QACrD,MAAM,KAAK;AAAA,QACX,QAAQ,KAAK,SAAS;AAAA,MAAA;AAAA,IAE1B;AAAA,EACF,QAAQ;AAAA,EAER;AAEA,SAAO;AACT;AAEA,MAAM,oCAAoB,QAAA;AAE1B,SAAS,eAAe,KAAkC;AACxD,SAAO;AAAA,IACL,GAAG;AAAA,IACH,MAAM,IAAI,QAAQ;AAAA,IAClB,SAAS,OAAO,IAAI,OAAO;AAAA,IAC3B,gBAAgB,IAAI,gBAAgB,IAAI,CAAC,MAAM,KAAK,EAAE,KAAK,CAAA;AAAA,EAAC;AAEhE;AAEA,eAAe,qBACb,KACmC;AACnC,QAAM,SAAS,cAAc,IAAI,GAAG;AACpC,MAAI,OAAQ,QAAO;AAEnB,QAAM,WAAW,YAAY;AAC3B,QAAI;AACF,aAAO,MAAM,IAAI,kBAAkB,eAAe,GAAG,CAAC;AAAA,IACxD,QAAQ;AACN,aAAO;AAAA,IACT;AAAA,EACF,GAAA;AAEA,gBAAc,IAAI,KAAK,OAAO;AAC9B,SAAO;AACT;AAQO,MAAM,eAAe;AAAA,EAClB,4BAAY,IAAA;AAAA,EACZ,mCAAmB,IAAA;AAAA,EAE3B,IAAI,KAAsB;AACxB,WAAO,KAAK,MAAM,IAAI,GAAG;AAAA,EAC3B;AAAA,EAEA,IAAI,KAAgD;AAClD,WAAO,KAAK,MAAM,IAAI,GAAG;AAAA,EAC3B;AAAA,EAEA,IAAI,KAAa,OAAoC;AACnD,SAAK,MAAM,IAAI,KAAK,KAAK;AACzB,UAAM,OAAO,IAAI,MAAM,GAAG,IAAI,QAAQ,IAAI,CAAC;AAC3C,gBAAY,KAAK,cAAc,MAAM,0BAAU,IAAA,CAAK,EAAE,IAAI,GAAG;AAAA,EAC/D;AAAA,EAEA,QAAc;AACZ,SAAK,MAAM,MAAA;AACX,SAAK,aAAa,MAAA;AAAA,EACpB;AAAA;AAAA,EAGA,aAAa,MAAoB;AAC/B,UAAM,OAAO,KAAK,aAAa,IAAI,IAAI;AACvC,QAAI,MAAM;AACR,iBAAW,OAAO,MAAM;AACtB,aAAK,MAAM,OAAO,GAAG;AAAA,MACvB;AACA,WAAK,aAAa,OAAO,IAAI;AAAA,IAC/B;AAAA,EACF;AACF;AASA,eAAsB,2CACpB,UACA,YACA,QACA,gBACA,0BAC0B;AAC1B,QAAM,eAAe,kBAAkB,UAAU;AACjD,QAAM,WAAW,GAAG,YAAY,KAAK,MAAM;AAC3C,MAAI,eAAe,IAAI,QAAQ,GAAG;AAChC,WAAO,eAAe,IAAI,QAAQ,KAAK;AAAA,EACzC;AAEA,MAAI;AACF,UAAM,MAAM,SAAS,mBAAmB,UAAU;AAClD,QAAI,CAAC,KAAK;AACR,qBAAe,IAAI,UAAU,IAAI;AACjC,aAAO;AAAA,IACT;AAEA,UAAM,EAAE,MAAM,IAAA,IAAQ;AAEtB,UAAM,YAAY,IAAI,aAAa,eAAe,IAAI;AAEtD,UAAM,MAAM,yBAAyB,MAAM,MAAM;AACjD,QAAI,QAAQ,IAAI;AACd,qBAAe,IAAI,UAAU,IAAI;AACjC,aAAO;AAAA,IACT;AAEA,UAAM,YAAY,wBAAwB,WAAW,GAAG;AACxD,UA
AM,MAAM,MAAM,uBAAuB,KAAK,WAAW,YAAY;AACrE,mBAAe,IAAI,UAAU,GAAG;AAChC,WAAO;AAAA,EACT,QAAQ;AACN,mBAAe,IAAI,UAAU,IAAI;AACjC,WAAO;AAAA,EACT;AACF;AAOA,eAAsB,6BACpB,UACA,YACA,QAC0B;AAC1B,MAAI;AACF,UAAM,eAAe,kBAAkB,UAAU;AACjD,UAAM,MAAM,SAAS,mBAAmB,UAAU;AAClD,QAAI,CAAC,IAAK,QAAO;AACjB,UAAM,EAAE,MAAM,IAAA,IAAQ;AAEtB,QAAI,CAAC,IAAI,WAAW;AAClB,UAAI,YAAY,eAAe,IAAI;AAAA,IACrC;AAEA,UAAM,MAAM,wBAAwB,MAAM,MAAM;AAChD,QAAI,CAAC,IAAK,QAAO;AAEjB,WAAO,MAAM,uBAAuB,KAAK,KAAK,YAAY;AAAA,EAC5D,QAAQ;AACN,WAAO;AAAA,EACT;AACF;AAMA,eAAsB,wBACpB,UACA,OAMA,gBACA,0BACe;AACf,aAAW,QAAQ,OAAO;AACxB,QAAI,CAAC,KAAK,UAAW;AACrB,QAAI,KAAK,QAAQ,QAAQ,KAAK,UAAU,KAAM;AAC9C,UAAM,MAAM,MAAM;AAAA,MAChB;AAAA,MACA,KAAK;AAAA,MACL,KAAK;AAAA,MACL;AAAA,MACA;AAAA,IAAA;AAEF,QAAI,CAAC,IAAK;AACV,SAAK,OAAO,IAAI;AAChB,SAAK,SAAS,IAAI;AAAA,EACpB;AACF;AAmBO,SAAS,iBACd,UACA,UACA,KACA,eAAuB,GACE;AACzB,MAAI;AACF,UAAM,eAAe,kBAAkB,QAAQ;AAC/C,UAAM,MAAM,SAAS,mBAAmB,QAAQ;AAChD,QAAI,CAAC,IAAK,QAAO;AAEjB,UAAM,aAAa,IAAI,gBAAgB,IAAI;AAC3C,UAAM,aAAa,IAAI;AACvB,UAAM,YAAY,IAAI;AAEtB,QAAI,aAAa,EAAG,QAAO;AAE3B,UAAM,WAAW,WAAW,MAAM,IAAI;AAEtC,aAAS,IAAI,GAAG,IAAI,SAAS,QAAQ,KAAK;AACxC,YAAM,OAAO,SAAS,CAAC;AACvB,UAAI,KAAK,SAAS,IAAI,EAAG,UAAS,CAAC,IAAI,KAAK,MAAM,GAAG,EAAE;AAAA,IACzD;AAEA,UAAM,YAAY,KAAK,IAAI,GAAG,aAAa,YAAY;AACvD,UAAM,UAAU,KAAK,IAAI,SAAS,QAAQ,aAAa,YAAY;AAEnE,QAAI,aAAa,SAAS,OAAQ,QAAO;AAEzC,UAAM,QAAQ,SAAS,MAAM,YAAY,GAAG,OAAO;AACnD,UAAM,cAAc,OAAO,OAAO,EAAE;AAEpC,UAAM,aAAa,IAAI,QAAQ;AAC/B,UAAM,eAA8B,CAAA;AACpC,aAAS,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,YAAM,KAAK,YAAY;AACvB,YAAM,cAAc,MAAM,CAAC;AAC3B,YAAM,aAAa,OAAO,EAAE,EAAE,SAAS,aAAa,GAAG;AACvD,YAAM,SAAS,OAAO,aAAa,MAAM;AACzC,mBAAa,KAAK,KAAK,MAAM,IAAI,UAAU,MAAM,WAAW,EAAE;AAE9D,UAAI,OAAO,cAAc,YAAY,GAAG;AACtC,cAAM,UAAU,IAAI,OAAO,YAAY,CAAC;AACxC,qBAAa,KAAK,OAAO,IAAI,OAAO,WAAW,CAAC,MAAM,OAAO,GAAG;AAAA,MAClE;AAAA,IACF;AAEA,WAAO;AAAA,MACL,OAAO;AAAA,MACP,eAAe;AAAA,MACf,UAAU,GAAG,UAAU,IAAI,UAAU,IAAI,SAAS;AAAA,IAAA;AAAA,EAEtD,QAAQ;AACN,WAAO;AAAA,EACT;AACF;"}