ripple 0.2.137 → 0.2.139
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
@@ -3,7 +3,7 @@
   "description": "Ripple is an elegant TypeScript UI framework",
   "license": "MIT",
   "author": "Dominic Gannaway",
-  "version": "0.2.137",
+  "version": "0.2.139",
   "type": "module",
   "module": "src/runtime/index-client.js",
   "main": "src/runtime/index-client.js",
@@ -81,6 +81,6 @@
     "typescript": "^5.9.2"
   },
   "peerDependencies": {
-    "ripple": "0.2.137"
+    "ripple": "0.2.139"
   }
 }
package/src/compiler/index.js
CHANGED
@@ -12,7 +12,7 @@ import { convert_source_map_to_mappings } from './phases/3-transform/segments.js
  * @returns {Program}
  */
 export function parse(source) {
-
+  return parse_module(source);
 }
 
 /**
@@ -23,46 +23,35 @@ export function parse(source) {
  * @returns {object}
  */
 export function compile(source, filename, options = {}) {
-
-
-
-
-
+  const ast = parse_module(source);
+  const analysis = analyze(ast, filename, options);
+  const result = options.mode === 'server'
+    ? transform_server(filename, source, analysis)
+    : transform_client(filename, source, analysis, false);
 
-
+  return result;
 }
 
+/** @import { PostProcessingChanges, LineOffsets } from './phases/3-transform/client/index.js' */
+
 /**
- * Compile Ripple
+ * Compile Ripple component to Volar virtual code with TypeScript mappings
  * @param {string} source
  * @param {string} filename
  * @returns {object} Volar mappings object
  */
 export function compile_to_volar_mappings(source, filename) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-        // Add a unique ID as a string property that will be copied during transformation
-        const id = `__volar_import_${gen_id++}__`;
-        /** @type {any} */ (node).__volar_id = id;
-        source_import_map.set(id, { start, end });
-      }
-    }
-  }
-
-  const analysis = analyze(ast, filename);
-  const transformed = transform_client(filename, source, analysis, true);
-
-  // Create volar mappings directly from the AST instead of relying on esrap's sourcemap
-  return convert_source_map_to_mappings(transformed.ast, source, transformed.js.code, source_import_map);
+  const ast = parse_module(source);
+  const analysis = analyze(ast, filename, { to_ts: true });
+  const transformed = transform_client(filename, source, analysis, true);
+
+  // Create volar mappings with esrap source map for accurate positioning
+  return convert_source_map_to_mappings(
+    transformed.ast,
+    source,
+    transformed.js.code,
+    transformed.js.map,
+    /** @type {PostProcessingChanges} */(transformed.js.post_processing_changes),
+    /** @type {LineOffsets} */(transformed.js.line_offsets)
+  );
 }
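The two rewritten entry points above are now thin wrappers: compile() parses, analyzes, and dispatches to the server or client transform, while compile_to_volar_mappings() runs the client transform in TypeScript mode and converts the result into Volar mappings. The following is a minimal usage sketch, not part of the diff; the import path and the Ripple snippet are assumptions, and the { code, mappings } shape comes from the JSDoc added to segments.js further down.

// Illustrative sketch only - not part of the published package.
// Assumption: the compiler entry is importable via the package's src path.
import { compile, compile_to_volar_mappings } from 'ripple/src/compiler/index.js';

const source = `component App() { <div>{'hello'}</div> }`; // hypothetical Ripple source
const filename = 'App.ripple';

// Client output (default) vs. server output, matching the new compile() body.
const client = compile(source, filename);
const server = compile(source, filename, { mode: 'server' });
console.log(Object.keys(client), Object.keys(server));

// TypeScript/Volar output: { code, mappings } as documented in segments.js.
const { code, mappings } = compile_to_volar_mappings(source, filename);
console.log(code.length, mappings.length);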
@@ -436,9 +436,13 @@ const visitors = {
       const pattern = node.left.declarations[0].id;
       const paths = extract_paths(pattern);
       const scope = state.scopes.get(node);
-
-
-
+      let pattern_id;
+      if (state.to_ts) {
+        pattern_id = pattern;
+      } else {
+        pattern_id = b.id(scope.generate('pattern'));
+        node.left.declarations[0].id = pattern_id;
+      }
 
       for (const path of paths) {
         const name = path.node.name;
@@ -544,10 +548,10 @@ const visitors = {
     }
   },
   /**
-   *
-   * @param {any} node
-   * @param {any} context
-   * @returns
+   *
+   * @param {any} node
+   * @param {any} context
+   * @returns
    */
   TryStatement(node, context) {
     if (!is_inside_component(context)) {
@@ -808,9 +812,9 @@ const visitors = {
   },
 
   /**
-   *
-   * @param {any} node
-   * @param {any} context
+   *
+   * @param {any} node
+   * @param {any} context
    */
   AwaitExpression(node, context) {
     if (is_inside_component(context)) {
@@ -862,6 +866,7 @@ export function analyze(ast, filename, options = {}) {
       analysis,
       inside_head: false,
       inside_server_block: options.mode === 'server',
+      to_ts: options.to_ts ?? false,
     },
     visitors,
   );
@@ -1,5 +1,8 @@
 /** @import {Expression, FunctionExpression, Node, Program} from 'estree' */
 
+/** @typedef {Map<number, {offset: number, delta: number}>} PostProcessingChanges */
+/** @typedef {number[]} LineOffsets */
+
 import { walk } from 'zimmerframe';
 import path from 'node:path';
 import { print } from 'esrap';
@@ -2175,8 +2178,57 @@ function create_tsx_with_typescript_support() {
   const base_tsx = tsx();
 
   // Add custom TypeScript node handlers that aren't in tsx
+
+  // Shared handler for function-like nodes to support component->function mapping
+  // Creates source maps for 'function' keyword by passing node to context.write()
+  const handle_function = (node, context) => {
+    if (node.async) {
+      context.write('async ');
+    }
+    // Write 'function' keyword with node location for source mapping
+    // This creates a mapping from the source position (which may have 'component')
+    // to the generated 'function' keyword
+    context.write('function', node);
+    if (node.generator) {
+      context.write('*');
+    }
+    // FunctionDeclaration always has a space before id, FunctionExpression only if id exists
+    if (node.type === 'FunctionDeclaration' || node.id) {
+      context.write(' ');
+    }
+    if (node.id) {
+      context.visit(node.id);
+    }
+    if (node.typeParameters) {
+      context.visit(node.typeParameters);
+    }
+    context.write('(');
+    for (let i = 0; i < node.params.length; i++) {
+      if (i > 0) context.write(', ');
+      context.visit(node.params[i]);
+    }
+    context.write(')');
+    if (node.returnType) {
+      context.visit(node.returnType);
+    }
+    context.write(' ');
+    if (node.body) {
+      context.visit(node.body);
+    }
+  };
+
   return {
     ...base_tsx,
+    // Custom handler for FunctionDeclaration to support component->function mapping
+    // Needed for volar mappings and intellisense on function or component keyword
+    FunctionDeclaration(node, context) {
+      handle_function(node, context);
+    },
+    // Custom handler for FunctionExpression to support component->function mapping
+    // This is used for components transformed by the Component visitor
+    FunctionExpression(node, context) {
+      handle_function(node, context);
+    },
     // Custom handler for TSParenthesizedType: (Type)
     TSParenthesizedType(node, context) {
       context.write('(');
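These handlers lean on esrap's printer context: context.visit() delegates back to the language handlers, and context.write(text, node) emits text while recording a source-map segment at the node's original location, which is what makes the component-to-function keyword mapping possible. A condensed sketch of the override pattern follows; it is illustrative only, and base_tsx, program, source and filename stand in for values this file already has in scope.

// Illustrative sketch - not the package's code. base_tsx, program, source and
// filename are stand-ins for bindings this file already has in scope.
const language = {
  ...base_tsx,
  FunctionDeclaration(node, context) {
    // The second argument ties the emitted text to node.loc, so the generated
    // 'function' keyword maps back to the source 'component' keyword.
    context.write('function', node);
    context.write(' ');
    if (node.id) context.visit(node.id);
    context.write('(');
    for (let i = 0; i < node.params.length; i++) {
      if (i > 0) context.write(', ');
      context.visit(node.params[i]);
    }
    context.write(') ');
    context.visit(node.body);
  },
};

const printed = print(program, language, {
  sourceMapContent: source,
  sourceMapSource: path.basename(filename),
});
// printed.code and printed.map now carry a segment for the rewritten keyword.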
@@ -2216,13 +2268,14 @@ function create_tsx_with_typescript_support() {
       context.write(' }');
     },
     // Custom handler for TSTypeParameter: K in T (for mapped types)
-    //
+    // acorn-ts has a bug where `in` is printed as `extends`, so we override it here
     TSTypeParameter(node, context) {
       // For mapped types, the name is just a string, not an Identifier node
+      // Pass the node as second parameter to context.write() to create source map entry
       if (typeof node.name === 'string') {
-        context.write(node.name);
+        context.write(node.name, node);
       } else if (node.name && node.name.name) {
-        context.write(node.name.name);
+        context.write(node.name.name, node.name);
       }
       if (node.constraint) {
         context.write(' in ');
@@ -2268,6 +2321,14 @@ function create_tsx_with_typescript_support() {
   };
 }
 
+/**
+ * Transform Ripple AST to JavaScript/TypeScript
+ * @param {string} filename - Source filename
+ * @param {string} source - Original source code
+ * @param {any} analysis - Analysis result
+ * @param {boolean} to_ts - Whether to generate TypeScript output
+ * @returns {{ ast: any, js: { code: string, map: any, post_processing_changes?: PostProcessingChanges, line_offsets?: LineOffsets }, css: any }}
+ */
 export function transform_client(filename, source, analysis, to_ts) {
   /**
    * User's named imports from 'ripple' so we can reuse them in TS output
@@ -2333,18 +2394,78 @@ export function transform_client(filename, source, analysis, to_ts) {
     );
   }
 
-  const
-
-
-
+  const language_handler = to_ts ? create_tsx_with_typescript_support() : tsx();
+
+  const js = /** @type {ReturnType<typeof print> & { post_processing_changes?: PostProcessingChanges, line_offsets?: number[] }} */ (
+    print(program, language_handler, {
+      sourceMapContent: source,
+      sourceMapSource: path.basename(filename),
+    })
+  );
 
   // Post-process TypeScript output to remove 'declare' from function overload signatures
   // Function overload signatures in regular .ts files should not have 'declare' keyword
+  // Track changes for source map adjustment - organize them for efficient lookup
+  /** @type {PostProcessingChanges | null} */
+  let post_processing_changes = null;
+  /** @type {LineOffsets} */
+  let line_offsets = [];
+
   if (to_ts) {
+    // Build line offset map for converting byte offset to line:column
+    line_offsets = [0];
+    for (let i = 0; i < js.code.length; i++) {
+      if (js.code[i] === '\n') {
+        line_offsets.push(i + 1);
+      }
+    }
+
+    /**
+     * Convert byte offset to line number (1-based)
+     * @param {number} offset
+     * @returns {number}
+     */
+    const offset_to_line = (offset) => {
+      for (let i = 0; i < line_offsets.length; i++) {
+        if (offset >= line_offsets[i] && (i === line_offsets.length - 1 || offset < line_offsets[i + 1])) {
+          return i + 1;
+        }
+      }
+      return 1;
+    };
+
+    /** @type {Map<number, {offset: number, delta: number}>} */
+    const line_deltas = new Map(); // line -> {offset: first change offset, delta: total delta for line}
+
     // Remove 'export declare function' -> 'export function' (for overloads only, not implementations)
     // Match: export declare function name(...): type;
     // Don't match: export declare function name(...): type { (has body)
-    js.code = js.code.replace(/^(export\s+)declare\s+(function\s+\w+[^{\n]*;)$/gm,
+    js.code = js.code.replace(/^(export\s+)declare\s+(function\s+\w+[^{\n]*;)$/gm, (match, p1, p2, offset) => {
+      const replacement = p1 + p2;
+      const line = offset_to_line(offset);
+      const delta = replacement.length - match.length; // negative (removing 'declare ')
+
+      // Track first change offset and total delta per line
+      if (!line_deltas.has(line)) {
+        line_deltas.set(line, { offset, delta });
+      } else {
+        // Additional change on same line - accumulate delta
+        // @ts-ignore
+        line_deltas.get(line).delta += delta;
+      }
+
+      return replacement;
+    });
+
+    post_processing_changes = line_deltas;
+  }
+
+  if (post_processing_changes) {
+    js.post_processing_changes = post_processing_changes;
+  }
+
+  if (line_offsets.length > 0) {
+    js.line_offsets = line_offsets;
   }
 
   const css = render_stylesheets(state.stylesheets);
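The post-processing block has to keep esrap's source map honest after it strips 'declare' from overload signatures, so it records, per generated line, the offset of the first edit and the accumulated length delta, alongside a line-offset table for byte-offset/line conversions. A self-contained sketch of that bookkeeping on a plain string follows, illustrative only (the sample code string is made up):

// Illustrative sketch of the same bookkeeping on a made-up string.
let code = `export declare function add(a: number, b: number): number;\nexport function add(a, b) { return a + b; }\n`;

// Line-offset table: line N (1-based) starts at line_offsets[N - 1].
const line_offsets = [0];
for (let i = 0; i < code.length; i++) {
  if (code[i] === '\n') line_offsets.push(i + 1);
}
const offset_to_line = (offset) => {
  for (let i = 0; i < line_offsets.length; i++) {
    if (offset >= line_offsets[i] && (i === line_offsets.length - 1 || offset < line_offsets[i + 1])) return i + 1;
  }
  return 1;
};

// Strip 'declare' from overload signatures, remembering the per-line delta.
const line_deltas = new Map();
code = code.replace(/^(export\s+)declare\s+(function\s+\w+[^{\n]*;)$/gm, (match, p1, p2, offset) => {
  const replacement = p1 + p2;
  const line = offset_to_line(offset);
  const delta = replacement.length - match.length; // negative: 'declare ' removed
  if (!line_deltas.has(line)) line_deltas.set(line, { offset, delta });
  else line_deltas.get(line).delta += delta;
  return replacement;
});

console.log(line_deltas); // Map(1) { 1 => { offset: 0, delta: -8 } }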
@@ -1,4 +1,5 @@
 import { walk } from 'zimmerframe';
+import { build_source_to_generated_map, get_generated_position } from '../../source-map-utils.js';
 
 export const mapping_data = {
   verification: true,
@@ -15,21 +16,59 @@ export const mapping_data = {
   // references: true,
 };
 
+/**
+ * @import { PostProcessingChanges } from './client/index.js';
+ */
+
 /**
  * Create Volar mappings by walking the transformed AST
  * @param {any} ast - The transformed AST
  * @param {string} source - Original source code
- * @param {string} generated_code - Generated code
- * @param {
- * @
+ * @param {string} generated_code - Generated code (returned in output, not used for searching)
+ * @param {object} esrap_source_map - Esrap source map for accurate position lookup
+ * @param {PostProcessingChanges } post_processing_changes - Optional post-processing changes
+ * @param {number[]} line_offsets - Pre-computed line offsets array for generated code
+ * @returns {{ code: string, mappings: Array<{sourceOffsets: number[], generatedOffsets: number[], lengths: number[], data: any}> }}
  */
-export function convert_source_map_to_mappings(ast, source, generated_code,
+export function convert_source_map_to_mappings(ast, source, generated_code, esrap_source_map, post_processing_changes, line_offsets) {
   /** @type {Array<{sourceOffsets: number[], generatedOffsets: number[], lengths: number[], data: any}>} */
   const mappings = [];
 
-  //
-
-
+  // Build line offset maps for source and generated code
+  // This allows us to convert line/column positions to byte offsets
+  const build_line_offsets = (text) => {
+    const offsets = [0]; // Line 1 starts at offset 0
+    for (let i = 0; i < text.length; i++) {
+      if (text[i] === '\n') {
+        offsets.push(i + 1);
+      }
+    }
+    return offsets;
+  };
+  const source_line_offsets = build_line_offsets(source);
+
+  /**
+   * Convert line/column to byte offset
+   * @param {number} line
+   * @param {number} column
+   * @param {number[]} line_offsets
+   * @returns {number | null}
+   */
+  const loc_to_offset = (line, column, line_offsets) => {
+    if (line < 1 || line > line_offsets.length) return null;
+    return line_offsets[line - 1] + column;
+  };
+
+  /**
+   * Convert generated line/column to byte offset using pre-computed line_offsets
+   * @param {number} line
+   * @param {number} column
+   * @returns {number}
+   */
+  const gen_loc_to_offset = (line, column) => {
+    if (line === 1) return column;
+    return line_offsets[line - 1] + column;
+  };
 
   // Map to track capitalized names: original name -> capitalized name
   /** @type {Map<string, string>} */
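convert_source_map_to_mappings ultimately emits Volar-style mapping objects: parallel arrays of source byte offsets, generated byte offsets and lengths, plus a data object describing what the mapping enables. A small self-contained sketch of the offset conversion and the resulting shape, illustrative only (the strings and positions are made up):

// Illustrative sketch - made-up source and generated strings.
const source = "component App() {}\n";
const generated = "function App() {}\n";

const build_line_offsets = (text) => {
  const offsets = [0];
  for (let i = 0; i < text.length; i++) {
    if (text[i] === '\n') offsets.push(i + 1);
  }
  return offsets;
};
const source_line_offsets = build_line_offsets(source);
const loc_to_offset = (line, column, offsets) =>
  line < 1 || line > offsets.length ? null : offsets[line - 1] + column;

// 'App' sits at line 1, column 10 in the source and column 9 in the generated code.
const mapping = {
  sourceOffsets: [loc_to_offset(1, 10, source_line_offsets)],
  generatedOffsets: [9],
  lengths: ['App'.length],
  data: { verification: true }, // the real mappings attach the shared mapping_data object
};
console.log(mapping.sourceOffsets[0], generated.slice(9, 12)); // 10 'App'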
@@ -51,144 +90,51 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
     }
   });
 
-
-
-
-
-
-  const is_word_boundary = (char) => {
-    return char === undefined || !/[a-zA-Z0-9_$]/.test(char);
-  };
-
-  /**
-   * Check if a position is inside a comment
-   * @param {number} pos - Position to check
-   * @returns {boolean}
-   */
-  const is_in_comment = (pos) => {
-    // Check for single-line comment: find start of line and check if there's // before this position
-    let lineStart = source.lastIndexOf('\n', pos - 1) + 1;
-    const lineBeforePos = source.substring(lineStart, pos);
-    if (lineBeforePos.includes('//')) {
-      return true;
-    }
-    // Check for multi-line comment: look backwards for /* and forwards for */
-    const lastCommentStart = source.lastIndexOf('/*', pos);
-    if (lastCommentStart !== -1) {
-      const commentEnd = source.indexOf('*/', lastCommentStart);
-      if (commentEnd === -1 || commentEnd > pos) {
-        return true; // We're inside an unclosed or open comment
-      }
-    }
-    return false;
-  };
-
-  /**
-   * Find text in source string, searching character by character from sourceIndex
-   * @param {string} text - Text to find
-   * @returns {number|null} - Source position or null
-   */
-  const find_in_source = (text) => {
-    for (let i = source_index; i <= source.length - text.length; i++) {
-      let match = true;
-      for (let j = 0; j < text.length; j++) {
-        if (source[i + j] !== text[j]) {
-          match = false;
-          break;
-        }
-      }
-      if (match) {
-        // Skip if this match is inside a comment
-        if (is_in_comment(i)) {
-          continue;
-        }
-
-        // Check word boundaries for identifier-like tokens
-        const isIdentifierLike = /^[a-zA-Z_$]/.test(text);
-        if (isIdentifierLike) {
-          const charBefore = source[i - 1];
-          const charAfter = source[i + text.length];
-          if (!is_word_boundary(charBefore) || !is_word_boundary(charAfter)) {
-            continue; // Not a whole word match, keep searching
-          }
-        }
-
-        source_index = i + text.length;
-        return i;
-      }
-    }
-    return null;
-  };
-
-  /**
-   * Find text in generated code, searching character by character from generated_index
-   * @param {string} text - Text to find
-   * @returns {number|null} - Generated position or null
-   */
-  const find_in_generated = (text) => {
-    for (let i = generated_index; i <= generated_code.length - text.length; i++) {
-      let match = true;
-      for (let j = 0; j < text.length; j++) {
-        if (generated_code[i + j] !== text[j]) {
-          match = false;
-          break;
-        }
-      }
-      if (match) {
-        // Check word boundaries for identifier-like tokens
-        const isIdentifierLike = /^[a-zA-Z_$]/.test(text);
-        if (isIdentifierLike) {
-          const charBefore = generated_code[i - 1];
-          const charAfter = generated_code[i + text.length];
-          if (!is_word_boundary(charBefore) || !is_word_boundary(charAfter)) {
-            continue; // Not a whole word match, keep searching
-          }
-        }
-
-        generated_index = i + text.length;
-        return i;
-      }
-    }
-    return null;
-  };
+  const adjusted_source_map = build_source_to_generated_map(
+    esrap_source_map,
+    post_processing_changes,
+    line_offsets
+  );
 
   // Collect text tokens from AST nodes
-  //
-  /** @type {Array<
+  // All tokens must have source/generated text and loc property for accurate positioning
+  /** @type {Array<{
+    source: string,
+    generated: string,
+    is_import_statement?: boolean,
+    loc: {start: {line: number, column: number}, end: {line: number, column: number}}
+  }>} */
   const tokens = [];
 
-  // Collect import declarations for full-statement mappings
-  /** @type {Array<{id: string, node: any}>} */
-  const import_declarations = [];
-
   // We have to visit everything in generated order to maintain correct indices
   walk(ast, null, {
     _(node, { visit }) {
       // Collect key node types: Identifiers, Literals, and JSX Elements
-      //
+      // Skip nodes without .loc (synthesized during transformation, not in original source)
       if (node.type === 'Identifier' && node.name) {
        if (node.loc) {
          // Check if this identifier has tracked_shorthand metadata (e.g., TrackedMap -> #Map)
          if (node.metadata?.tracked_shorthand) {
-            tokens.push({ source: node.metadata.tracked_shorthand, generated: node.name });
+            tokens.push({ source: node.metadata.tracked_shorthand, generated: node.name, loc: node.loc });
          } else {
            // Check if this identifier was capitalized (reverse lookup)
            const original_name = reverse_capitalized_names.get(node.name);
            if (original_name) {
              // This is a capitalized name in generated code, map to lowercase in source
-              tokens.push({ source: original_name, generated: node.name });
+              tokens.push({ source: original_name, generated: node.name, loc: node.loc });
            } else {
              // Check if this identifier should be capitalized (forward lookup)
              const cap_name = capitalized_names.get(node.name);
              if (cap_name) {
-                tokens.push({ source: node.name, generated: cap_name });
+                tokens.push({ source: node.name, generated: cap_name, loc: node.loc });
              } else {
                // Check if this identifier should be capitalized (forward lookup)
                const cap_name = capitalized_names.get(node.name);
                if (cap_name) {
-                  tokens.push({ source: node.name, generated: cap_name });
+                  tokens.push({ source: node.name, generated: cap_name, loc: node.loc });
                } else {
-
+                  // Store token with .loc for accurate positioning
+                  tokens.push({ source: node.name, generated: node.name, loc: node.loc });
                }
              }
            }
@@ -200,33 +146,32 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
          // Check if this was capitalized (reverse lookup)
          const originalName = reverse_capitalized_names.get(node.name);
          if (originalName) {
-            tokens.push({ source: originalName, generated: node.name });
+            tokens.push({ source: originalName, generated: node.name, loc: node.loc });
          } else {
            // Check if this should be capitalized (forward lookup)
            const capitalizedName = capitalized_names.get(node.name);
            if (capitalizedName) {
-              tokens.push({ source: node.name, generated: capitalizedName });
+              tokens.push({ source: node.name, generated: capitalizedName, loc: node.loc });
            } else {
-              tokens.push(node.name);
+              tokens.push({ source: node.name, generated: node.name, loc: node.loc });
            }
          }
        }
        return; // Leaf node, don't traverse further
      } else if (node.type === 'Literal' && node.raw) {
        if (node.loc) {
-          tokens.push(node.raw);
+          tokens.push({ source: node.raw, generated: node.raw, loc: node.loc });
        }
        return; // Leaf node, don't traverse further
      } else if (node.type === 'ImportDeclaration') {
-        //
+        // Add import declaration as a special token for full-statement mapping
        // TypeScript reports unused imports with diagnostics covering the entire statement
-
-
-
-
-
-
-          node: node
+        if (node.loc) {
+          tokens.push({
+            source: '',
+            generated: '',
+            loc: node.loc,
+            is_import_statement: true
          });
        }
 
@@ -336,29 +281,37 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
 
        // 3. Push closing tag name (not visited by AST walker)
        if (!node.openingElement?.selfClosing && node.closingElement?.name?.type === 'JSXIdentifier') {
-          const
+          const closingNameNode = node.closingElement.name;
+          const closingName = closingNameNode.name;
          // Check if this was capitalized (reverse lookup)
          const originalName = reverse_capitalized_names.get(closingName);
          if (originalName) {
-            tokens.push({ source: originalName, generated: closingName });
+            tokens.push({ source: originalName, generated: closingName, loc: closingNameNode.loc });
          } else {
            // Check if this should be capitalized (forward lookup)
            const capitalizedName = capitalized_names.get(closingName);
            if (capitalizedName) {
-              tokens.push({ source: closingName, generated: capitalizedName });
+              tokens.push({ source: closingName, generated: capitalizedName, loc: closingNameNode.loc });
            } else {
-              tokens.push(closingName);
+              tokens.push({ source: closingName, generated: closingName, loc: closingNameNode.loc });
            }
          }
        }
 
        return;
      } else if (node.type === 'FunctionDeclaration' || node.type === 'FunctionExpression' || node.type === 'ArrowFunctionExpression') {
-        //
-        if (node.
-
-
-        tokens.push(
+        // Add function/component keyword token
+        if (node.type === 'FunctionDeclaration' || node.type === 'FunctionExpression') {
+          const source_keyword = node.metadata?.was_component ? 'component' : 'function';
+          // Add token for the keyword - esrap already mapped it via context.write('function', node)
+          tokens.push({
+            source: source_keyword,
+            generated: 'function',
+            loc: {
+              start: { line: node.loc.start.line, column: node.loc.start.column },
+              end: { line: node.loc.start.line, column: node.loc.start.column + source_keyword.length }
+            }
+          });
        }
 
        // Visit in source order: id, params, body
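The FunctionDeclaration/FunctionExpression branch is what lets hover and go-to-definition work on Ripple's component keyword: when the node's metadata marks it as a component, the token maps the nine characters of 'component' in the source onto the 'function' keyword that esrap printed at the same start position. A small sketch of the token that gets pushed, illustrative only (the node and positions are made up):

// Illustrative sketch - a made-up node for `component App() {}` at line 1, column 0.
const node = {
  type: 'FunctionDeclaration',
  metadata: { was_component: true },
  loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 18 } },
};

const source_keyword = node.metadata?.was_component ? 'component' : 'function';
const token = {
  source: source_keyword,   // 'component' (9 characters in the .ripple source)
  generated: 'function',    // what esrap emitted via context.write('function', node)
  loc: {
    start: { line: node.loc.start.line, column: node.loc.start.column },
    end: { line: node.loc.start.line, column: node.loc.start.column + source_keyword.length },
  },
};
console.log(token.loc.end.column); // 9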
@@ -835,8 +788,12 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
        }
        return;
      } else if (node.type === 'TSTypeParameter') {
-        // Type parameter like T in <T>
-
+        // Type parameter like T in <T> or key in mapped types
+        // Note: node.name is a string, not an Identifier node
+        if (node.name && node.loc && typeof node.name === 'string') {
+          tokens.push({ source: node.name, generated: node.name, loc: node.loc });
+        } else if (node.name && typeof node.name === 'object') {
+          // In some cases, name might be an Identifier node
          visit(node.name);
        }
        if (node.constraint) {
@@ -857,9 +814,13 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
        if (node.typeName) {
          visit(node.typeName);
        }
+        // Check both typeParameters and typeArguments (different parsers use different names)
        if (node.typeParameters) {
          visit(node.typeParameters);
        }
+        if (node.typeArguments) {
+          visit(node.typeArguments);
+        }
        return;
      } else if (node.type === 'TSQualifiedName') {
        // Qualified name (e.g., Foo.Bar in types)
@@ -900,6 +861,10 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
        if (node.parameters) {
          for (const param of node.parameters) {
            visit(param);
+            // Visit type annotation on the parameter
+            if (param.typeAnnotation) {
+              visit(param.typeAnnotation);
+            }
          }
        }
        if (node.typeAnnotation) {
@@ -934,6 +899,10 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
        if (node.parameters) {
          for (const param of node.parameters) {
            visit(param);
+            // Visit type annotation on the parameter
+            if (param.typeAnnotation) {
+              visit(param.typeAnnotation);
+            }
          }
        }
        if (node.typeAnnotation) {
@@ -945,6 +914,10 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
        if (node.parameters) {
          for (const param of node.parameters) {
            visit(param);
+            // Visit type annotation on the parameter
+            if (param.typeAnnotation) {
+              visit(param.typeAnnotation);
+            }
          }
        }
        if (node.typeAnnotation) {
@@ -959,6 +932,10 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
        if (node.parameters) {
          for (const param of node.parameters) {
            visit(param);
+            // Visit type annotation on the parameter
+            if (param.typeAnnotation) {
+              visit(param.typeAnnotation);
+            }
          }
        }
        if (node.typeAnnotation) {
@@ -1190,20 +1167,54 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
   });
 
   // Process each token in order
+  // All tokens now have .loc property - no need for fallback logic
   for (const token of tokens) {
-
+    const source_text = token.source;
 
-
-
-
-
-
-
-
+    // Handle import statement full-statement mapping
+    if (token.is_import_statement) {
+      // Get source position from start
+      const source_start = loc_to_offset(token.loc.start.line, token.loc.start.column, source_line_offsets);
+      const source_end = loc_to_offset(token.loc.end.line, token.loc.end.column, source_line_offsets);
+
+      // Get generated positions using source map
+      const gen_start_pos = get_generated_position(token.loc.start.line, token.loc.start.column, adjusted_source_map);
+      const gen_end_pos = get_generated_position(token.loc.end.line, token.loc.end.column, adjusted_source_map);
+
+      if (source_start !== null && source_end !== null && gen_start_pos && gen_end_pos) {
+        // Convert generated line:col to byte offsets
+        const gen_start = gen_loc_to_offset(gen_start_pos.line, gen_start_pos.column);
+        const gen_end = gen_loc_to_offset(gen_end_pos.line, gen_end_pos.column);
+
+        const source_length = source_end - source_start;
+        const gen_length = gen_end - gen_start;
+
+        mappings.push({
+          sourceOffsets: [source_start],
+          generatedOffsets: [gen_start],
+          lengths: [Math.min(source_length, gen_length)],
+          data: {
+            // only verification (diagnostics) to avoid duplicate hover/completion
+            verification: true
+          },
+        });
+      }
+      continue;
    }
 
-
-    const
+    // Use .loc to get the exact source position
+    const source_pos = loc_to_offset(token.loc.start.line, token.loc.start.column, source_line_offsets);
+
+    // Get generated position using source map
+    const gen_line_col = get_generated_position(token.loc.start.line, token.loc.start.column, adjusted_source_map);
+    let gen_pos = null;
+    if (gen_line_col) {
+      // Convert generated line:col to byte offset
+      gen_pos = gen_loc_to_offset(gen_line_col.line, gen_line_col.column);
+    } else {
+      // No mapping found in source map - this shouldn't happen since all tokens should have mappings
+      console.warn(`[segments.js] No source map entry for token "${source_text}" at ${token.loc.start.line}:${token.loc.start.column}`);
+    }
 
    if (source_pos !== null && gen_pos !== null) {
      mappings.push({
@@ -1215,52 +1226,7 @@ export function convert_source_map_to_mappings(ast, source, generated_code, sour
    }
  }
 
-  //
-  // TypeScript reports unused import diagnostics covering the entire import statement
-  // Use verification-only mapping to avoid duplicate hover/completion
-
-  // Use the source import map from the original AST (before transformation)
-  // The __volar_id property is preserved through transformation via object spread
-  if (source_import_map && import_declarations.length > 0) {
-    // We need to find where each import appears in the generated code
-    // Search for "import" keywords and match them to our collected imports
-    let gen_search_index = 0;
-
-    for (const import_decl of import_declarations) {
-      // Look up the source position using the __volar_id
-      const source_range = source_import_map.get(import_decl.id);
-      if (!source_range) continue; // Skip if we don't have source info for this ID
-
-      // Find this import statement in the generated code
-      // Search for "import " starting from our last position
-      const import_keyword_index = generated_code.indexOf('import ', gen_search_index);
-      if (import_keyword_index === -1) continue; // Couldn't find it
-
-      // Find the semicolon or end of line for this import
-      let gen_end = generated_code.indexOf(';', import_keyword_index);
-      if (gen_end === -1) gen_end = generated_code.indexOf('\n', import_keyword_index);
-      if (gen_end === -1) gen_end = generated_code.length;
-      else gen_end += 1; // Include the semicolon
-
-      const get_start = import_keyword_index;
-      gen_search_index = gen_end; // Next search starts after this import
-
-      const source_length = source_range.end - source_range.start;
-      const get_length = gen_end - get_start;
-
-      mappings.push({
-        sourceOffsets: [source_range.start],
-        generatedOffsets: [get_start],
-        lengths: [Math.min(source_length, get_length)],
-        data: {
-          // only verification (diagnostics) to avoid duplicate hover/completion
-          verification: true
-        },
-      });
-    }
-  }
-
-  // Sort mappings by source offset
+  // Sort mappings by source offset // Sort mappings by source offset
  mappings.sort((a, b) => a.sourceOffsets[0] - b.sourceOffsets[0]);
 
  // Add a mapping for the very beginning of the file to handle import additions
@@ -0,0 +1,129 @@
+import { decode } from '@jridgewell/sourcemap-codec';
+
+/** @import { PostProcessingChanges, LineOffsets } from './phases/3-transform/client/index.js' */
+
+/** @typedef {{line: number, column: number}} GeneratedPosition */
+/** @typedef {Map<string, GeneratedPosition[]>} SourceToGeneratedMap */
+
+/**
+ * Build a source-to-generated position lookup map from an esrap source map
+ * Applies post-processing adjustments during map building for efficiency
+ * @param {object} source_map - The source map object from esrap (v3 format)
+ * @param {PostProcessingChanges} post_processing_changes - Optional post-processing changes to apply
+ * @param {LineOffsets} line_offsets - Pre-computed line offsets array
+ * @returns {SourceToGeneratedMap} Map from "sourceLine:sourceColumn" to array of generated positions
+ */
+export function build_source_to_generated_map(source_map, post_processing_changes, line_offsets) {
+  /** @type {SourceToGeneratedMap} */
+  const map = new Map();
+
+  // Decode the VLQ-encoded mappings string
+  // @ts-ignore
+  const decoded = decode(source_map.mappings);
+
+  /**
+   * Convert line/column position to byte offset
+   * @param {number} line - 1-based line number
+   * @param {number} column - 0-based column number
+   * @returns {number} Byte offset
+   */
+  const line_col_to_byte_offset = (line, column) => {
+    return line_offsets[line - 1] + column;
+  };
+
+  /**
+   * Convert byte offset to line/column
+   * @param {number} offset
+   * @returns {{ line: number, column: number }}
+   */
+  const offset_to_line_col = (offset) => {
+    // Binary search
+    let left = 0;
+    let right = line_offsets.length - 1;
+    let line = 1;
+
+    while (left <= right) {
+      const mid = Math.floor((left + right) / 2);
+      if (offset >= line_offsets[mid] && (mid === line_offsets.length - 1 || offset < line_offsets[mid + 1])) {
+        line = mid + 1;
+        break;
+      } else if (offset < line_offsets[mid]) {
+        right = mid - 1;
+      } else {
+        left = mid + 1;
+      }
+    }
+
+    const column = offset - line_offsets[line - 1];
+    return { line, column };
+  };
+
+  // decoded is an array of lines, each line is an array of segments
+  // Each segment is [generatedColumn, sourceIndex, sourceLine, sourceColumn, nameIndex?]
+  for (let generated_line = 0; generated_line < decoded.length; generated_line++) {
+    const line = decoded[generated_line];
+
+    for (const segment of line) {
+      if (segment.length >= 4) {
+        let generated_column = segment[0];
+        // just keeping this unused for context
+        // const source_index = segment[1]; // which source file (we only have one)
+        const source_line = /** @type {number} */ (segment[2]);
+        const source_column = /** @type {number} */ (segment[3]);
+
+        // Apply post-processing adjustments if needed
+        let adjusted_line = generated_line + 1;
+        let adjusted_column = generated_column;
+
+        if (post_processing_changes) {
+          const line_change = post_processing_changes.get(adjusted_line);
+
+          if (line_change) {
+            // Check if this position is affected by the change
+            const pos_offset = line_col_to_byte_offset(adjusted_line, adjusted_column);
+
+            if (pos_offset >= line_change.offset) {
+              // Position is on or after the change - apply delta
+              const adjusted_offset = pos_offset + line_change.delta;
+              const adjusted_pos = offset_to_line_col(adjusted_offset);
+              adjusted_line = adjusted_pos.line;
+              adjusted_column = adjusted_pos.column;
+            }
+          }
+        }
+
+        // Create key from source position (1-indexed line, 0-indexed column)
+        const key = `${source_line + 1}:${source_column}`;
+
+        // Store adjusted generated position
+        const gen_pos = { line: adjusted_line, column: adjusted_column };
+
+        if (!map.has(key)) {
+          map.set(key, []);
+        }
+        /** @type {GeneratedPosition[]} */ (map.get(key)).push(gen_pos);
+      }
+    }
+  }
+
+  return map;
+}
+
+/**
+ * Look up generated position for a given source position
+ * @param {number} source_line - 1-based line number in source
+ * @param {number} source_column - 0-based column number in source
+ * @param {SourceToGeneratedMap} source_to_gen_map - Lookup map
+ * @returns {{line: number, column: number} | null} Generated position or null if not found
+ */
+export function get_generated_position(source_line, source_column, source_to_gen_map) {
+  const key = `${source_line}:${source_column}`;
+  const positions = source_to_gen_map.get(key);
+
+  if (!positions || positions.length === 0) {
+    return null;
+  }
+
+  // If multiple generated positions map to same source, return the first
+  return positions[0];
+}