ripple 0.2.174 → 0.2.175
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/package.json
CHANGED
@@ -3,7 +3,7 @@
   "description": "Ripple is an elegant TypeScript UI framework",
   "license": "MIT",
   "author": "Dominic Gannaway",
-  "version": "0.2.174",
+  "version": "0.2.175",
   "type": "module",
   "module": "src/runtime/index-client.js",
   "main": "src/runtime/index-client.js",
@@ -82,6 +82,6 @@
     "@volar/language-core": "~2.4.23"
   },
   "peerDependencies": {
-    "ripple": "0.2.174"
+    "ripple": "0.2.175"
   }
 }
@@ -2527,6 +2527,62 @@ function create_tsx_with_typescript_support() {
 	FunctionExpression(node, context) {
 		handle_function(node, context);
 	},
+	// Custom handler for ImportDeclaration to ensure 'import' keyword has source mapping
+	// This creates a source map entry at the start of the import statement
+	// Esrap's default handler writes 'import' without passing the node, so no source map entry
+	ImportDeclaration(node, context) {
+		// Write 'import' keyword with node location for source mapping
+		context.write('import', node);
+		context.write(' ');
+
+		// Write specifiers - handle default, namespace, and named imports
+		if (node.specifiers && node.specifiers.length > 0) {
+			let default_specifier = null;
+			let namespace_specifier = null;
+			const named_specifiers = [];
+
+			for (const spec of node.specifiers) {
+				if (spec.type === 'ImportDefaultSpecifier') {
+					default_specifier = spec;
+				} else if (spec.type === 'ImportNamespaceSpecifier') {
+					namespace_specifier = spec;
+				} else if (spec.type === 'ImportSpecifier') {
+					named_specifiers.push(spec);
+				}
+			}
+
+			// Write default import
+			if (default_specifier) {
+				context.visit(default_specifier);
+				if (namespace_specifier || named_specifiers.length > 0) {
+					context.write(', ');
+				}
+			}
+
+			// Write namespace import
+			if (namespace_specifier) {
+				context.visit(namespace_specifier);
+				if (named_specifiers.length > 0) {
+					context.write(', ');
+				}
+			}
+
+			// Write named imports
+			if (named_specifiers.length > 0) {
+				context.write('{ ');
+				for (let i = 0; i < named_specifiers.length; i++) {
+					if (i > 0) context.write(', ');
+					context.visit(named_specifiers[i]);
+				}
+				context.write(' }');
+			}
+
+			context.write(' from ');
+		}
+
+		// Write source
+		context.visit(node.source);
+	},
 	// Custom handler for TSParenthesizedType: (Type)
 	TSParenthesizedType(node, context) {
 		context.write('(');
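Note: the handler added above matters because of the second argument to context.write; esrap only records a source-map segment for text that is written together with a node. A minimal sketch of that behaviour against a stubbed printer context (the stub and its result shape are assumptions for illustration, not esrap's real internals):

function make_stub_context() {
	let output = '';
	const segments = [];
	return {
		write(text, node) {
			// A segment is only recorded when a node with a .loc accompanies the text.
			if (node?.loc) {
				segments.push({ generated_column: output.length, source: node.loc.start });
			}
			output += text;
		},
		result: () => ({ output, segments }),
	};
}

const import_node = {
	type: 'ImportDeclaration',
	// illustrative location: the statement starts at line 1, column 0 of the source
	loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 30 } },
};
const ctx = make_stub_context();

ctx.write('import', import_node); // mapped: segment at generated column 0 -> source 1:0
ctx.write(' ');                   // not mapped: no node passed

console.log(ctx.result());
// { output: 'import ', segments: [ { generated_column: 0, source: { line: 1, column: 0 } } ] }

That statement-start segment is what the mapping code further down relies on when it translates whole-statement diagnostics, such as unused-import reports, back to the .ripple source.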
@@ -1,20 +1,17 @@
-/** @typedef {import('@volar/language-core').CodeMapping} VolarCodeMapping */
-
 /**
  * @typedef {Object} CustomMappingData
- * @property {[
+ * @property {number[]} generatedLengths
  */
 
 /**
- * @typedef {
- *
- *
- *
- * }} CodeMapping
+ * @typedef {import('estree').Position} Position
+ * @typedef {{start: Position, end: Position}} Location
+ * @typedef {import('@volar/language-core').CodeMapping} VolarCodeMapping
+ * @typedef {VolarCodeMapping['data'] & {customData: CustomMappingData}} MappingData
+ * @typedef {VolarCodeMapping & {data: MappingData}} CodeMapping
+ * @typedef {{code: string, mappings: CodeMapping[]}} MappingsResult
  */
 
-/** @typedef {{code: string, mappings: CodeMapping[]}} MappingsResult */
-
 import { walk } from 'zimmerframe';
 import { build_source_to_generated_map, get_generated_position } from '../../source-map-utils.js';
 
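Note: under the reworked typedefs a CodeMapping is a regular Volar mapping whose data also carries the customData block. An illustrative value (all offsets invented) that matches the new JSDoc:

/** @type {CodeMapping} */
const example_mapping = {
	// byte offsets into the .ripple source and the generated code respectively
	sourceOffsets: [120],
	generatedOffsets: [184],
	// Volar sizes ranges from the source-side length...
	lengths: [9],
	data: {
		verification: true,
		customData: {
			// ...while the generated-side length travels here for later position patching
			generatedLengths: [9],
		},
	},
};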
@@ -52,6 +49,7 @@ export function convert_source_map_to_mappings(
 ) {
 	/** @type {CodeMapping[]} */
 	const mappings = [];
+	let isImportDeclarationPresent = false;
 
 	/**
 	 * Converts line/column positions to byte offsets
@@ -74,10 +72,14 @@ export function convert_source_map_to_mappings(
 	 * @param {number} line
 	 * @param {number} column
 	 * @param {number[]} line_offsets
-	 * @returns {number
+	 * @returns {number}
 	 */
 	const loc_to_offset = (line, column, line_offsets) => {
-		if (line < 1 || line > line_offsets.length)
+		if (line < 1 || line > line_offsets.length) {
+			throw new Error(
+				`Location line or line offsets length is out of bounds, line: ${line}, line offsets length: ${line_offsets.length}`,
+			);
+		}
 		return line_offsets[line - 1] + column;
 	};
 
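Note: loc_to_offset adds a 0-based column to the precomputed byte offset at which the 1-based line starts, and with this change an out-of-range line now throws instead of falling through. A self-contained sketch of the same arithmetic (here line_offsets is built inline; the real helper receives it as an argument):

const text = 'let a = 1;\nlet b = 2;\n';

// line_offsets[i] is the byte offset where line i + 1 starts
const line_offsets = [0];
for (let i = 0; i < text.length; i++) {
	if (text[i] === '\n') line_offsets.push(i + 1);
}

const loc_to_offset = (line, column) => {
	if (line < 1 || line > line_offsets.length) {
		throw new Error(`line ${line} is out of bounds for ${line_offsets.length} lines`);
	}
	return line_offsets[line - 1] + column;
};

console.log(line_offsets);        // [0, 11, 22]
console.log(loc_to_offset(2, 4)); // 15, the offset of 'b' on line 2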
@@ -100,12 +102,15 @@ export function convert_source_map_to_mappings(
 
 	// Collect text tokens from AST nodes
 	// All tokens must have source/generated text and loc property for accurate positioning
-	/**
-
-
-
-
-
+	/**
+	 * @type {Array<{
+	 *   source: string,
+	 *   generated: string,
+	 *   is_full_import_statement?: boolean,
+	 *   loc: Location,
+	 *   end_loc?: Location,
+	 * }>}
+	 */
 	const tokens = [];
 
 	// We have to visit everything in generated order to maintain correct indices
@@ -157,14 +162,17 @@ export function convert_source_map_to_mappings(
 			}
 			return; // Leaf node, don't traverse further
 		} else if (node.type === 'ImportDeclaration') {
+			isImportDeclarationPresent = true;
+
 			// Add import declaration as a special token for full-statement mapping
 			// TypeScript reports unused imports with diagnostics covering the entire statement
-			if (node.loc) {
+			if (node.loc && node.source?.loc) {
 				tokens.push({
 					source: '',
 					generated: '',
 					loc: node.loc,
-
+					is_full_import_statement: true,
+					end_loc: node.source.loc,
 				});
 			}
 
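Note: for an import such as import { mount } from 'ripple'; the walker now pushes a synthetic token whose loc spans the whole declaration and whose end_loc is the source literal's loc. Illustratively (the column values are for this exact one-liner):

const token = {
	source: '',
	generated: '',
	is_full_import_statement: true,
	// the whole ImportDeclaration, including the trailing semicolon
	loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 31 } },
	// node.source.loc: the 'ripple' string literal; its .end bounds the statement-wide
	// mapping below, so the trailing semicolon is not included in the mapped range
	end_loc: { start: { line: 1, column: 22 }, end: { line: 1, column: 30 } },
};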
@@ -1203,102 +1211,74 @@ export function convert_source_map_to_mappings(
 		},
 	});
 
-	// Process each token in order
-	// All tokens now have .loc property - no need for fallback logic
 	for (const token of tokens) {
 		const source_text = token.source;
 		const gen_text = token.generated;
 
-
-
-
-
-
-			token.loc.start.column,
-			source_line_offsets,
-		);
-		const source_end = loc_to_offset(
-			token.loc.end.line,
-			token.loc.end.column,
-			source_line_offsets,
-		);
+		const source_start = loc_to_offset(
+			token.loc.start.line,
+			token.loc.start.column,
+			source_line_offsets,
+		);
 
-
+		let source_length = source_text.length;
+		let gen_length = gen_text.length;
+		/** @type {MappingData} */
+		let data;
+		/** @type {number} */
+		let gen_start;
+
+		if (token.is_full_import_statement) {
+			const end_loc = /** @type {Location} */ (token.end_loc).end;
+			const source_end = loc_to_offset(end_loc.line, end_loc.column, source_line_offsets);
+
+			// Look up where import keyword and source literal map to in generated code
 			const gen_start_pos = get_generated_position(
 				token.loc.start.line,
 				token.loc.start.column,
 				adjusted_source_map,
 			);
-			const gen_end_pos = get_generated_position(
-				token.loc.end.line,
-				token.loc.end.column,
-				adjusted_source_map,
-			);
-
-			if (source_start !== null && source_end !== null && gen_start_pos && gen_end_pos) {
-				// Convert generated line:col to byte offsets
-				const gen_start = gen_loc_to_offset(gen_start_pos.line, gen_start_pos.column);
-				const gen_end = gen_loc_to_offset(gen_end_pos.line, gen_end_pos.column);
+			const gen_end_pos = get_generated_position(end_loc.line, end_loc.column, adjusted_source_map);
 
-
-
+			gen_start = gen_loc_to_offset(gen_start_pos.line, gen_start_pos.column);
+			const gen_end = gen_loc_to_offset(gen_end_pos.line, gen_end_pos.column);
 
-
-
-				generatedOffsets: [gen_start],
-				lengths: [Math.min(source_length, gen_length)],
-				data: {
-					// only verification (diagnostics) to avoid duplicate hover/completion
-					verification: true,
+			source_length = source_end - source_start;
+			gen_length = gen_end - gen_start;
 
-
-
-
-
-
-
-
-
-
-			// Use .loc to get the exact source position
-			const source_pos = loc_to_offset(
-				token.loc.start.line,
-				token.loc.start.column,
-				source_line_offsets,
-			);
-
-			// Get generated position using source map
-			const gen_line_col = get_generated_position(
-				token.loc.start.line,
-				token.loc.start.column,
-				adjusted_source_map,
-			);
-			let gen_pos = null;
-			if (gen_line_col) {
-				// Convert generated line:col to byte offset
-				gen_pos = gen_loc_to_offset(gen_line_col.line, gen_line_col.column);
+			data = {
+				// we only want verification here, like unused imports
+				// since this is synthetic and otherwise we'll get duplicated actions like intellisense
+				// each imported specifier has its own mapping
+				verification: true,
+				customData: {
+					generatedLengths: [gen_length],
+				},
+			};
 		} else {
-
-
-
+			const gen_line_col = get_generated_position(
+				token.loc.start.line,
+				token.loc.start.column,
+				adjusted_source_map,
 			);
-
+			gen_start = gen_loc_to_offset(gen_line_col.line, gen_line_col.column);
 
-
-
-
-
-			sourceOffsets: [source_pos],
-			generatedOffsets: [gen_pos],
-			lengths: [source_text.length],
-			data: {
-				...mapping_data,
-				customData: {
-					generatedLengths: [gen_text.length],
-				},
+			data = {
+				...mapping_data,
+				customData: {
+					generatedLengths: [gen_length],
 			},
-		}
+			};
 		}
+
+		// !IMPORTANT: don't set generatedLengths, otherwise Volar will use that vs our source
+		// We're adding it to our custom metadata instead as we need it for patching positions
+		mappings.push({
+			sourceOffsets: [source_start],
+			generatedOffsets: [gen_start],
+			lengths: [source_length],
+			data,
+		});
 	}
 
 	// Sort mappings by source offset
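Note: both branches now feed a single mappings.push, so every entry ends up with the same shape and only data differs. A sketch of the two cases with invented offsets:

// regular token: lengths holds the source-side length, the generated-side length goes
// into customData so Volar keeps sizing ranges from the source
const token_mapping = {
	sourceOffsets: [240],
	generatedOffsets: [388],
	lengths: [6],
	data: {
		// plus whatever mapping_data carries for ordinary tokens (not shown in this hunk)
		customData: { generatedLengths: [10] },
	},
};

// synthetic full-import-statement token: verification only, so the statement-wide range
// feeds diagnostics without duplicating the hover/completion that the per-specifier
// mappings already provide
const import_statement_mapping = {
	sourceOffsets: [0],
	generatedOffsets: [0],
	lengths: [31],
	data: {
		verification: true,
		customData: { generatedLengths: [31] },
	},
};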
@@ -1306,7 +1286,7 @@ export function convert_source_map_to_mappings(
 
 	// Add a mapping for the very beginning of the file to handle import additions
 	// This ensures that code actions adding imports at the top work correctly
-	if (mappings.length > 0 && mappings[0].sourceOffsets[0] > 0) {
+	if (!isImportDeclarationPresent && mappings.length > 0 && mappings[0].sourceOffsets[0] > 0) {
 		mappings.unshift({
 			sourceOffsets: [0],
 			generatedOffsets: [0],
@@ -44,7 +44,10 @@ export function build_source_to_generated_map(source_map, post_processing_change
 
 	while (left <= right) {
 		const mid = Math.floor((left + right) / 2);
-		if (
+		if (
+			offset >= line_offsets[mid] &&
+			(mid === line_offsets.length - 1 || offset < line_offsets[mid + 1])
+		) {
 			line = mid + 1;
 			break;
 		} else if (offset < line_offsets[mid]) {
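Note: the widened condition makes the binary search stop on the line whose start is at or before the offset and whose successor, if any, starts after it. A self-contained sketch of that lookup; the condition and the first two branches come from this hunk, while the initial bounds and the remaining branches are assumptions here:

function offset_to_line(offset, line_offsets) {
	let line = 0;
	let left = 0;
	let right = line_offsets.length - 1;

	while (left <= right) {
		const mid = Math.floor((left + right) / 2);
		if (
			offset >= line_offsets[mid] &&
			(mid === line_offsets.length - 1 || offset < line_offsets[mid + 1])
		) {
			// offset sits on line mid + 1: at or past its start, before the next line's start
			line = mid + 1;
			break;
		} else if (offset < line_offsets[mid]) {
			right = mid - 1;
		} else {
			left = mid + 1;
		}
	}
	return line;
}

console.log(offset_to_line(15, [0, 11, 22])); // 2
console.log(offset_to_line(25, [0, 11, 22])); // 3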
@@ -114,14 +117,15 @@ export function build_source_to_generated_map(source_map, post_processing_change
  * @param {number} source_line - 1-based line number in source
  * @param {number} source_column - 0-based column number in source
  * @param {SourceToGeneratedMap} source_to_gen_map - Lookup map
- * @returns {{line: number, column: number}
+ * @returns {{line: number, column: number}} Generated position
  */
 export function get_generated_position(source_line, source_column, source_to_gen_map) {
 	const key = `${source_line}:${source_column}`;
 	const positions = source_to_gen_map.get(key);
 
 	if (!positions || positions.length === 0) {
-
+		// No mapping found in source map - this shouldn't happen since all tokens should have mappings
+		throw new Error(`No source map entry for position "${source_line}:${source_column}"`);
 	}
 
 	// If multiple generated positions map to same source, return the first