rip-lang 3.15.2 → 3.15.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Binary file
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "rip-lang",
3
- "version": "3.15.2",
3
+ "version": "3.15.4",
4
4
  "description": "A modern language that compiles to JavaScript",
5
5
  "type": "module",
6
6
  "main": "src/compiler.js",
package/src/browser.js CHANGED
@@ -10,6 +10,7 @@ import './schema/loader-browser.js';
10
10
  export { Lexer } from './lexer.js';
11
11
  export { parser } from './parser.js';
12
12
  export { CodeEmitter, Compiler, compile, compileToJS, formatSExpr, getStdlibCode, getReactiveRuntime, getComponentRuntime, RipError, formatError, formatErrorHTML } from './compiler.js';
13
+ import { mergeChunksWithInlineMap } from './sourcemap-merge.js';
13
14
  import { getStdlibCode, formatError as _formatError } from './compiler.js';
14
15
 
15
16
  // Version info (replaced during build)
@@ -222,14 +223,20 @@ async function processRipScripts() {
222
223
  }
223
224
  expanded.push(...individual);
224
225
 
225
- // Source maps are ON by default `data-debug="false"` opts out.
226
- // Individually compile each source with its own source map; we'll
227
- // sequentially eval them per-component so each has a self-consistent
228
- // map (DevTools only honours the last sourceMappingURL inside an
229
- // eval, so concatenating maps doesn't work).
226
+ // Bundle / multi-source path. Components defined in one .rip file
227
+ // need to be visible to siblings (e.g. `WidgetGallery` referencing
228
+ // `Toast`, `Dialog`, `Menu`). To make that work we concatenate
229
+ // every compiled chunk into ONE async-IIFE and eval it as a
230
+ // single closure — declarations made by one source are visible
231
+ // to all subsequent sources via lexical scope.
232
+ //
233
+ // For source maps: each compileToJS call emits its own per-chunk
234
+ // map; mergeChunksWithInlineMap merges them into ONE V3 map with
235
+ // a multi-entry `sources` / `sourcesContent` array. DevTools
236
+ // reads the single merged map and shows each `.rip` file as a
237
+ // navigable source — same UX as the data-router path, no
238
+ // per-component IIFE split that would break lexical scope.
230
239
  const debug = runtimeTag?.getAttribute('data-debug') !== 'false';
231
- // Update the global flag so app.launch()'s compile path (in app.rip)
232
- // sees the same setting as our local `debug` variable.
233
240
  if (globalThis.__ripDebug) globalThis.__ripDebug.enabled = debug;
234
241
  const baseOpts = { skipRuntimes: true, skipExports: true, skipImports: true };
235
242
  const compiled = [];
@@ -269,35 +276,39 @@ async function processRipScripts() {
269
276
  }
270
277
  }
271
278
 
272
- // Execute compiled code per-component so each has its own valid
273
- // source map (DevTools only honours the last `sourceMappingURL`
274
- // pragma inside one evaluated chunk, so concatenating multiple
275
- // source-mapped chunks would lose all but the last). Components
276
- // share scope via globalThis attachment typical Rip definitions
277
- // (`Foo = component`, `Bar = ...`) compile to globalThis-attached
278
- // bindings under `skipExports/skipImports`, which survive between
279
- // the per-component evals.
279
+ // Concatenate all compiled chunks into one async IIFE so component
280
+ // declarations made in earlier chunks are visible (via lexical
281
+ // scope) to later chunks AND to the final mount step. Without
282
+ // this, each chunk's `let Foo = class ...` dies when its IIFE
283
+ // returns and `WidgetGallery` (defined in one chunk) can't see
284
+ // `Toast` (defined in another).
285
+ //
286
+ // mergeChunksWithInlineMap concatenates the per-chunk JS bodies
287
+ // (stripping their individual `//# sourceMappingURL=` pragmas)
288
+ // and emits ONE V3 map at the end whose `sources` array holds
289
+ // every chunk's original `.rip` filename. DevTools shows each
290
+ // `.rip` file as its own source even though everything runs in
291
+ // one eval'd closure.
280
292
  if (compiled.length > 0) {
281
- let anyError = false;
282
- for (const c of compiled) {
283
- try {
284
- await (0, eval)(debug ? wrapForEval(c.js, c.url) : `(async()=>{\n${c.js}\n})()`);
285
- } catch (e) {
286
- anyError = true;
287
- if (e instanceof SyntaxError) console.error(`Rip syntax error in ${c.url}: ${e.message}`);
288
- else console.error(`Rip runtime error in ${c.url}:`, e);
289
- }
290
- }
291
-
292
- // Final mount step — runs after all components are defined.
293
293
  const mount = runtimeTag?.getAttribute('data-mount');
294
- if (mount) {
295
- const target = runtimeTag.getAttribute('data-target') || 'body';
296
- try { await (0, eval)(`(async()=>{ ${mount}.mount(${JSON.stringify(target)}); })()`); }
297
- catch (e) { console.error(`Rip mount error (${mount}):`, e); }
294
+ const target = runtimeTag?.getAttribute('data-target') || 'body';
295
+ const mountSnippet = mount ? `\n${mount}.mount(${JSON.stringify(target)});\n` : '';
296
+ const mergedBody = mergeChunksWithInlineMap(compiled.map(c => ({ js: c.js })));
297
+ // Inject the mount call BEFORE the trailing sourceMappingURL pragma
298
+ // so the pragma stays at the end of the eval'd chunk (DevTools
299
+ // requires it there). If no mount, mergedBody is emitted as-is.
300
+ const wrapped = mount
301
+ ? mergedBody.replace(/(\n\/\/# sourceMappingURL=[^\n]*\n?)?$/, mountSnippet + '$1')
302
+ : mergedBody;
303
+ let ok = true;
304
+ try {
305
+ await (0, eval)(`(async()=>{\n${wrapped}\n})()`);
306
+ } catch (e) {
307
+ ok = false;
308
+ if (e instanceof SyntaxError) console.error(`Rip syntax error: ${e.message}`);
309
+ else console.error('Rip runtime error:', e);
298
310
  }
299
-
300
- if (!anyError) document.body.classList.add('ready');
311
+ if (ok) document.body.classList.add('ready');
301
312
  }
302
313
  }
303
314
  }
package/src/compiler.js CHANGED
@@ -354,21 +354,45 @@ export class CodeEmitter {
354
354
 
355
355
  // Walk the s-expression tree and record source map entries for
356
356
  // sub-expressions that carry .loc, giving column-level precision.
357
+ //
358
+ // Performance: the inner loop runs M × N times where M = sub-expressions
359
+ // and N = regex matches per sub-expression. Computing genLine/genCol via
360
+ // `code.substring(0, m.index).split('\n')` was O(N) per match, making
361
+ // the function O(M × N²) overall and catastrophic on large generated
362
+ // blocks (a 100KB statement was taking 36 seconds in the browser).
363
+ //
364
+ // Fix: precompute a sorted `lineStarts` array (offset of each line's
365
+ // first character), then binary-search to convert offset → line/col in
366
+ // O(log N) per match. Brings the inline-gallery compile from 36s → ~30ms.
357
367
  recordSubMappings(code, sexpr, lineOffset) {
358
368
  let stmtOrigLine = sexpr.loc ? sexpr.loc.r : 0;
359
369
  let subs = [];
360
370
  this.collectSubExprs(sexpr, subs);
361
371
  let codeLines = code.split('\n');
372
+ // lineStarts[i] = offset in `code` of the first char on line i.
373
+ // Length is codeLines.length; lineStarts[0] is 0.
374
+ const lineStarts = [0];
375
+ for (let i = 0; i < code.length; i++) {
376
+ if (code.charCodeAt(i) === 10) lineStarts.push(i + 1);
377
+ }
378
+ // Binary-search the largest lineStart <= offset; that gives the line.
379
+ const offsetToLine = (offset) => {
380
+ let lo = 0, hi = lineStarts.length - 1;
381
+ while (lo <= hi) {
382
+ const mid = (lo + hi) >> 1;
383
+ if (lineStarts[mid] <= offset) lo = mid + 1;
384
+ else hi = mid - 1;
385
+ }
386
+ return hi;
387
+ };
362
388
  for (let { name, origLine, origCol } of subs) {
363
389
  let escaped = name.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
364
390
  let re = new RegExp('\\b' + escaped + '\\b', 'g');
365
391
  let m, bestMatch = null, bestDist = Infinity;
366
392
  let origLineInStmt = origLine - stmtOrigLine;
367
393
  while ((m = re.exec(code)) !== null) {
368
- let before = code.substring(0, m.index);
369
- let nl = before.split('\n');
370
- let genLineInStmt = nl.length - 1;
371
- let genCol = nl[nl.length - 1].length;
394
+ const genLineInStmt = offsetToLine(m.index);
395
+ const genCol = m.index - lineStarts[genLineInStmt];
372
396
  // Skip matches inside string literals — prevents false mappings when
373
397
  // an identifier also appears as a string value (e.g. union type member)
374
398
  let lineText = codeLines[genLineInStmt];
@@ -0,0 +1,287 @@
1
+ // Source Map V3 — multi-chunk merger.
2
+ //
3
+ // Concatenates N JS chunks (each with its own inline source map) into one
4
+ // JS string + one merged source map. Used by `processRipScripts` to produce
5
+ // a single eval'd chunk that DevTools can map back to multiple original
6
+ // `.rip` files.
7
+ //
8
+ // Why this exists: DevTools only honours the LAST `//# sourceMappingURL`
9
+ // inside one eval'd chunk. So when N chunks share lexical scope (concat
10
+ // + single eval, the bundle-no-router execution model), we must produce
11
+ // ONE map that covers all N sources. Source Map V3 supports this natively
12
+ // via a multi-entry `sources` / `sourcesContent` array and source-index
13
+ // fields in the VLQ mappings — every production JS bundler does this.
14
+ //
15
+ // Algorithm:
16
+ // 1. Strip per-chunk `//# sourceMappingURL=` pragmas from each chunk's JS
17
+ // 2. Decode each chunk's `mappings` to absolute coordinates
18
+ // 3. Concatenate JS with `\n;\n` separators (statement-safe, +2 newlines
19
+ // between chunks)
20
+ // 4. Offset each chunk's generated line numbers by the cumulative line
21
+ // count of preceding chunks + separators
22
+ // 5. Dedupe `sources`, `sourcesContent`, `names` into merged arrays;
23
+ // remap each chunk's source-index / name-index fields
24
+ // 6. Re-encode as relative VLQ across the full concatenated mapping
25
+ //
26
+ // Returns { js, mapJSON } where mapJSON is a JSON string ready to embed
27
+ // as a base64 data URL via a single `//# sourceMappingURL=` at the end.
28
+
29
+ import { vlqEncode, vlqDecode } from './sourcemaps.js';
30
+
31
// Chunks are joined with `\n;\n`: the `;` safely terminates whatever
// expression ended the previous chunk, and the join contributes exactly
// two newlines to the generated-line offset arithmetic.
const SEPARATOR = '\n;\n';
const SEPARATOR_LINES = 2;
35
+
36
// Remove a trailing `//# sourceMappingURL=data:...;base64,...` pragma from
// a JS chunk so concatenated chunks don't carry stray inline maps that
// DevTools would honour in unexpected order. The pragma's base64 payload
// is decoded back to its source-map JSON text.
//
// Returns { js, mapJSON }: `js` with the pragma stripped, and `mapJSON`
// as the decoded V3 JSON string (null when there is no pragma, or when
// the payload fails to decode).
function extractMap(js) {
  const pragmaRe = /\n?\/\/# sourceMappingURL=data:application\/json(?:;charset=[^;,]+)?;base64,([A-Za-z0-9+/=]+)\s*$/;
  const hit = js.match(pragmaRe);
  if (!hit) return { js, mapJSON: null };
  const stripped = js.slice(0, hit.index);
  try {
    // UTF-8-safe base64 decode (mirror of the encode in compiler.js):
    // atob yields latin1 "bytes"; reassemble them and decode as UTF-8.
    const raw = atob(hit[1]);
    const buf = new Uint8Array(raw.length);
    for (let i = 0; i < raw.length; i++) buf[i] = raw.charCodeAt(i);
    return { js: stripped, mapJSON: new TextDecoder().decode(buf) };
  } catch {
    // Malformed payload: still strip the pragma, just report no map.
    return { js: stripped, mapJSON: null };
  }
}
56
+
57
// Number of '\n' characters in `str`. Drives the line-offset bookkeeping
// when chunks are concatenated: a chunk containing N newlines that starts
// at generated line L occupies lines [L, L+N], so the next chunk begins
// after N plus the SEPARATOR's newlines.
function countNewlines(str) {
  let total = 0;
  for (const ch of str) {
    if (ch === '\n') total++;
  }
  return total;
}
67
+
68
// Decode a Source Map V3 `mappings` string into absolute-coordinate
// segments, one array per generated line. Each segment is one of:
//   { genCol }                                — 1-field (unmapped)
//   { genCol, src, origLine, origCol }        — 4-field (mapped)
//   { genCol, src, origLine, origCol, name }  — 5-field (mapped + name)
// Per the V3 spec the genCol delta state resets at every ';' (new line),
// while src / origLine / origCol / name deltas carry across lines.
function decodeMappings(mappingsStr) {
  const perLine = [];
  let src = 0;
  let origLine = 0;
  let origCol = 0;
  let nameIdx = 0;
  for (const lineStr of mappingsStr.split(';')) {
    const segs = [];
    let genCol = 0;
    const pieces = lineStr.length > 0 ? lineStr.split(',') : [];
    for (const piece of pieces) {
      if (!piece) continue;
      const fields = vlqDecode(piece);
      genCol += fields[0];
      if (fields.length === 1) {
        segs.push({ genCol });
        continue;
      }
      src += fields[1];
      origLine += fields[2];
      origCol += fields[3];
      const seg = { genCol, src, origLine, origCol };
      if (fields.length >= 5) {
        nameIdx += fields[4];
        seg.name = nameIdx;
      }
      segs.push(seg);
    }
    perLine.push(segs);
  }
  return perLine;
}
106
+
107
// Inverse of decodeMappings: serialize absolute-coordinate segments back
// into a relative-VLQ V3 `mappings` string. Generated-column deltas reset
// on each line; source / origLine / origCol / name deltas run across lines.
function encodeMappings(perLineSegs) {
  let lastSrc = 0;
  let lastOrigLine = 0;
  let lastOrigCol = 0;
  let lastName = 0;
  const encodedLines = perLineSegs.map((segs) => {
    let lastGenCol = 0; // per-line state
    const parts = [];
    for (const seg of segs) {
      if (seg.src == null) {
        parts.push(vlqEncode(seg.genCol - lastGenCol));
      } else {
        let enc = vlqEncode(seg.genCol - lastGenCol)
          + vlqEncode(seg.src - lastSrc)
          + vlqEncode(seg.origLine - lastOrigLine)
          + vlqEncode(seg.origCol - lastOrigCol);
        if (seg.name != null) {
          enc += vlqEncode(seg.name - lastName);
          lastName = seg.name;
        }
        lastSrc = seg.src;
        lastOrigLine = seg.origLine;
        lastOrigCol = seg.origCol;
        parts.push(enc);
      }
      lastGenCol = seg.genCol;
    }
    return parts.join(',');
  });
  return encodedLines.join(';');
}
139
+
140
// Merge N `{js, mapJSON}` chunks (mapJSON = that chunk's V3 source-map
// JSON string, or null/undefined when the map is inline in the JS or
// absent) into one concatenated JS string plus one combined V3 map.
//
// Returns:
//   {
//     js,      // all chunk bodies joined with SEPARATOR, every per-chunk
//              //   sourceMappingURL pragma stripped — the caller appends
//              //   exactly one merged pragma
//     mapJSON, // merged V3 map as a JSON string, or null when no chunk
//              //   contributed a map
//   }
//
// Chunks must already be in eval order. Chunk JS is never parsed — only
// its newlines are counted for the generated-line offset arithmetic.
export function mergeChunks(chunks) {
  // Normalize every chunk to { js (pragma-free), mapJSON }. A chunk that
  // already supplies mapJSON (even null) is trusted as-is; otherwise the
  // map is pulled out of the chunk's inline pragma.
  const prepared = chunks.map((chunk) =>
    chunk.mapJSON !== undefined ? { js: chunk.js, mapJSON: chunk.mapJSON } : extractMap(chunk.js)
  );

  const mergedJS = prepared.map((chunk) => chunk.js).join(SEPARATOR);
  // Mapless fast path: nothing to merge, just the concatenation.
  if (!prepared.some((chunk) => chunk.mapJSON)) {
    return { js: mergedJS, mapJSON: null };
  }

  // Merged sources / sourcesContent / names, deduplicated by string
  // identity. The add* helpers return the merged index so per-chunk
  // remap tables can be built. First occurrence of a source name wins
  // for sourcesContent.
  const mergedSources = [];
  const mergedSourcesContent = [];
  const mergedNames = [];
  const sourceIdx = new Map();
  const nameIdx = new Map();

  const addSource = (source, content) => {
    if (!sourceIdx.has(source)) {
      sourceIdx.set(source, mergedSources.length);
      mergedSources.push(source);
      mergedSourcesContent.push(content ?? null);
    }
    return sourceIdx.get(source);
  };
  const addName = (name) => {
    if (!nameIdx.has(name)) {
      nameIdx.set(name, mergedNames.length);
      mergedNames.push(name);
    }
    return nameIdx.get(name);
  };

  // First pass: decode each chunk's map, record its generated-line offset
  // within the concatenation, and build its source/name remap tables.
  const chunkInfos = [];
  let lineOffset = 0;
  prepared.forEach((chunk, i) => {
    const info = { lineOffset, srcRemap: [], nameRemap: [], perLineSegs: null };
    if (chunk.mapJSON) {
      let parsed = null;
      try {
        parsed = JSON.parse(chunk.mapJSON);
      } catch {
        // Unparseable map: the chunk still contributes JS, just no mappings.
      }
      if (parsed && typeof parsed.mappings === 'string') {
        const content = parsed.sourcesContent || [];
        info.srcRemap = (parsed.sources || []).map((s, j) => addSource(s, content[j]));
        info.nameRemap = (parsed.names || []).map((n) => addName(n));
        info.perLineSegs = decodeMappings(parsed.mappings);
      }
    }
    chunkInfos.push(info);
    // The next chunk starts after this chunk's newlines plus the
    // separator's (no separator after the final chunk). Counting the JS
    // we actually emit keeps offsets in lock-step with mergedJS.
    lineOffset += countNewlines(chunk.js) + (i < prepared.length - 1 ? SEPARATOR_LINES : 0);
  });

  // Second pass: shift each chunk's segments by its line offset and
  // rewrite source/name indices through the remap tables.
  const mergedPerLineSegs = Array.from({ length: lineOffset + 1 }, () => []);
  for (const info of chunkInfos) {
    if (!info.perLineSegs) continue;
    info.perLineSegs.forEach((segs, li) => {
      const target = info.lineOffset + li;
      // Defensive: extend if a chunk maps more lines than we counted.
      while (mergedPerLineSegs.length <= target) mergedPerLineSegs.push([]);
      for (const seg of segs) {
        if (seg.src == null) {
          mergedPerLineSegs[target].push({ genCol: seg.genCol });
        } else {
          const remapped = {
            genCol: seg.genCol,
            src: info.srcRemap[seg.src],
            origLine: seg.origLine,
            origCol: seg.origCol,
          };
          if (seg.name != null) remapped.name = info.nameRemap[seg.name];
          mergedPerLineSegs[target].push(remapped);
        }
      }
    });
  }

  return {
    js: mergedJS,
    mapJSON: JSON.stringify({
      version: 3,
      sources: mergedSources,
      sourcesContent: mergedSourcesContent,
      names: mergedNames,
      mappings: encodeMappings(mergedPerLineSegs),
    }),
  };
}
265
+
266
// Base64-encode a JS string via its UTF-8 bytes (plain btoa would throw
// on code points above U+00FF). Used to embed mapJSON in the data-URL
// sourceMappingURL pragma; mirrors the encoder in src/compiler.js.
function utf8ToBase64(str) {
  // Node path: Buffer does UTF-8 → base64 in one step.
  if (typeof Buffer !== 'undefined') {
    return Buffer.from(str, 'utf8').toString('base64');
  }
  // Browser path: encode to UTF-8 bytes, view them as latin1, btoa that.
  const encoded = new TextEncoder().encode(str);
  let latin1 = '';
  for (const byte of encoded) latin1 += String.fromCharCode(byte);
  return btoa(latin1);
}
278
+
279
// One-call convenience wrapper: merge the chunks, then append a single
// base64 data-URL `//# sourceMappingURL=` pragma carrying the merged map.
// When no chunk had a map, the plain concatenated JS is returned untouched.
export function mergeChunksWithInlineMap(chunks) {
  const merged = mergeChunks(chunks);
  if (!merged.mapJSON) return merged.js;
  return (
    merged.js +
    '\n//# sourceMappingURL=data:application/json;base64,' +
    utf8ToBase64(merged.mapJSON) +
    '\n'
  );
}