fastscript 0.1.1 → 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (108)
  1. package/CHANGELOG.md +31 -2
  2. package/LICENSE +33 -21
  3. package/README.md +568 -59
  4. package/node_modules/@fastscript/core-private/BOUNDARY.json +15 -0
  5. package/node_modules/@fastscript/core-private/README.md +5 -0
  6. package/node_modules/@fastscript/core-private/package.json +34 -0
  7. package/node_modules/@fastscript/core-private/src/asset-optimizer.mjs +67 -0
  8. package/node_modules/@fastscript/core-private/src/audit-log.mjs +50 -0
  9. package/node_modules/@fastscript/core-private/src/auth-flows.mjs +29 -0
  10. package/node_modules/@fastscript/core-private/src/auth.mjs +115 -0
  11. package/node_modules/@fastscript/core-private/src/bench.mjs +45 -0
  12. package/node_modules/@fastscript/core-private/src/build.mjs +670 -0
  13. package/node_modules/@fastscript/core-private/src/cache.mjs +248 -0
  14. package/node_modules/@fastscript/core-private/src/check.mjs +22 -0
  15. package/node_modules/@fastscript/core-private/src/cli.mjs +95 -0
  16. package/node_modules/@fastscript/core-private/src/compat.mjs +128 -0
  17. package/node_modules/@fastscript/core-private/src/create.mjs +278 -0
  18. package/node_modules/@fastscript/core-private/src/csp.mjs +26 -0
  19. package/node_modules/@fastscript/core-private/src/db-cli.mjs +185 -0
  20. package/node_modules/@fastscript/core-private/src/db-postgres-collection.mjs +110 -0
  21. package/node_modules/@fastscript/core-private/src/db-postgres.mjs +40 -0
  22. package/node_modules/@fastscript/core-private/src/db.mjs +103 -0
  23. package/node_modules/@fastscript/core-private/src/deploy.mjs +662 -0
  24. package/node_modules/@fastscript/core-private/src/dev.mjs +5 -0
  25. package/node_modules/@fastscript/core-private/src/docs-search.mjs +35 -0
  26. package/node_modules/@fastscript/core-private/src/env.mjs +118 -0
  27. package/node_modules/@fastscript/core-private/src/export.mjs +83 -0
  28. package/node_modules/@fastscript/core-private/src/fs-diagnostics.mjs +70 -0
  29. package/node_modules/@fastscript/core-private/src/fs-error-codes.mjs +141 -0
  30. package/node_modules/@fastscript/core-private/src/fs-formatter.mjs +66 -0
  31. package/node_modules/@fastscript/core-private/src/fs-linter.mjs +274 -0
  32. package/node_modules/@fastscript/core-private/src/fs-normalize.mjs +91 -0
  33. package/node_modules/@fastscript/core-private/src/fs-parser.mjs +980 -0
  34. package/node_modules/@fastscript/core-private/src/generated/docs-search-index.mjs +3182 -0
  35. package/node_modules/@fastscript/core-private/src/i18n.mjs +25 -0
  36. package/node_modules/@fastscript/core-private/src/interop.mjs +16 -0
  37. package/node_modules/@fastscript/core-private/src/jobs.mjs +378 -0
  38. package/node_modules/@fastscript/core-private/src/logger.mjs +27 -0
  39. package/node_modules/@fastscript/core-private/src/metrics.mjs +45 -0
  40. package/node_modules/@fastscript/core-private/src/middleware.mjs +14 -0
  41. package/node_modules/@fastscript/core-private/src/migrate.mjs +81 -0
  42. package/node_modules/@fastscript/core-private/src/migration-wizard.mjs +16 -0
  43. package/node_modules/@fastscript/core-private/src/module-loader.mjs +46 -0
  44. package/node_modules/@fastscript/core-private/src/oauth-providers.mjs +103 -0
  45. package/node_modules/@fastscript/core-private/src/observability.mjs +21 -0
  46. package/node_modules/@fastscript/core-private/src/plugins.mjs +194 -0
  47. package/node_modules/@fastscript/core-private/src/retention.mjs +57 -0
  48. package/node_modules/@fastscript/core-private/src/routes.mjs +178 -0
  49. package/node_modules/@fastscript/core-private/src/scheduler.mjs +104 -0
  50. package/node_modules/@fastscript/core-private/src/security.mjs +233 -0
  51. package/node_modules/@fastscript/core-private/src/server-runtime.mjs +849 -0
  52. package/node_modules/@fastscript/core-private/src/serverless-handler.mjs +20 -0
  53. package/node_modules/@fastscript/core-private/src/session-policy.mjs +38 -0
  54. package/node_modules/@fastscript/core-private/src/start.mjs +10 -0
  55. package/node_modules/@fastscript/core-private/src/storage.mjs +155 -0
  56. package/node_modules/@fastscript/core-private/src/style-primitives.mjs +538 -0
  57. package/node_modules/@fastscript/core-private/src/style-system.mjs +461 -0
  58. package/node_modules/@fastscript/core-private/src/tenant.mjs +55 -0
  59. package/node_modules/@fastscript/core-private/src/typecheck.mjs +1464 -0
  60. package/node_modules/@fastscript/core-private/src/validate.mjs +22 -0
  61. package/node_modules/@fastscript/core-private/src/validation.mjs +88 -0
  62. package/node_modules/@fastscript/core-private/src/webhook.mjs +81 -0
  63. package/node_modules/@fastscript/core-private/src/worker.mjs +24 -0
  64. package/package.json +88 -8
  65. package/src/asset-optimizer.mjs +67 -0
  66. package/src/audit-log.mjs +50 -0
  67. package/src/auth.mjs +1 -115
  68. package/src/bench.mjs +20 -7
  69. package/src/build.mjs +1 -222
  70. package/src/cache.mjs +210 -20
  71. package/src/cli.mjs +29 -5
  72. package/src/compat.mjs +7 -1
  73. package/src/create.mjs +65 -11
  74. package/src/csp.mjs +26 -0
  75. package/src/db-cli.mjs +158 -18
  76. package/src/db-postgres-collection.mjs +110 -0
  77. package/src/deploy.mjs +1 -65
  78. package/src/docs-search.mjs +35 -0
  79. package/src/env.mjs +34 -5
  80. package/src/fs-diagnostics.mjs +70 -0
  81. package/src/fs-error-codes.mjs +126 -0
  82. package/src/fs-formatter.mjs +66 -0
  83. package/src/fs-linter.mjs +274 -0
  84. package/src/fs-normalize.mjs +17 -26
  85. package/src/fs-parser.mjs +1 -0
  86. package/src/generated/docs-search-index.mjs +3220 -0
  87. package/src/i18n.mjs +25 -0
  88. package/src/jobs.mjs +283 -32
  89. package/src/metrics.mjs +45 -0
  90. package/src/migration-wizard.mjs +16 -0
  91. package/src/module-loader.mjs +46 -0
  92. package/src/oauth-providers.mjs +103 -0
  93. package/src/plugins.mjs +194 -0
  94. package/src/retention.mjs +57 -0
  95. package/src/routes.mjs +178 -0
  96. package/src/scheduler.mjs +104 -0
  97. package/src/security.mjs +197 -19
  98. package/src/server-runtime.mjs +1 -339
  99. package/src/serverless-handler.mjs +20 -0
  100. package/src/session-policy.mjs +38 -0
  101. package/src/storage.mjs +1 -56
  102. package/src/style-system.mjs +461 -0
  103. package/src/tenant.mjs +55 -0
  104. package/src/typecheck.mjs +1 -0
  105. package/src/validate.mjs +5 -1
  106. package/src/validation.mjs +14 -5
  107. package/src/webhook.mjs +1 -71
  108. package/src/worker.mjs +23 -4
@@ -0,0 +1,980 @@
1
+ import { parse as acornParse, tokenizer as acornTokenizer } from "acorn";
2
+ import { resolveErrorMeta } from "./fs-error-codes.mjs";
3
+
4
+ // Version tag stamped on emitted ASTs so consumers can detect format drift.
+ export const FASTSCRIPT_AST_VERSION = "1.0.0";
5
+
6
+ // Token values that mark a statement boundary; used by isStatementStart.
+ const STATEMENT_BOUNDARY_TOKENS = new Set([";", "{", "}"]);
7
+ // Keywords opening type-level declarations that are erased from runtime output.
+ const TYPE_DECLARATION_KEYWORDS = new Set(["type", "interface", "enum"]);
8
+
9
// --- Offset / line-column bookkeeping helpers ------------------------------

// Return the 0-based offset of the first character of every line in `source`.
// The first entry is always 0; one entry is appended per "\n" encountered.
function createLineStarts(source) {
  const text = String(source ?? "");
  const starts = [0];
  let pos = text.indexOf("\n");
  while (pos !== -1) {
    starts.push(pos + 1);
    pos = text.indexOf("\n", pos + 1);
  }
  return starts;
}

// Binary-search `lineStarts` for the 0-based line index containing `offset`.
function binarySearchLine(lineStarts, offset) {
  let low = 0;
  let high = lineStarts.length - 1;
  while (low <= high) {
    const mid = (low + high) >> 1;
    const begin = lineStarts[mid];
    const next = lineStarts[mid + 1] ?? Number.POSITIVE_INFINITY;
    if (offset < begin) {
      high = mid - 1;
    } else if (offset >= next) {
      low = mid + 1;
    } else {
      return mid;
    }
  }
  // Defensive fallback: clamp to a valid line index.
  return Math.max(0, Math.min(lineStarts.length - 1, low));
}

// Clamp `value` into [min, max].
function clamp(value, min, max) {
  const capped = Math.min(max, value);
  return Math.max(min, capped);
}

// Convert a character offset into a 1-based { line, column, offset } record.
function offsetToLineColumn(offset, lineStarts) {
  const lastStart = lineStarts.at(-1) ?? 0;
  // Upper bound is generous on purpose: offsets past the final line start are
  // still mapped onto the last line rather than rejected.
  const safe = clamp(offset, 0, Math.max(0, lastStart + 1_000_000));
  const lineIndex = binarySearchLine(lineStarts, safe);
  return {
    line: lineIndex + 1,
    column: safe - lineStarts[lineIndex] + 1,
    offset: safe,
  };
}

// Offset one-past-the-end of 1-based `line`; the final line ends at
// `sourceLength`.
function lineEndOffset(lineStarts, sourceLength, line) {
  const lineIndex = clamp(line - 1, 0, lineStarts.length - 1);
  return lineStarts[lineIndex + 1] ?? sourceLength;
}

// Coerce an arbitrary { start, end } span into one that lies inside the
// source and satisfies start <= end. A missing span becomes the empty span.
function normalizeSpan(span, sourceLength) {
  if (!span) return { start: 0, end: 0 };
  const start = clamp(Number(span.start ?? 0), 0, sourceLength);
  const end = clamp(Number(span.end ?? start), start, sourceLength);
  return { start, end };
}
57
+
58
// Build a normalized diagnostic record: spans are clamped into the source,
// locations are resolved to 1-based line/column, metadata defaults come from
// the FS error-code table, and related locations are sorted by offset.
function createDiagnostic({
  source,
  lineStarts,
  file = "",
  code = "FS1005",
  message,
  hint,
  severity,
  span,
  related = [],
  fixes = [],
  recoverable = true,
}) {
  const meta = resolveErrorMeta(code);
  const sourceLength = String(source ?? "").length;
  const primarySpan = normalizeSpan(span, sourceLength);
  const startLoc = offsetToLineColumn(primarySpan.start, lineStarts);
  const endLoc = offsetToLineColumn(Math.max(primarySpan.start, primarySpan.end), lineStarts);

  const relatedEntries = related.map((entry) => {
    const entrySpan = normalizeSpan(entry.span, sourceLength);
    const entryLoc = offsetToLineColumn(entrySpan.start, lineStarts);
    return {
      message: entry.message || "Related location",
      file: entry.file || file,
      span: entrySpan,
      line: entryLoc.line,
      column: entryLoc.column,
    };
  });
  relatedEntries.sort((a, b) => a.span.start - b.span.start);

  return {
    code,
    // Explicit arguments win; otherwise fall back to the error-code metadata.
    severity: severity || meta.severity || "error",
    message: message || meta.message,
    hint: hint ?? meta.hint ?? "",
    file,
    span: primarySpan,
    line: startLoc.line,
    column: startLoc.column,
    endLine: endLoc.line,
    endColumn: endLoc.column,
    related: relatedEntries,
    fixes,
    recoverable,
  };
}
104
+
105
// Map an acorn token to a coarse fastscript token category. Keywords are
// detected first; everything without a recognized label is a "symbol".
function tokenTypeFromAcorn(token) {
  const type = token?.type;
  if (!type) return "unknown";
  if (type.keyword) return "keyword";
  switch (type.label) {
    case "name":
      return "identifier";
    case "num":
      return "number";
    case "string":
      return "string";
    case "regexp":
      return "regex";
    case "template":
      return "template";
    case "eof":
      return "eof";
    default:
      return "symbol";
  }
}

// Construct a fastscript token record carrying both 1-based locations
// (start/end) and raw offsets (range).
function makeToken({ type, value, start, end, file, lineStarts, channel = "syntax", label = "" }) {
  return {
    type,
    value,
    label,
    channel,
    start: offsetToLineColumn(start, lineStarts),
    end: offsetToLineColumn(end, lineStarts),
    range: [start, end],
    file,
  };
}
131
+
132
// Classify the characters in [start, end) as a single trivia token
// ("newline", "whitespace", or generic "trivia") and append it to `tokens`.
// Carriage returns are stripped before classification so CRLF input yields
// the same trivia stream as LF input; ranges still cover the raw slice.
function pushTriviaToken(tokens, source, start, end, file, lineStarts) {
  if (end <= start) return;
  const value = source.slice(start, end);
  const text = value.replace(/\r/g, "");
  if (!text) return;

  if (/^\n+$/.test(text)) {
    tokens.push(makeToken({ type: "newline", value: text, start, end, file, lineStarts, channel: "trivia", label: "newline" }));
    return;
  }
  if (/^[ \t]+$/.test(text)) {
    tokens.push(makeToken({ type: "whitespace", value: text, start, end, file, lineStarts, channel: "trivia", label: "whitespace" }));
    return;
  }
  tokens.push(makeToken({ type: "trivia", value: text, start, end, file, lineStarts, channel: "trivia", label: "trivia" }));
}

// True when `ch` terminates a run during trivia splitting.
// Fix: the original allocated a fresh array literal and called `.includes`
// per character inside the scan loop of a hot tokenizer path; direct
// comparisons avoid that per-character allocation with identical semantics.
function isTriviaBreakChar(ch) {
  return ch === "\r" || ch === "\n" || ch === " " || ch === "\t";
}

// Split the gap [start, end) between significant tokens into homogeneous
// runs (newlines, spaces/tabs, other residue) and push one trivia token per
// run. Lone carriage returns are skipped entirely.
function splitAndPushTrivia(tokens, source, start, end, file, lineStarts) {
  if (end <= start) return;
  let cursor = start;
  while (cursor < end) {
    const ch = source[cursor];
    if (ch === "\r") {
      cursor += 1;
      continue;
    }
    if (ch === "\n") {
      const begin = cursor;
      cursor += 1;
      while (cursor < end && source[cursor] === "\n") cursor += 1;
      pushTriviaToken(tokens, source, begin, cursor, file, lineStarts);
      continue;
    }
    if (ch === " " || ch === "\t") {
      const begin = cursor;
      cursor += 1;
      while (cursor < end && (source[cursor] === " " || source[cursor] === "\t")) cursor += 1;
      pushTriviaToken(tokens, source, begin, cursor, file, lineStarts);
      continue;
    }
    // Any other residue: consume up to the next whitespace boundary.
    const begin = cursor;
    cursor += 1;
    while (cursor < end && !isTriviaBreakChar(source[cursor])) cursor += 1;
    pushTriviaToken(tokens, source, begin, cursor, file, lineStarts);
  }
}
178
+
179
// Run acorn's tokenizer over `source`, collecting significant syntax tokens
// and comments separately. Never throws: on a lexer error the tokens
// gathered so far are returned alongside the error in `lexicalError`.
function tokenizeWithAcorn(source, { file = "", lineStarts }) {
  const significant = [];
  const comments = [];
  let lexicalError = null;

  const onComment = (block, text, start, end, startLoc, endLoc) => {
    comments.push({
      kind: "comment",
      block,
      text,
      start,
      end,
      startLoc,
      endLoc,
      value: source.slice(start, end),
    });
  };

  try {
    const stream = acornTokenizer(source, {
      ecmaVersion: "latest",
      sourceType: "module",
      locations: true,
      ranges: true,
      allowHashBang: true,
      onComment,
    });

    let reachedEof = false;
    while (!reachedEof) {
      const token = stream.getToken();
      significant.push(
        makeToken({
          type: tokenTypeFromAcorn(token),
          value: source.slice(token.start, token.end),
          start: token.start,
          end: token.end,
          file,
          lineStarts,
          label: token.type.label,
          channel: "syntax",
        }),
      );
      // The eof token itself is pushed, then the loop terminates.
      reachedEof = token.type.label === "eof";
    }
  } catch (error) {
    lexicalError = error;
  }

  return { significant, comments, lexicalError };
}
230
+
231
// Interleave comments and whitespace trivia with acorn's significant tokens
// into one full-fidelity token stream, always terminated by an eof token.
function mergeTriviaAndTokens(source, { file = "", lineStarts, significant, comments }) {
  const commentEvents = comments.map((comment) => ({
    kind: "comment",
    start: comment.start,
    end: comment.end,
    value: comment.value,
  }));
  const tokenEvents = significant
    .filter((token) => token.type !== "eof")
    .map((token) => ({ kind: "token", start: token.range[0], end: token.range[1], token }));

  // Order by start offset; on a tie, same-kind events order by end and a
  // comment precedes a token so it sits before the code it annotates.
  const events = commentEvents.concat(tokenEvents).sort((a, b) => {
    if (a.start !== b.start) return a.start - b.start;
    if (a.kind === b.kind) return a.end - b.end;
    return a.kind === "comment" ? -1 : 1;
  });

  const merged = [];
  let cursor = 0;
  for (const event of events) {
    // Fill any gap before this event with whitespace/newline trivia tokens.
    if (event.start > cursor) {
      splitAndPushTrivia(merged, source, cursor, event.start, file, lineStarts);
    }

    if (event.kind === "comment") {
      merged.push(
        makeToken({
          type: "comment",
          value: event.value,
          start: event.start,
          end: event.end,
          file,
          lineStarts,
          label: "comment",
          channel: "trivia",
        }),
      );
    } else {
      merged.push(event.token);
    }

    cursor = Math.max(cursor, event.end);
  }

  if (cursor < source.length) {
    splitAndPushTrivia(merged, source, cursor, source.length, file, lineStarts);
  }

  merged.push(
    makeToken({
      type: "eof",
      value: "",
      start: source.length,
      end: source.length,
      file,
      lineStarts,
      label: "eof",
      channel: "syntax",
    }),
  );

  return merged;
}
285
+
286
// Walk backwards from `idx` to the nearest non-eof token, or null.
function previousSignificant(tokens, idx) {
  let i = idx - 1;
  while (i >= 0) {
    const candidate = tokens[i];
    if (candidate.type !== "eof") return candidate;
    i -= 1;
  }
  return null;
}

// Token at `idx + offset`, or null when out of range or eof.
function nextSignificant(tokens, idx, offset = 1) {
  const target = idx + offset;
  if (target < 0 || target >= tokens.length) return null;
  const candidate = tokens[target];
  if (!candidate || candidate.type === "eof") return null;
  return candidate;
}

// True only for a real token of type "identifier".
function isIdentifierToken(token) {
  return token ? token.type === "identifier" : false;
}

// A token starts a statement when it is the first token, begins a new line,
// or follows one of the boundary tokens (";", "{", "}").
function isStatementStart(tokens, idx) {
  const token = tokens[idx];
  if (!token) return false;
  const prev = previousSignificant(tokens, idx);
  if (!prev) return true;
  return prev.end.line < token.start.line || STATEMENT_BOUNDARY_TOKENS.has(prev.value);
}
314
+
315
// Compute the source span covered by a `type`/`interface`/`enum` declaration
// whose keyword token sits at `idx`. A `type` alias ends at a top-level `;`
// or at the end of the keyword's line; `interface`/`enum` extend through
// their `{ ... }` body plus an optional trailing `;`.
function declarationSpanFromKeyword(source, lineStarts, sourceLength, tokens, idx) {
  const keyword = tokens[idx];
  if (!keyword) return { start: 0, end: 0 };

  if (keyword.value === "type") {
    let end = lineEndOffset(lineStarts, sourceLength, keyword.start.line);
    let depth = 0;
    for (let i = idx + 1; i < tokens.length; i += 1) {
      const candidate = tokens[i];
      if (!candidate || candidate.type === "eof") break;
      // A token on a later line only continues the alias inside brackets.
      if (depth === 0 && candidate.start.line > keyword.start.line) break;
      if (candidate.value === "{" || candidate.value === "(" || candidate.value === "[") depth += 1;
      if (candidate.value === "}" || candidate.value === ")" || candidate.value === "]") depth = Math.max(0, depth - 1);
      end = candidate.range[1];
      if (depth === 0 && candidate.value === ";") break;
    }
    return { start: keyword.range[0], end };
  }

  // Locate the opening brace of the declaration body.
  let openBraceIdx = null;
  for (let i = idx + 1; i < tokens.length; i += 1) {
    const candidate = tokens[i];
    if (!candidate || candidate.type === "eof") break;
    if (candidate.value === "{") {
      openBraceIdx = i;
      break;
    }
    // An `interface` whose header spills onto a new line without a brace is
    // treated as line-scoped.
    if (candidate.end.line > keyword.start.line && keyword.value === "interface") break;
  }

  if (openBraceIdx == null) {
    return {
      start: keyword.range[0],
      end: lineEndOffset(lineStarts, sourceLength, keyword.start.line),
    };
  }

  // Scan to the matching close brace, absorbing a trailing semicolon.
  let depth = 0;
  let end = tokens[openBraceIdx].range[1];
  for (let i = openBraceIdx; i < tokens.length; i += 1) {
    const candidate = tokens[i];
    if (!candidate || candidate.type === "eof") break;
    if (candidate.value === "{") depth += 1;
    if (candidate.value === "}") {
      depth -= 1;
      if (depth === 0) {
        end = candidate.range[1];
        const trailing = nextSignificant(tokens, i, 1);
        if (trailing?.value === ";") end = trailing.range[1];
        break;
      }
    }
    end = candidate.range[1];
  }
  return { start: keyword.range[0], end };
}

// Small factory for a text rewrite operation over [start, end).
function operation(start, end, replacement, kind) {
  return { start, end, replacement, kind };
}
375
+
376
// Scan the significant token stream for fastscript surface syntax
// (`~name = …`, `state name = …`, `fn name(...)`, and type-level
// declarations) and produce the rewrite operations that lower them to plain
// JavaScript, plus diagnostics for malformed or erased constructs.
// `TODO_ERROR` markers anywhere in the raw source are also diagnosed.
function collectRewriteOperations(source, { file = "", lineStarts, significant }) {
  const diagnostics = [];
  const ops = [];
  const sourceLength = source.length;

  // Shared shortcut for emitting a diagnostic anchored in this source.
  const report = (code, span, fixes = []) => {
    diagnostics.push(createDiagnostic({ source, lineStarts, file, code, span, fixes }));
  };

  for (let idx = 0; idx < significant.length; idx += 1) {
    const token = significant[idx];
    if (!token || token.type === "eof") continue;

    // `~name = expr` -> `let name = expr` (reactive binding).
    if (token.value === "~" && isStatementStart(significant, idx)) {
      const name = nextSignificant(significant, idx, 1);
      const assign = nextSignificant(significant, idx, 2);
      if (isIdentifierToken(name) && assign?.value === "=") {
        ops.push(operation(token.range[0], token.range[1], "let ", "reactive"));
      } else if (isIdentifierToken(name)) {
        report("FS1001", { start: token.range[0], end: (assign || name).range[1] }, [
          {
            message: "Insert assignment operator after reactive variable name.",
            span: { start: name.range[1], end: name.range[1] },
            text: " = <expression>",
          },
        ]);
      }
      continue;
    }

    // `state name = expr` -> `let name = expr`.
    if (token.value === "state" && token.type === "identifier" && isStatementStart(significant, idx)) {
      const name = nextSignificant(significant, idx, 1);
      const assign = nextSignificant(significant, idx, 2);
      if (isIdentifierToken(name) && assign?.value === "=") {
        ops.push(operation(token.range[0], token.range[1], "let", "state"));
      } else if (isIdentifierToken(name)) {
        report("FS1002", { start: token.range[0], end: name.range[1] }, [
          {
            message: "Convert to a complete state declaration.",
            span: { start: name.range[1], end: name.range[1] },
            text: " = <expression>",
          },
        ]);
      }
      continue;
    }

    // `fn name(...)` -> `function name(...)`; also recognized directly after
    // `export` on the same line.
    if (token.value === "fn" && token.type === "identifier") {
      const prev = previousSignificant(significant, idx);
      const startsStatement = isStatementStart(significant, idx);
      const followsExport = prev?.value === "export" && prev.end.line === token.start.line;
      if (startsStatement || followsExport) {
        const name = nextSignificant(significant, idx, 1);
        const openParen = nextSignificant(significant, idx, 2);
        if (isIdentifierToken(name) && openParen?.value === "(") {
          ops.push(operation(token.range[0], token.range[1], "function", "function"));
        } else {
          report("FS1003", { start: token.range[0], end: (name || token).range[1] }, [
            {
              message: "Use `fn name(...)` syntax.",
              span: { start: token.range[0], end: token.range[1] },
              text: "fn <name>",
            },
          ]);
        }
      }
      continue;
    }

    // `type X = …` / `interface X { … }` / `enum X { … }` are erased from the
    // runtime output and replaced with a block comment placeholder.
    if (TYPE_DECLARATION_KEYWORDS.has(token.value) && token.type === "identifier" && isStatementStart(significant, idx)) {
      const name = nextSignificant(significant, idx, 1);
      if (!isIdentifierToken(name)) continue;
      const third = nextSignificant(significant, idx, 2);
      const isTypeAlias = token.value === "type" && third?.value === "=";
      const isBlockDeclaration = (token.value === "interface" || token.value === "enum") && Boolean(third);
      if (!isTypeAlias && !isBlockDeclaration) continue;

      const span = declarationSpanFromKeyword(source, lineStarts, sourceLength, significant, idx);
      const snippet = source.slice(span.start, span.end).trim();
      report("FS1004", span, [
        {
          message: "Remove declaration from runtime source.",
          span,
          text: "",
        },
      ]);
      ops.push(
        operation(
          span.start,
          span.end,
          `/* ${snippet || token.value} (removed by fastscript compiler) */`,
          "erase-type",
        ),
      );
    }
  }

  // Placeholder markers left in the source are always reported.
  for (const match of source.matchAll(/\bTODO_ERROR\b/g)) {
    const start = match.index ?? 0;
    report("FS1007", { start, end: start + match[0].length });
  }

  return { ops, diagnostics };
}
519
+
520
// Apply non-overlapping text rewrite operations to `source`. Returns the
// generated code, a per-character map from generated offsets back to source
// offsets (with one trailing sentinel entry for end-of-file), and a record
// of every rewrite that was applied. Overlapping or malformed operations are
// silently dropped (first-wins by start offset).
function applyRewriteOperations(source, ops) {
  if (ops.length === 0) {
    // Identity rewrite: generated offset i maps straight to source offset i.
    const identity = Array.from({ length: source.length + 1 }, (_, i) => i);
    return { code: source, mapGeneratedToSource: identity, rewrites: [] };
  }

  const ordered = [...ops]
    .sort((a, b) => (a.start !== b.start ? a.start - b.start : a.end - b.end))
    .filter((op) => Number.isFinite(op.start) && Number.isFinite(op.end) && op.start <= op.end);

  // Drop any operation overlapping an earlier accepted one.
  const accepted = [];
  let boundary = 0;
  for (const op of ordered) {
    if (op.start < boundary) continue;
    accepted.push(op);
    boundary = op.end;
  }

  const parts = [];
  const mapGeneratedToSource = [];
  const rewrites = [];
  let readCursor = 0;
  let generatedCursor = 0;

  // Copy source text verbatim; each copied character maps to itself.
  const copyThrough = (from, to) => {
    if (to <= from) return;
    parts.push(source.slice(from, to));
    for (let offset = from; offset < to; offset += 1) mapGeneratedToSource.push(offset);
    generatedCursor += to - from;
  };

  // Emit a replacement; generated characters map into the replaced span,
  // clamped to its last source character.
  const emitReplacement = (op) => {
    const replacement = String(op.replacement ?? "");
    parts.push(replacement);
    const spanLength = Math.max(1, op.end - op.start);
    const generatedStart = generatedCursor;
    for (let i = 0; i < replacement.length; i += 1) {
      mapGeneratedToSource.push(op.start + Math.min(i, spanLength - 1));
    }
    generatedCursor += replacement.length;
    rewrites.push({
      kind: op.kind,
      source: { start: op.start, end: op.end },
      generated: { start: generatedStart, end: generatedCursor },
      replacement,
    });
  };

  for (const op of accepted) {
    copyThrough(readCursor, op.start);
    emitReplacement(op);
    readCursor = op.end;
  }
  copyThrough(readCursor, source.length);

  mapGeneratedToSource.push(source.length); // sentinel for EOF offsets
  return { code: parts.join(""), mapGeneratedToSource, rewrites };
}
591
+
592
// Base64 alphabet used by the source-map VLQ encoding.
const BASE64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";

// Encode a signed integer as a base64 VLQ segment (source map v3 format).
// The sign bit occupies the lowest position of the first digit.
function encodeVlq(value) {
  let remaining = value < 0 ? ((-value) << 1) | 1 : value << 1;
  let encoded = "";
  while (true) {
    let digit = remaining & 0x1f;
    remaining >>>= 5;
    if (remaining > 0) digit |= 0x20; // continuation bit
    encoded += BASE64[digit];
    if (remaining === 0) break;
  }
  return encoded;
}
605
+
606
// Convert an offset to 0-based { line, column } for source-map emission.
function offsetToZeroBased(offset, lineStarts, sourceLength) {
  const safe = clamp(offset, 0, sourceLength);
  const lineIndex = binarySearchLine(lineStarts, safe);
  return { line: lineIndex, column: safe - lineStarts[lineIndex] };
}

// Translate a generated-code offset back to the source offset it came from,
// using the per-character map built by applyRewriteOperations. Out-of-range
// offsets are clamped; an empty map yields 0.
function mapGeneratedOffsetToSourceOffset(mapGeneratedToSource, generatedOffset, sourceLength) {
  if (mapGeneratedToSource.length === 0) return 0;
  const index = clamp(generatedOffset, 0, mapGeneratedToSource.length - 1);
  return clamp(Number(mapGeneratedToSource[index] ?? 0), 0, sourceLength);
}
621
+
622
// Emit a source map (v3) with one mapping segment per generated character,
// derived from the generated-to-source offset map. Only a single source file
// is referenced, and no symbol names are recorded.
function buildSourceMap({ source, generated, mapGeneratedToSource, file = "" }) {
  const sourceText = String(source ?? "");
  const generatedText = String(generated ?? "");
  const sourceLineStarts = createLineStarts(sourceText);
  const sourceLength = sourceText.length;

  let mappings = "";
  let prevSourceLine = 0;
  let prevSourceColumn = 0;
  let prevGeneratedColumn = 0;
  let generatedColumn = 0;
  let segmentsOnLine = false;

  // Iterate one index past the end so the loop shape is uniform; the final
  // index always hits the line-break branch and emits nothing.
  for (let i = 0; i <= generatedText.length; i += 1) {
    const atLineBreak = i === generatedText.length || generatedText[i] === "\n";

    if (atLineBreak) {
      if (i < generatedText.length) {
        mappings += ";";
        generatedColumn = 0;
        prevGeneratedColumn = 0;
        segmentsOnLine = false;
      }
      continue;
    }

    const sourceOffset = mapGeneratedOffsetToSourceOffset(mapGeneratedToSource, i, sourceLength);
    const loc = offsetToZeroBased(sourceOffset, sourceLineStarts, sourceLength);
    // Segment fields: generated column delta, source index (always 0),
    // source line delta, source column delta — all VLQ-encoded.
    const segment =
      encodeVlq(generatedColumn - prevGeneratedColumn) +
      encodeVlq(0) +
      encodeVlq(loc.line - prevSourceLine) +
      encodeVlq(loc.column - prevSourceColumn);
    mappings += segmentsOnLine ? `,${segment}` : segment;
    segmentsOnLine = true;
    prevGeneratedColumn = generatedColumn;
    prevSourceLine = loc.line;
    prevSourceColumn = loc.column;
    generatedColumn += 1;
  }

  return {
    version: 3,
    file,
    sources: [file || "<memory>"],
    sourcesContent: [sourceText],
    names: [],
    mappings,
  };
}
671
+
672
// Walk an ESTree parsed from the generated code and annotate every node
// carrying numeric start/end offsets with its location in the ORIGINAL
// fastscript source: fsRange (offsets), fsLoc (1-based locations), plus
// line/column convenience fields. Traversal is iterative to avoid deep
// recursion on large trees.
function remapAstNodeLocations(estree, source, lineStarts, mapGeneratedToSource) {
  if (!estree || typeof estree !== "object") return;
  const sourceLength = source.length;
  const pending = [estree];

  while (pending.length > 0) {
    const node = pending.pop();
    if (!node || typeof node !== "object") continue;

    if (typeof node.start === "number" && typeof node.end === "number") {
      const mappedStart = mapGeneratedOffsetToSourceOffset(mapGeneratedToSource, node.start, sourceLength);
      // Map end-1 then widen by one so zero-length nodes stay zero-length.
      const endBase = mapGeneratedOffsetToSourceOffset(
        mapGeneratedToSource,
        Math.max(node.start, node.end - 1),
        sourceLength,
      );
      const mappedEnd = clamp(endBase + (node.end > node.start ? 1 : 0), mappedStart, sourceLength);
      const startLoc = offsetToLineColumn(mappedStart, lineStarts);
      node.fsRange = [mappedStart, mappedEnd];
      node.fsLoc = { start: startLoc, end: offsetToLineColumn(mappedEnd, lineStarts) };
      node.line = startLoc.line;
      node.column = startLoc.column;
    }

    for (const child of Object.values(node)) {
      if (!child) continue;
      if (Array.isArray(child)) {
        for (const item of child) {
          if (item && typeof item === "object") pending.push(item);
        }
      } else if (typeof child === "object") {
        pending.push(child);
      }
    }
  }
}
705
+
706
// Indices of the first and last non-eof tokens overlapping [start, end),
// or { first: -1, last: -1 } when nothing intersects the span.
function findTokenIndicesForSpan(tokens, start, end) {
  let first = -1;
  let last = -1;
  tokens.forEach((token, i) => {
    if (!token || token.type === "eof") return;
    const [tokenStart, tokenEnd] = token.range;
    const overlaps = tokenEnd > start && tokenStart < end;
    if (!overlaps) return;
    if (first === -1) first = i;
    last = i;
  });
  return { first, last };
}
720
+
721
// Build a lightweight CST: one StatementCST entry per top-level ESTree node,
// carrying its remapped source range, location, raw text slice, and the
// window of token indices it covers.
function buildCst({ source, file, lineStarts, tokens, estree }) {
  const body = Array.isArray(estree?.body) ? estree.body : [];

  const statements = body.map((node) => {
    const start = node?.fsRange?.[0] ?? 0;
    const end = node?.fsRange?.[1] ?? start;
    const { first, last } = findTokenIndicesForSpan(tokens, start, end);
    return {
      type: "StatementCST",
      kind: node.type,
      range: [start, end],
      // Prefer the remapped location; recompute from offsets as a fallback.
      loc: node.fsLoc || {
        start: offsetToLineColumn(start, lineStarts),
        end: offsetToLineColumn(end, lineStarts),
      },
      raw: source.slice(start, end),
      tokenStart: first,
      tokenEnd: last,
    };
  });

  return {
    type: "ProgramCST",
    file,
    range: [0, source.length],
    tokens,
    statements,
  };
}
750
+
751
// Remove duplicate diagnostics (same code, severity, message, file, span)
// and order the survivors by start offset, then code, then message.
function sortAndDedupeDiagnostics(diagnostics) {
  const seen = new Set();
  const unique = diagnostics.filter((diagnostic) => {
    const key = [
      diagnostic.code,
      diagnostic.severity,
      diagnostic.message,
      diagnostic.file || "",
      diagnostic.span?.start ?? 0,
      diagnostic.span?.end ?? 0,
    ].join("|");
    if (seen.has(key)) return false;
    seen.add(key);
    return true;
  });

  return unique.sort((a, b) => {
    const startA = a.span?.start ?? 0;
    const startB = b.span?.start ?? 0;
    if (startA !== startB) return startA - startB;
    if (a.code !== b.code) return a.code.localeCompare(b.code);
    return a.message.localeCompare(b.message);
  });
}
778
+
779
// Public tokenizer entry point: produce the full-fidelity token stream
// (syntax tokens, comments, and whitespace trivia) for a fastscript source.
export function tokenizeFastScript(source, { file = "" } = {}) {
  const text = String(source ?? "");
  const lineStarts = createLineStarts(text);
  const acornResult = tokenizeWithAcorn(text, { file, lineStarts });
  return mergeTriviaAndTokens(text, {
    file,
    lineStarts,
    significant: acornResult.significant,
    comments: acornResult.comments,
  });
}
785
+
786
/**
 * Parse the rewritten (generated) JavaScript with acorn, capturing tokens and
 * comments. Never throws: a parse failure is returned as `error` with a null
 * estree so the caller can convert it into a diagnostic.
 */
function parseGeneratedJavaScript(generated, { file = "" } = {}) {
  const parseTokens = [];
  const parseComments = [];
  const options = {
    ecmaVersion: "latest",
    sourceType: "module",
    allowHashBang: true,
    locations: true,
    ranges: true,
    onToken: parseTokens,
    onComment: parseComments,
  };
  try {
    const estree = acornParse(generated, options);
    return { estree, parseTokens, parseComments, error: null };
  } catch (error) {
    return { estree: null, parseTokens, parseComments, error };
  }
}
804
+
805
/**
 * Guarantee a Program-shaped object: non-objects (including null) are
 * replaced with an empty module Program, and a missing/invalid `body` is
 * normalized to an empty array in place.
 */
function ensureProgramShape(program) {
  const isUsableObject = program !== null && typeof program === "object";
  if (!isUsableObject) {
    return { type: "Program", sourceType: "module", body: [], start: 0, end: 0 };
  }
  if (!Array.isArray(program.body)) {
    program.body = [];
  }
  return program;
}
818
+
819
/**
 * Build a throwable Error summarizing parser diagnostics.
 *
 * Fix: previously an empty `diagnostics` array made `primary` undefined and
 * the `.file` access below threw a TypeError, masking the real failure. A
 * generic FS1000 fallback primary now guards that path.
 *
 * @returns {Error} with `code`, `status`, `details` (all diagnostics), and
 *   `source` attached for callers.
 */
function createParserError({ source, file, diagnostics }) {
  // Headline: first non-warning diagnostic, else the first one, else a
  // defensive generic entry so error construction itself cannot crash.
  const primary =
    diagnostics.find((diagnostic) => diagnostic.severity !== "warning") ||
    diagnostics[0] ||
    { file, line: 1, column: 0, code: "FS1000", message: "parse failed" };
  const lines = diagnostics.map((diagnostic) => formatDiagnostic(diagnostic));
  const head = `${primary.file || file || "<memory>"}:${primary.line}:${primary.column} ${primary.code} ${primary.message}`;
  const error = new Error(`${head}\n${lines.join("\n")}`);
  error.code = primary.code || "FS1000";
  error.status = 1;
  error.details = diagnostics;
  error.source = source;
  return error;
}
830
+
831
/**
 * Parse FastScript source into a Program record carrying the remapped estree,
 * a flat CST, the merged token stream, diagnostics, the generated JS, and its
 * source map.
 *
 * @param {string} source - FastScript text.
 * @param {object} [options]
 * @param {string} [options.file] - file name used in diagnostics.
 * @param {string} [options.mode] - "strict" throws on any non-warning
 *   diagnostic; "lenient" returns the program with diagnostics attached.
 * @param {boolean} [options.recover] - when false, any diagnostic at all
 *   (warnings included) throws.
 * @throws {Error} via createParserError when the mode/recover policy rejects.
 */
export function parseFastScript(source, { file = "", mode = "lenient", recover = true } = {}) {
  const text = String(source ?? "");
  const lineStarts = createLineStarts(text);

  // Lex once, then weave comments/whitespace back in as trivia tokens.
  const lexed = tokenizeWithAcorn(text, { file, lineStarts });
  const tokens = mergeTriviaAndTokens(text, {
    file,
    lineStarts,
    significant: lexed.significant,
    comments: lexed.comments,
  });

  const collected = [];

  // A lexer failure becomes an FS1010 diagnostic anchored at the failing
  // line/column, clamped into the source bounds.
  if (lexed.lexicalError) {
    const loc = lexed.lexicalError.loc || { line: 1, column: 0 };
    const lineIndex = clamp((loc.line || 1) - 1, 0, lineStarts.length - 1);
    const lineStart = lineStarts[lineIndex] ?? 0;
    const offset = clamp(lineStart + (loc.column || 0), 0, text.length);
    collected.push(
      createDiagnostic({
        source: text,
        lineStarts,
        file,
        code: "FS1010",
        span: { start: offset, end: offset + 1 },
        message: lexed.lexicalError.message || resolveErrorMeta("FS1010").message,
      }),
    );
  }

  // FastScript -> JS rewrite pass; the pass reports its own diagnostics.
  const rewriteInfo = collectRewriteOperations(text, {
    file,
    lineStarts,
    significant: lexed.significant,
  });
  collected.push(...rewriteInfo.diagnostics);

  const emitted = applyRewriteOperations(text, rewriteInfo.ops);
  const reparse = parseGeneratedJavaScript(emitted.code, { file });

  // A parse failure in the generated JS is mapped back to a source offset and
  // reported as FS1005 (acorn's trailing " (line:col)" suffix stripped).
  if (reparse.error) {
    const parseOffset = clamp(reparse.error.pos ?? 0, 0, emitted.code.length);
    const sourceOffset = mapGeneratedOffsetToSourceOffset(emitted.mapGeneratedToSource, parseOffset, text.length);
    collected.push(
      createDiagnostic({
        source: text,
        lineStarts,
        file,
        code: "FS1005",
        span: { start: sourceOffset, end: Math.min(sourceOffset + 1, text.length) },
        message: reparse.error.message.replace(/ \(\d+:\d+\)$/, ""),
      }),
    );
  }

  const estree = ensureProgramShape(reparse.estree);
  remapAstNodeLocations(estree, text, lineStarts, emitted.mapGeneratedToSource);

  const cst = buildCst({ source: text, file, lineStarts, tokens, estree });
  const sourceMap = buildSourceMap({
    source: text,
    generated: emitted.code,
    mapGeneratedToSource: emitted.mapGeneratedToSource,
    file,
  });

  const finalDiagnostics = sortAndDedupeDiagnostics(collected);

  // Convenience mirror of the estree body with a 1-based `line` per node.
  const body = Array.isArray(estree.body)
    ? estree.body.map((node) => ({
        ...node,
        line: node?.fsLoc?.start?.line ?? node?.loc?.start?.line ?? 1,
      }))
    : [];

  const program = {
    type: "Program",
    version: FASTSCRIPT_AST_VERSION,
    file,
    mode,
    body,
    estree,
    cst,
    tokens,
    diagnostics: finalDiagnostics,
    transformedCode: emitted.code,
    sourceMap,
    mapGeneratedToSource: emitted.mapGeneratedToSource,
    rewrites: emitted.rewrites,
    source: text,
  };

  // Rejection policy: strict mode rejects on any non-warning diagnostic;
  // recover=false rejects on any diagnostic at all.
  const hasBlocking = finalDiagnostics.some((diagnostic) => diagnostic.severity !== "warning");
  if ((mode === "strict" && hasBlocking) || (!recover && finalDiagnostics.length > 0)) {
    throw createParserError({ source: text, file, diagnostics: finalDiagnostics });
  }

  return program;
}
934
+
935
/**
 * Render a source map object as an inline `//# sourceMappingURL=` comment
 * carrying the JSON payload as a base64 data: URL.
 */
export function buildInlineSourceMapComment(map) {
  const json = JSON.stringify(map);
  const encoded = Buffer.from(json, "utf8").toString("base64");
  return "//# sourceMappingURL=data:application/json;charset=utf-8;base64," + encoded;
}
939
+
940
/**
 * Compile FastScript to JavaScript. Thin wrapper over parseFastScript: the
 * emitted code, map, CST, and diagnostics are all derived from the parse
 * result. When `inlineSourceMap` is true the data-URL map comment is appended
 * to the generated code.
 */
export function compileFastScript(source, { file = "", mode = "lenient", recover = true, inlineSourceMap = false } = {}) {
  const ast = parseFastScript(source, { file, mode, recover });
  const map = ast.sourceMap;
  const inlineComment = buildInlineSourceMapComment(map);

  let code = ast.transformedCode;
  if (inlineSourceMap) {
    code = `${code}\n${inlineComment}`;
  }

  return {
    code,
    map,
    mapText: JSON.stringify(map),
    inlineSourceMap: inlineComment,
    ast,
    cst: ast.cst,
    diagnostics: ast.diagnostics,
    rewrites: ast.rewrites,
  };
}
958
+
959
/**
 * Render a fix's replacement text for single-line display: newlines are
 * escaped as "\n", and anything longer than 32 characters is truncated to 29
 * plus an ellipsis.
 */
function fixText(fix) {
  if (!fix) return "";
  const escaped = String(fix.text ?? "").split("\n").join("\\n");
  if (escaped.length > 32) {
    return `${escaped.slice(0, 29)}...`;
  }
  return escaped;
}
964
+
965
/**
 * Format one diagnostic as a single display line:
 * `file:line:col code severity message` followed by optional ` hint=`,
 * ` related=` (one per related entry), and ` fix=` (one per fix) segments.
 */
export function formatDiagnostic(diagnostic) {
  const path = diagnostic.file || "<memory>";

  // Inline rendering of a fix's replacement text: newlines escaped, capped at
  // 32 characters (same contract as the fixText helper).
  const renderFixText = (fix) => {
    if (!fix) return "";
    const escaped = String(fix.text ?? "").replace(/\n/g, "\\n");
    return escaped.length > 32 ? `${escaped.slice(0, 29)}...` : escaped;
  };

  const parts = [
    `${path}:${diagnostic.line}:${diagnostic.column} ${diagnostic.code} ${diagnostic.severity} ${diagnostic.message}`,
  ];
  if (diagnostic.hint) {
    parts.push(` hint=${diagnostic.hint}`);
  }
  for (const entry of diagnostic.related || []) {
    parts.push(` related=${entry.file || path}:${entry.line}:${entry.column} ${entry.message}`);
  }
  for (const fix of diagnostic.fixes || []) {
    const start = fix.span?.start ?? 0;
    const end = fix.span?.end ?? start;
    parts.push(` fix=[${start}-${end}]=>${renderFixText(fix)}`);
  }
  return parts.join("").trimEnd();
}