arc-lang 0.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +148 -0
- package/dist/ast.d.ts +298 -0
- package/dist/ast.js +2 -0
- package/dist/build.d.ts +7 -0
- package/dist/build.js +138 -0
- package/dist/codegen-js.d.ts +2 -0
- package/dist/codegen-js.js +168 -0
- package/dist/codegen.d.ts +2 -0
- package/dist/codegen.js +364 -0
- package/dist/errors.d.ts +52 -0
- package/dist/errors.js +229 -0
- package/dist/formatter.d.ts +5 -0
- package/dist/formatter.js +361 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +165 -0
- package/dist/interpreter.d.ts +39 -0
- package/dist/interpreter.js +668 -0
- package/dist/ir.d.ts +126 -0
- package/dist/ir.js +610 -0
- package/dist/lexer.d.ts +79 -0
- package/dist/lexer.js +335 -0
- package/dist/linter.d.ts +15 -0
- package/dist/linter.js +382 -0
- package/dist/lsp.d.ts +1 -0
- package/dist/lsp.js +253 -0
- package/dist/modules.d.ts +24 -0
- package/dist/modules.js +115 -0
- package/dist/optimizer.d.ts +17 -0
- package/dist/optimizer.js +481 -0
- package/dist/package-manager.d.ts +31 -0
- package/dist/package-manager.js +180 -0
- package/dist/parser.d.ts +42 -0
- package/dist/parser.js +779 -0
- package/dist/repl.d.ts +1 -0
- package/dist/repl.js +120 -0
- package/dist/security.d.ts +48 -0
- package/dist/security.js +198 -0
- package/dist/semantic.d.ts +7 -0
- package/dist/semantic.js +327 -0
- package/dist/typechecker.d.ts +7 -0
- package/dist/typechecker.js +132 -0
- package/dist/version.d.ts +26 -0
- package/dist/version.js +71 -0
- package/package.json +51 -0
package/dist/linter.js
ADDED
|
@@ -0,0 +1,382 @@
|
|
|
1
|
+
// Arc Language Linter
|
|
2
|
+
// Checks for common code quality issues
|
|
3
|
+
import { lex } from "./lexer.js";
|
|
4
|
+
import { parse } from "./parser.js";
|
|
5
|
+
const DEFAULT_OPTIONS = {
    maxLineLength: 100,
    file: "<stdin>",
};
/**
 * Lint Arc source code and return an array of diagnostics, each shaped as
 * { severity, rule, message, file, line, col }, sorted by position.
 *
 * Purely textual checks (line length) always run; scope/AST-based checks
 * run only when the source lexes and parses successfully.
 *
 * @param source  Arc source text.
 * @param options Optional overrides for DEFAULT_OPTIONS (maxLineLength, file).
 * @returns Array of diagnostic objects.
 */
export function lint(source, options = {}) {
    const opts = { ...DEFAULT_OPTIONS, ...options };
    const diagnostics = [];
    // Diagnostic emitters, closed over `diagnostics` and `opts`.
    function warn(rule, message, loc) {
        diagnostics.push({ severity: "warning", rule, message, file: opts.file, line: loc.line, col: loc.col });
    }
    function info(rule, message, loc) {
        diagnostics.push({ severity: "info", rule, message, file: opts.file, line: loc.line, col: loc.col });
    }
    // Check line lengths. Strip a trailing '\r' so CRLF sources are not
    // over-counted by one column.
    const lines = source.split('\n');
    for (let i = 0; i < lines.length; i++) {
        const len = lines[i].endsWith('\r') ? lines[i].length - 1 : lines[i].length;
        if (len > opts.maxLineLength) {
            warn("line-length", `Line exceeds ${opts.maxLineLength} characters (${len})`, { line: i + 1, col: opts.maxLineLength + 1 });
        }
    }
    // Parse
    let ast;
    try {
        const tokens = lex(source);
        ast = parse(tokens);
    }
    catch {
        return diagnostics; // can't lint if we can't parse
    }
    // Lexical scope used to track declarations for the shadowing / unused /
    // unnecessary-mut checks. Each entry: { name, loc, mutable, used, mutated, kind }.
    class LintScope {
        parent;
        vars = new Map();
        children = [];
        constructor(parent) {
            this.parent = parent;
        }
        // Record a declaration, warning if it shadows an outer binding.
        // (Parameter renamed from `info` — it shadowed the info() emitter above.)
        define(name, entry) {
            if (this.parent && this.parent.lookup(name)) {
                warn("shadowed-variable", `Variable '${name}' shadows a variable from an outer scope`, entry.loc);
            }
            this.vars.set(name, entry);
        }
        // Find an entry here or in any enclosing scope.
        // (The former `lookupAll` was a byte-for-byte duplicate of this method
        // and has been consolidated into it.)
        lookup(name) {
            return this.vars.get(name) ?? this.parent?.lookup(name);
        }
        markUsed(name) {
            const v = this.vars.get(name);
            if (v) {
                v.used = true;
                return;
            }
            this.parent?.markUsed(name);
        }
        markMutated(name) {
            const v = this.vars.get(name);
            if (v) {
                v.mutated = true;
                return;
            }
            this.parent?.markMutated(name);
        }
    }
    // Naming conventions
    function isSnakeCase(name) {
        return /^[a-z_][a-z0-9_]*$/.test(name);
    }
    function isPascalCase(name) {
        return /^[A-Z][a-zA-Z0-9]*$/.test(name);
    }
    function checkVarNaming(name, loc) {
        if (name === "_")
            return; // '_' is the conventional throwaway name
        if (!isSnakeCase(name)) {
            info("naming-convention", `Variable '${name}' should use snake_case`, loc);
        }
    }
    function checkFnNaming(name, loc) {
        if (!isSnakeCase(name)) {
            info("naming-convention", `Function '${name}' should use snake_case`, loc);
        }
    }
    function checkTypeNaming(name, loc) {
        if (!isPascalCase(name)) {
            info("naming-convention", `Type '${name}' should use PascalCase`, loc);
        }
    }
    // Walk an expression: mark identifier uses, and open child scopes for
    // constructs that bind names (lambdas, match arms, comprehensions, blocks).
    function analyzeExpr(expr, scope) {
        switch (expr.kind) {
            case "IntLiteral":
            case "FloatLiteral":
            case "BoolLiteral":
            case "NilLiteral":
            case "StringLiteral":
                break;
            case "StringInterp":
                for (const part of expr.parts) {
                    if (typeof part !== "string")
                        analyzeExpr(part, scope);
                }
                break;
            case "Identifier":
                scope.markUsed(expr.name);
                break;
            case "BinaryExpr":
                analyzeExpr(expr.left, scope);
                analyzeExpr(expr.right, scope);
                break;
            case "UnaryExpr":
                analyzeExpr(expr.operand, scope);
                break;
            case "CallExpr":
                analyzeExpr(expr.callee, scope);
                for (const arg of expr.args)
                    analyzeExpr(arg, scope);
                break;
            case "MemberExpr":
                analyzeExpr(expr.object, scope);
                break;
            case "IndexExpr":
                analyzeExpr(expr.object, scope);
                analyzeExpr(expr.index, scope);
                break;
            case "PipelineExpr":
                analyzeExpr(expr.left, scope);
                analyzeExpr(expr.right, scope);
                break;
            case "IfExpr":
                analyzeExpr(expr.condition, scope);
                analyzeExpr(expr.then, scope);
                if (expr.else_)
                    analyzeExpr(expr.else_, scope);
                break;
            case "MatchExpr":
                analyzeExpr(expr.subject, scope);
                for (const arm of expr.arms) {
                    // Each arm gets its own scope so pattern bindings don't leak.
                    const armScope = new LintScope(scope);
                    scope.children.push(armScope);
                    analyzePattern(arm.pattern, armScope);
                    if (arm.guard)
                        analyzeExpr(arm.guard, armScope);
                    analyzeExpr(arm.body, armScope);
                }
                break;
            case "LambdaExpr": {
                const lambdaScope = new LintScope(scope);
                scope.children.push(lambdaScope);
                for (const p of expr.params) {
                    lambdaScope.define(p, { name: p, loc: expr.loc, mutable: false, used: false, mutated: false, kind: "parameter" });
                }
                analyzeExpr(expr.body, lambdaScope);
                break;
            }
            case "ListLiteral":
                for (const el of expr.elements)
                    analyzeExpr(el, scope);
                break;
            case "MapLiteral":
                for (const entry of expr.entries) {
                    // Keys may be plain strings or computed expressions.
                    if (typeof entry.key !== "string")
                        analyzeExpr(entry.key, scope);
                    analyzeExpr(entry.value, scope);
                }
                break;
            case "ListComprehension": {
                analyzeExpr(expr.iterable, scope);
                const compScope = new LintScope(scope);
                scope.children.push(compScope);
                compScope.define(expr.variable, { name: expr.variable, loc: expr.loc, mutable: false, used: false, mutated: false, kind: "loop-var" });
                analyzeExpr(expr.expr, compScope);
                if (expr.filter)
                    analyzeExpr(expr.filter, compScope);
                break;
            }
            case "ToolCallExpr":
                analyzeExpr(expr.arg, scope);
                if (expr.body)
                    analyzeExpr(expr.body, scope);
                break;
            case "RangeExpr":
                analyzeExpr(expr.start, scope);
                analyzeExpr(expr.end, scope);
                break;
            case "BlockExpr": {
                const blockScope = new LintScope(scope);
                scope.children.push(blockScope);
                analyzeStmts(expr.stmts, blockScope);
                break;
            }
            case "AsyncExpr":
                analyzeExpr(expr.body, scope);
                break;
            case "AwaitExpr":
                analyzeExpr(expr.expr, scope);
                break;
            case "FetchExpr":
                for (const t of expr.targets)
                    analyzeExpr(t, scope);
                break;
        }
    }
    // Bind the names introduced by a match pattern into `scope`.
    function analyzePattern(pat, scope) {
        if (pat.kind === "BindingPattern") {
            scope.define(pat.name, { name: pat.name, loc: pat.loc, mutable: false, used: false, mutated: false, kind: "variable" });
        }
        else if (pat.kind === "ArrayPattern") {
            for (const el of pat.elements)
                analyzePattern(el, scope);
        }
        else if (pat.kind === "OrPattern") {
            for (const p of pat.patterns)
                analyzePattern(p, scope);
        }
    }
    function hasReturnInBlock(stmts) {
        // Returns index of first ret statement, or -1
        for (let i = 0; i < stmts.length; i++) {
            const s = stmts[i];
            if (s.kind === "ExprStmt" && s.expr.kind === "CallExpr" &&
                s.expr.callee.kind === "Identifier" && s.expr.callee.name === "ret") {
                return i;
            }
            // Check for `ret` keyword used as identifier in expression position
            if (s.kind === "ExprStmt" && s.expr.kind === "Identifier" && s.expr.name === "ret") {
                return i;
            }
        }
        return -1;
    }
    // Analyze a statement list within `scope`, then report unused /
    // never-mutated declarations made directly in that scope.
    function analyzeStmts(stmts, scope, isTopLevel = false) {
        // First pass: register functions so uses before the definition resolve.
        for (const stmt of stmts) {
            if (stmt.kind === "FnStmt") {
                scope.define(stmt.name, {
                    name: stmt.name, loc: stmt.loc, mutable: false,
                    used: isTopLevel, // top-level fns are considered "used"
                    mutated: false, kind: "function"
                });
            }
        }
        // Check for unreachable code after ret
        const retIdx = hasReturnInBlock(stmts);
        if (retIdx >= 0 && retIdx < stmts.length - 1) {
            warn("unreachable-code", "Unreachable code after ret", stmts[retIdx + 1].loc);
        }
        for (const stmt of stmts) {
            analyzeStmt(stmt, scope, isTopLevel);
        }
        // Check unused variables in this scope.
        // NOTE: the loop variable was renamed from `info` to `entry` — it
        // previously shadowed the info() diagnostic emitter defined above.
        for (const [name, entry] of scope.vars) {
            if (name === "_")
                continue;
            if (!entry.used) {
                if (entry.kind === "import") {
                    warn("unused-import", `Unused import: '${name}'`, entry.loc);
                }
                else if (entry.kind === "variable" || entry.kind === "destructured") {
                    warn("unused-variable", `Unused variable: '${name}'`, entry.loc);
                }
            }
            if (entry.mutable && !entry.mutated && entry.used) {
                warn("unnecessary-mut", `Variable '${name}' is never mutated; use 'let' instead of 'let mut'`, entry.loc);
            }
        }
    }
    function analyzeStmt(stmt, scope, isTopLevel = false) {
        switch (stmt.kind) {
            case "LetStmt": {
                analyzeExpr(stmt.value, scope);
                if (typeof stmt.name === "string") {
                    checkVarNaming(stmt.name, stmt.loc);
                    scope.define(stmt.name, {
                        name: stmt.name, loc: stmt.loc, mutable: stmt.mutable,
                        used: false, mutated: false, kind: "variable"
                    });
                }
                else {
                    // Destructuring: bind each name in the pattern.
                    for (const n of stmt.name.names) {
                        checkVarNaming(n, stmt.loc);
                        scope.define(n, {
                            name: n, loc: stmt.loc, mutable: stmt.mutable,
                            used: false, mutated: false, kind: "destructured"
                        });
                    }
                }
                break;
            }
            case "FnStmt": {
                checkFnNaming(stmt.name, stmt.loc);
                // Check for missing pub on top-level functions
                if (isTopLevel && !stmt.pub) {
                    info("missing-pub", `Function '${stmt.name}' could be exported with 'pub'`, stmt.loc);
                }
                const fnScope = new LintScope(scope);
                scope.children.push(fnScope);
                for (const p of stmt.params) {
                    fnScope.define(p, { name: p, loc: stmt.loc, mutable: false, used: false, mutated: false, kind: "parameter" });
                }
                analyzeExpr(stmt.body, fnScope);
                // Check empty body
                if (stmt.body.kind === "BlockExpr" && stmt.body.stmts.length === 0) {
                    warn("empty-block", `Function '${stmt.name}' has an empty body`, stmt.loc);
                }
                break;
            }
            case "ForStmt": {
                analyzeExpr(stmt.iterable, scope);
                const forScope = new LintScope(scope);
                scope.children.push(forScope);
                forScope.define(stmt.variable, {
                    name: stmt.variable, loc: stmt.loc, mutable: false,
                    used: false, mutated: false, kind: "loop-var"
                });
                analyzeExpr(stmt.body, forScope);
                // Check empty body
                if (stmt.body.kind === "BlockExpr" && stmt.body.stmts.length === 0) {
                    warn("empty-block", "For loop has an empty body", stmt.loc);
                }
                break;
            }
            case "DoStmt":
                analyzeExpr(stmt.body, scope);
                analyzeExpr(stmt.condition, scope);
                if (stmt.body.kind === "BlockExpr" && stmt.body.stmts.length === 0) {
                    warn("empty-block", "Do loop has an empty body", stmt.loc);
                }
                break;
            case "ExprStmt":
                analyzeExpr(stmt.expr, scope);
                break;
            case "UseStmt": {
                if (stmt.imports) {
                    for (const imp of stmt.imports) {
                        scope.define(imp, { name: imp, loc: stmt.loc, mutable: false, used: false, mutated: false, kind: "import" });
                    }
                }
                else if (!stmt.wildcard) {
                    // `use a.b.c` binds the final path segment as the module name.
                    const moduleName = stmt.path[stmt.path.length - 1];
                    scope.define(moduleName, { name: moduleName, loc: stmt.loc, mutable: false, used: false, mutated: false, kind: "import" });
                }
                break;
            }
            case "TypeStmt":
                checkTypeNaming(stmt.name, stmt.loc);
                scope.define(stmt.name, { name: stmt.name, loc: stmt.loc, mutable: false, used: false, mutated: false, kind: "type" });
                break;
            case "AssignStmt":
                scope.markMutated(stmt.target);
                scope.markUsed(stmt.target);
                analyzeExpr(stmt.value, scope);
                break;
            case "MemberAssignStmt":
                analyzeExpr(stmt.object, scope);
                analyzeExpr(stmt.value, scope);
                break;
            case "IndexAssignStmt":
                analyzeExpr(stmt.object, scope);
                analyzeExpr(stmt.index, scope);
                analyzeExpr(stmt.value, scope);
                break;
        }
    }
    const globalScope = new LintScope();
    analyzeStmts(ast.stmts, globalScope, true);
    // Sort by line/col
    diagnostics.sort((a, b) => a.line - b.line || a.col - b.col);
    return diagnostics;
}
|
|
379
|
+
/**
 * Render a diagnostic as a one-line, human-readable string:
 * "<file>:<line>:<col> [SEV] <message> (<rule>)".
 */
export function formatDiagnostic(d) {
    let sev;
    if (d.severity === "error")
        sev = "ERROR";
    else if (d.severity === "warning")
        sev = "WARN";
    else
        sev = "INFO";
    return `${d.file}:${d.line}:${d.col} [${sev}] ${d.message} (${d.rule})`;
}
|
package/dist/lsp.d.ts
ADDED
|
@@ -0,0 +1 @@
|
|
|
1
|
+
// Empty export: marks this declaration file as a module (no public API).
export {};
|
package/dist/lsp.js
ADDED
|
@@ -0,0 +1,253 @@
|
|
|
1
|
+
// Arc Language Server Protocol Implementation
|
|
2
|
+
import { createConnection, TextDocuments, ProposedFeatures, TextDocumentSyncKind, CompletionItemKind, MarkupKind, SymbolKind, DiagnosticSeverity, } from "vscode-languageserver/node.js";
|
|
3
|
+
import { TextDocument } from "vscode-languageserver-textdocument";
|
|
4
|
+
import { lex } from "./lexer.js";
|
|
5
|
+
import { parse, ParseError } from "./parser.js";
|
|
6
|
+
import { analyze } from "./semantic.js";
|
|
7
|
+
import { typecheck } from "./typechecker.js";
|
|
8
|
+
// LSP connection over stdio with all proposed features enabled.
const connection = createConnection(ProposedFeatures.all);
// Tracks open text documents synced from the client.
const documents = new TextDocuments(TextDocument);
// Cache parsed results per document
// (keyed by URI; values are { program, version } written by validateDocument).
const documentCache = new Map();
connection.onInitialize((_params) => {
    return {
        capabilities: {
            // Full sync: the client resends the whole document on each change.
            textDocumentSync: TextDocumentSyncKind.Full,
            completionProvider: { triggerCharacters: [".", "@"] },
            hoverProvider: true,
            definitionProvider: true,
            documentSymbolProvider: true,
        },
    };
});
|
|
23
|
+
// --- Diagnostics ---
// Lex/parse/analyze/typecheck a document and publish the results to the
// client. On a successful parse the program is cached for completion,
// hover, go-to-definition and document symbols.
function validateDocument(textDocument) {
    const src = textDocument.getText();
    const diags = [];
    const severityOf = (level) => level === "error" ? DiagnosticSeverity.Error : DiagnosticSeverity.Warning;
    try {
        const program = parse(lex(src));
        // Cache the successful parse for the other providers.
        documentCache.set(textDocument.uri, { program, version: textDocument.version });
        // Semantic analysis
        for (const d of analyze(program)) {
            diags.push({
                severity: severityOf(d.level),
                range: locToRange(d.loc),
                message: d.message,
                source: "arc",
            });
        }
        // Type checking (loc may be absent; fall back to the file start)
        for (const d of typecheck(program)) {
            const range = d.loc
                ? locToRange(d.loc)
                : { start: { line: 0, character: 0 }, end: { line: 0, character: 1 } };
            diags.push({
                severity: severityOf(d.level),
                range,
                message: d.message,
                source: "arc-typecheck",
            });
        }
    }
    catch (e) {
        if (e instanceof ParseError) {
            diags.push({
                severity: DiagnosticSeverity.Error,
                range: locToRange(e.loc),
                message: e.message,
                source: "arc-parser",
            });
        }
        else if (e instanceof Error) {
            // Lexer or other errors - show at start
            diags.push({
                severity: DiagnosticSeverity.Error,
                range: { start: { line: 0, character: 0 }, end: { line: 0, character: 1 } },
                message: e.message,
                source: "arc",
            });
        }
    }
    connection.sendDiagnostics({ uri: textDocument.uri, diagnostics: diags });
}
|
|
74
|
+
// Convert a 1-based Arc source location to a 0-based LSP range. Token
// lengths are not tracked, so the range is a fixed 10-character span.
function locToRange(loc) {
    const start = {
        line: Math.max(0, loc.line - 1),
        character: Math.max(0, loc.col - 1),
    };
    return { start, end: { line: start.line, character: start.character + 10 } };
}
|
|
82
|
+
// Re-validate (and re-cache the parse of) a document on every client edit.
documents.onDidChangeContent((change) => {
    validateDocument(change.document);
});
|
|
85
|
+
// --- Completion ---
// Reserved words of the Arc language, offered as keyword completions
// and recognized by the hover provider.
const ARC_KEYWORDS = [
    "fn", "let", "mut", "type", "use", "pub", "match", "if", "el",
    "for", "in", "do", "ret", "async", "await", "nil", "true", "false",
    "and", "or", "not", "while", "until", "where", "matching",
];
// Built-in function names surfaced as completions and hover targets.
const STDLIB_FUNCTIONS = [
    "print", "println", "len", "push", "pop", "map", "filter", "reduce",
    "range", "keys", "values", "entries", "str", "int", "float",
    "sort", "reverse", "join", "split", "trim", "contains",
    "starts_with", "ends_with", "replace", "to_upper", "to_lower",
    "slice", "flat", "flat_map", "zip", "enumerate", "sum", "min", "max",
    "abs", "head", "tail", "take", "drop", "find", "any", "all", "count",
    "unique", "group_by", "sort_by", "chunk", "assert", "assert_eq",
    "Some", "None", "Ok", "Err", "read_file", "write_file",
];
|
|
101
|
+
// Completion: offer language keywords, stdlib builtins, and top-level
// symbols from the most recent successful parse of the document.
// Fix: the original fetched the TextDocument into an unused local
// (`const doc = documents.get(...)`); that dead lookup is removed.
connection.onCompletion((params) => {
    const items = [];
    // Keywords
    for (const kw of ARC_KEYWORDS) {
        items.push({ label: kw, kind: CompletionItemKind.Keyword });
    }
    // Stdlib
    for (const fn of STDLIB_FUNCTIONS) {
        items.push({ label: fn, kind: CompletionItemKind.Function });
    }
    // In-scope symbols from cached program
    const cached = documentCache.get(params.textDocument.uri);
    if (cached) {
        for (const stmt of cached.program.stmts) {
            if (stmt.kind === "FnStmt") {
                items.push({ label: stmt.name, kind: CompletionItemKind.Function, detail: `fn ${stmt.name}(${stmt.params.join(", ")})` });
            }
            else if (stmt.kind === "LetStmt" && typeof stmt.name === "string") {
                items.push({ label: stmt.name, kind: CompletionItemKind.Variable });
            }
            else if (stmt.kind === "TypeStmt") {
                items.push({ label: stmt.name, kind: CompletionItemKind.Class });
            }
        }
    }
    return items;
});
|
|
129
|
+
// --- Hover ---
// Hover: show a signature snippet for top-level declarations matching the
// word under the cursor, falling back to keyword/stdlib badges.
connection.onHover((params) => {
    const doc = documents.get(params.textDocument.uri);
    const cached = documentCache.get(params.textDocument.uri);
    if (!doc || !cached)
        return null;
    const word = getWordAtPosition(doc, params.position);
    if (!word)
        return null;
    // Small helper: wrap markdown text in a Hover result.
    const md = (value) => ({ contents: { kind: MarkupKind.Markdown, value } });
    // Search for a matching top-level declaration in the cached program.
    for (const stmt of cached.program.stmts) {
        if (stmt.kind === "FnStmt" && stmt.name === word) {
            return md(`\`\`\`arc\nfn ${stmt.name}(${stmt.params.join(", ")})\n\`\`\``);
        }
        if (stmt.kind === "LetStmt" && typeof stmt.name === "string" && stmt.name === word) {
            const mut = stmt.mutable ? "mut " : "";
            return md(`\`\`\`arc\nlet ${mut}${stmt.name}\n\`\`\``);
        }
        if (stmt.kind === "TypeStmt" && stmt.name === word) {
            return md(`\`\`\`arc\ntype ${stmt.name}\n\`\`\``);
        }
    }
    // Check keywords
    if (ARC_KEYWORDS.includes(word)) {
        return md(`**keyword** \`${word}\``);
    }
    // Check stdlib
    if (STDLIB_FUNCTIONS.includes(word)) {
        return md(`**stdlib** \`${word}\``);
    }
    return null;
});
|
|
170
|
+
// --- Go to Definition ---
// Go-to-definition: jump to the first top-level fn / let / type whose name
// matches the word under the cursor (same-file only).
connection.onDefinition((params) => {
    const doc = documents.get(params.textDocument.uri);
    const cached = documentCache.get(params.textDocument.uri);
    if (!doc || !cached)
        return null;
    const word = getWordAtPosition(doc, params.position);
    if (!word)
        return null;
    const matches = (stmt) =>
        (stmt.kind === "FnStmt" && stmt.name === word) ||
        (stmt.kind === "LetStmt" && typeof stmt.name === "string" && stmt.name === word) ||
        (stmt.kind === "TypeStmt" && stmt.name === word);
    const decl = cached.program.stmts.find(matches);
    if (!decl)
        return null;
    return { uri: params.textDocument.uri, range: locToRange(decl.loc) };
});
|
|
203
|
+
// --- Document Symbols ---
// Outline view: list top-level functions, simple let bindings, and type
// declarations from the cached parse.
connection.onDocumentSymbol((params) => {
    const cached = documentCache.get(params.textDocument.uri);
    if (!cached)
        return [];
    const symbols = [];
    for (const stmt of cached.program.stmts) {
        let kind = null;
        if (stmt.kind === "FnStmt") {
            kind = SymbolKind.Function;
        }
        else if (stmt.kind === "LetStmt" && typeof stmt.name === "string") {
            kind = SymbolKind.Variable;
        }
        else if (stmt.kind === "TypeStmt") {
            kind = SymbolKind.Class;
        }
        if (kind !== null) {
            symbols.push({
                name: stmt.name,
                kind,
                range: locToRange(stmt.loc),
                selectionRange: locToRange(stmt.loc),
            });
        }
    }
    return symbols;
});
|
|
237
|
+
// --- Helpers ---
// Extract the identifier spanning the cursor position, or null when the
// cursor is not on/just after an identifier character.
function getWordAtPosition(doc, pos) {
    // Fetch the full physical line containing the cursor.
    const lineText = doc.getText({
        start: { line: pos.line, character: 0 },
        end: { line: pos.line + 1, character: 0 },
    });
    // Identifier characters immediately before the cursor (must start the word).
    const head = lineText.slice(0, pos.character).match(/[a-zA-Z_][a-zA-Z0-9_]*$/);
    if (!head)
        return null;
    // Identifier characters continuing after the cursor.
    const tail = lineText.slice(pos.character).match(/^[a-zA-Z0-9_]*/);
    return head[0] + (tail?.[0] ?? "");
}
|
|
251
|
+
// Start
// Wire document events to the connection, then begin serving LSP requests.
documents.listen(connection);
connection.listen();
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { Env, Value } from "./interpreter.js";
|
|
2
|
+
import type * as AST from "./ast.js";
|
|
3
|
+
/** Map of exported binding name → runtime value for a loaded module. */
export interface ModuleExports {
    [name: string]: Value;
}
/**
 * Clear the module cache (presumably so subsequent loadModule calls re-read
 * from disk — TODO confirm against modules.js).
 */
export declare function clearModuleCache(): void;
/**
 * Resolve a module path to a file path.
 * Search order: stdlib/ first (searching upward), then relative to basePath.
 * This prevents test files from shadowing stdlib modules.
 */
export declare function resolveModule(path: string[], basePath: string): string;
/**
 * Load a module, parse it, execute it, and return its pub exports.
 */
export declare function loadModule(filePath: string): ModuleExports;
/**
 * Handle a use statement: resolve, load, and bind imports into env.
 */
export declare function handleUse(stmt: AST.UseStmt, env: Env, currentFile: string): void;
/**
 * Create a UseHandler bound to a specific file path.
 */
export declare function createUseHandler(currentFile: string): (stmt: AST.UseStmt, env: Env) => void;
|