avrae-ls 0.3.0__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- avrae_ls/api.py +229 -229
- avrae_ls/argparser.py +16 -3
- avrae_ls/completions.py +177 -27
- avrae_ls/config.py +61 -2
- avrae_ls/context.py +62 -1
- avrae_ls/diagnostics.py +207 -2
- avrae_ls/parser.py +7 -2
- avrae_ls/runtime.py +94 -18
- avrae_ls/server.py +38 -1
- avrae_ls/symbols.py +149 -33
- avrae_ls-0.4.0.dist-info/METADATA +86 -0
- {avrae_ls-0.3.0.dist-info → avrae_ls-0.4.0.dist-info}/RECORD +16 -16
- avrae_ls-0.3.0.dist-info/METADATA +0 -47
- {avrae_ls-0.3.0.dist-info → avrae_ls-0.4.0.dist-info}/WHEEL +0 -0
- {avrae_ls-0.3.0.dist-info → avrae_ls-0.4.0.dist-info}/entry_points.txt +0 -0
- {avrae_ls-0.3.0.dist-info → avrae_ls-0.4.0.dist-info}/licenses/LICENSE +0 -0
- {avrae_ls-0.3.0.dist-info → avrae_ls-0.4.0.dist-info}/top_level.txt +0 -0
avrae_ls/diagnostics.py
CHANGED
@@ -1,14 +1,15 @@
 from __future__ import annotations
 
+import inspect
 import ast
 import logging
-import
-from typing import Iterable, List, Sequence, Set
+from typing import Dict, Iterable, List, Sequence, Set
 
 import draconic
 from lsprotocol import types
 
 from .argument_parsing import apply_argument_parsing
+from .completions import _infer_type_map, _resolve_type_name, _type_meta
 from .config import DiagnosticSettings
 from .context import ContextData, GVarResolver
 from .parser import find_draconic_blocks
@@ -78,6 +79,9 @@ class DiagnosticProvider:
         diagnostics.extend(_check_imports(body, self._settings.semantic_level))
         diagnostics.extend(_check_call_args(body, self._builtin_signatures, self._settings.semantic_level))
         diagnostics.extend(_check_private_method_calls(body))
+        diagnostics.extend(
+            _check_api_misuse(body, code, ctx_data, self._settings.semantic_level)
+        )
         if line_shift:
             diagnostics = _shift_diagnostics(diagnostics, line_shift, 0)
         return diagnostics
@@ -282,6 +286,207 @@ def _check_private_method_calls(body: Sequence[ast.AST]) -> List[types.Diagnosti
     return diagnostics
 
 
+def _check_api_misuse(
+    body: Sequence[ast.AST],
+    code: str,
+    ctx_data: ContextData,
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    """Heuristics for common API mistakes (list vs scalar, missing context, property calls)."""
+    diagnostics: list[types.Diagnostic] = []
+    module = ast.Module(body=list(body), type_ignores=[])
+    parent_map = _build_parent_map(module)
+    assigned_names = _collect_assigned_names(module)
+    type_map = _diagnostic_type_map(code)
+    context_seen: set[str] = set()
+
+    for node in ast.walk(module):
+        if isinstance(node, ast.Call):
+            diagnostics.extend(_context_call_diagnostics(node, ctx_data, severity_level, context_seen))
+            diagnostics.extend(_property_call_diagnostics(node, type_map, code, severity_level))
+        if isinstance(node, ast.Attribute):
+            diagnostics.extend(_uncalled_context_attr_diagnostics(node, assigned_names, severity_level))
+            diagnostics.extend(_iterable_attr_diagnostics(node, parent_map, type_map, code, severity_level))
+    return diagnostics
+
+
+def _context_call_diagnostics(
+    node: ast.Call,
+    ctx_data: ContextData,
+    severity_level: str,
+    seen: set[str],
+) -> List[types.Diagnostic]:
+    diagnostics: list[types.Diagnostic] = []
+    if isinstance(node.func, ast.Name):
+        if node.func.id == "character" and not ctx_data.character and "character" not in seen:
+            seen.add("character")
+            diagnostics.append(
+                _make_diagnostic(
+                    node.func,
+                    "No character context configured; character() will raise at runtime.",
+                    severity_level,
+                )
+            )
+        elif node.func.id == "combat" and not ctx_data.combat and "combat" not in seen:
+            seen.add("combat")
+            diagnostics.append(
+                _make_diagnostic(
+                    node.func,
+                    "No combat context configured; combat() will return None.",
+                    severity_level,
+                )
+            )
+    return diagnostics
+
+
+def _property_call_diagnostics(
+    node: ast.Call,
+    type_map: Dict[str, str],
+    code: str,
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    if not isinstance(node.func, ast.Attribute):
+        return []
+    base_type = _resolve_expr_type(node.func.value, type_map, code)
+    if not base_type:
+        return []
+    meta = _type_meta(base_type)
+    attr = node.func.attr
+    if attr in meta.methods or attr not in meta.attrs:
+        return []
+    receiver = _expr_to_str(node.func.value) or base_type
+    return [
+        _make_diagnostic(
+            node.func,
+            f"'{attr}' on {receiver} is a property; drop the parentheses.",
+            severity_level,
+        )
+    ]
+
+
+def _uncalled_context_attr_diagnostics(
+    node: ast.Attribute,
+    assigned_names: Set[str],
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    if isinstance(node.value, ast.Name) and node.value.id in {"character", "combat"} and node.value.id not in assigned_names:
+        call_hint = f"{node.value.id}()"
+        return [
+            _make_diagnostic(
+                node.value,
+                f"Call {call_hint} before accessing '{node.attr}'.",
+                severity_level,
+            )
+        ]
+    return []
+
+
+def _iterable_attr_diagnostics(
+    node: ast.Attribute,
+    parent_map: Dict[ast.AST, ast.AST],
+    type_map: Dict[str, str],
+    code: str,
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    parent = parent_map.get(node)
+    if parent is None:
+        return []
+    if isinstance(parent, ast.Subscript) and parent.value is node:
+        return []
+
+    base_type = _resolve_expr_type(node.value, type_map, code)
+    if not base_type:
+        return []
+    meta = _type_meta(base_type)
+    attr_meta = meta.attrs.get(node.attr)
+    if not attr_meta:
+        return []
+
+    is_collection = bool(attr_meta.element_type) or attr_meta.type_name in {"list", "dict"}
+    if not is_collection:
+        return []
+
+    expr_label = _expr_to_str(node) or node.attr
+    element_label = attr_meta.element_type or "items"
+    container_label = attr_meta.type_name or "collection"
+
+    if isinstance(parent, ast.Attribute) and parent.value is node:
+        next_attr = parent.attr
+        message = f"'{expr_label}' is a {container_label} of {element_label}; index or iterate before accessing '{next_attr}'."
+        return [_make_diagnostic(node, message, severity_level)]
+
+    if isinstance(parent, ast.Call) and parent.func is node:
+        message = f"'{expr_label}' is a {container_label} of {element_label}; index or iterate before calling it."
+        return [_make_diagnostic(node, message, severity_level)]
+
+    return []
+
+
+def _diagnostic_type_map(code: str) -> Dict[str, str]:
+    mapping = _infer_type_map(code)
+    if mapping:
+        return mapping
+    wrapped, _ = _wrap_draconic(code)
+    return _infer_type_map(wrapped)
+
+
+def _resolve_expr_type(expr: ast.AST, type_map: Dict[str, str], code: str) -> str:
+    expr_text = _expr_to_str(expr)
+    if not expr_text:
+        return ""
+    return _resolve_type_name(expr_text, code, type_map)
+
+
+def _expr_to_str(expr: ast.AST) -> str:
+    try:
+        return ast.unparse(expr)
+    except Exception:
+        return ""
+
+
+def _collect_assigned_names(module: ast.Module) -> Set[str]:
+    assigned: set[str] = set()
+
+    class Collector(ast.NodeVisitor):
+        def visit_Assign(self, node: ast.Assign):
+            for target in node.targets:
+                assigned.update(_names_in_target(target))
+            self.generic_visit(node)
+
+        def visit_AnnAssign(self, node: ast.AnnAssign):
+            assigned.update(_names_in_target(node.target))
+            self.generic_visit(node)
+
+        def visit_For(self, node: ast.For):
+            assigned.update(_names_in_target(node.target))
+            self.generic_visit(node)
+
+        def visit_AsyncFor(self, node: ast.AsyncFor):
+            assigned.update(_names_in_target(node.target))
+            self.generic_visit(node)
+
+        def visit_FunctionDef(self, node: ast.FunctionDef):
+            assigned.add(node.name)
+            for arg in node.args.args:
+                assigned.add(arg.arg)
+            self.generic_visit(node)
+
+        def visit_ClassDef(self, node: ast.ClassDef):
+            assigned.add(node.name)
+            self.generic_visit(node)
+
+    Collector().visit(module)
+    return assigned
+
+
+def _build_parent_map(root: ast.AST) -> Dict[ast.AST, ast.AST]:
+    parents: dict[ast.AST, ast.AST] = {}
+    for parent in ast.walk(root):
+        for child in ast.iter_child_nodes(parent):
+            parents[child] = parent
+    return parents
+
+
 def _make_diagnostic(node: ast.AST, message: str, level: str) -> types.Diagnostic:
     severity = SEVERITY.get(level, types.DiagnosticSeverity.Warning)
     if hasattr(node, "lineno"):
avrae_ls/parser.py
CHANGED
@@ -22,12 +22,17 @@ def find_draconic_blocks(source: str) -> List[DraconicBlock]:
         raw = match.group(1)
         prefix = source[: match.start()]
         line_offset = prefix.count("\n")
-
+        # Column where draconic content starts on its first line
+        last_nl = prefix.rfind("\n")
+        start_col = match.start(1) - (last_nl + 1 if last_nl != -1 else 0)
+        char_offset = start_col
         # Trim leading blank lines inside the block while tracking the line shift
         while raw.startswith("\n"):
             raw = raw[1:]
             line_offset += 1
-
+            char_offset = 0
+        line_count = raw.count("\n") + 1 if raw else 1
+        blocks.append(DraconicBlock(code=raw, line_offset=line_offset, char_offset=char_offset, line_count=line_count))
     return blocks
 
 
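
As a quick illustration of the column bookkeeping added above (a minimal sketch under the same logic; the sample source string is made up):

    source = 'embed -title "{{character().name}}"'
    start = source.index("{{") + 2                      # stands in for match.start(1)
    prefix = source[:start]
    last_nl = prefix.rfind("\n")
    start_col = start - (last_nl + 1 if last_nl != -1 else 0)
    # start_col is the 0-based column where the draconic code begins on its
    # first line; it is reset to 0 once leading blank lines inside the block
    # are trimmed, since the code then starts at the beginning of a new line.
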
avrae_ls/runtime.py
CHANGED
@@ -6,6 +6,7 @@ import json
 import logging
 import math
 import random
+import re
 import time
 from types import SimpleNamespace
 try: # optional dependency
@@ -98,7 +99,66 @@ def _vroll_dice(dice: str, multiply: int = 1, add: int = 0) -> SimpleRollResult
     return SimpleRollResult(rolled)
 
 
-
+@dataclass
+class _CoinsArgs:
+    pp: int = 0
+    gp: int = 0
+    ep: int = 0
+    sp: int = 0
+    cp: int = 0
+    explicit: bool = False
+
+    @property
+    def total(self) -> float:
+        return (self.pp * 10) + self.gp + (self.ep * 0.5) + (self.sp * 0.1) + (self.cp * 0.01)
+
+
+def _parse_coin_args(args: str) -> _CoinsArgs:
+    cleaned = str(args).replace(",", "")
+    try:
+        return _parse_coin_args_float(float(cleaned))
+    except ValueError:
+        return _parse_coin_args_re(cleaned)
+
+
+def _parse_coin_args_float(coins: float) -> _CoinsArgs:
+    total_copper = int(round(coins * 100, 1))
+    if coins < 0:
+        return _CoinsArgs(cp=total_copper)
+    return _CoinsArgs(
+        gp=total_copper // 100,
+        sp=(total_copper % 100) // 10,
+        cp=total_copper % 10,
+    )
+
+
+def _parse_coin_args_re(args: str) -> _CoinsArgs:
+    is_valid = re.fullmatch(r"(([+-]?\d+)\s*([pgesc]p)\s*)+", args, re.IGNORECASE)
+    if not is_valid:
+        raise avrae_argparser.InvalidArgument(
+            "Coins must be a number or a currency string, e.g. `+101.2` or `10cp +101gp -2sp`."
+        )
+
+    out = _CoinsArgs(explicit=True)
+    for coin_match in re.finditer(r"(?P<amount>[+-]?\d+)\s*(?P<currency>[pgesc]p)", args, re.IGNORECASE):
+        amount = int(coin_match["amount"])
+        currency = coin_match["currency"].lower()
+
+        if currency == "pp":
+            out.pp += amount
+        elif currency == "gp":
+            out.gp += amount
+        elif currency == "ep":
+            out.ep += amount
+        elif currency == "sp":
+            out.sp += amount
+        else:
+            out.cp += amount
+
+    return out
+
+
+def _parse_coins(args: str, include_total: bool = True):
     try:
         from avrae.aliasing.api.functions import parse_coins as avrae_parse_coins
     except Exception:
@@ -106,16 +166,21 @@ def _parse_coins(args: str):
 
     if avrae_parse_coins:
        try:
-            return avrae_parse_coins(str(args))
+            return avrae_parse_coins(str(args), include_total=include_total)
         except Exception:
             pass
 
-
-
-
-
-
-
+    coin_args = _parse_coin_args(str(args))
+    parsed = {
+        "pp": coin_args.pp,
+        "gp": coin_args.gp,
+        "ep": coin_args.ep,
+        "sp": coin_args.sp,
+        "cp": coin_args.cp,
+    }
+    if include_total:
+        parsed["total"] = coin_args.total
+    return parsed
 
 
 def _default_builtins() -> Dict[str, Any]:
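
For reference, the fallback coin parser above (used only when Avrae's own parse_coins cannot be imported) behaves roughly as follows; the inputs are arbitrary examples and the totals are approximate:

    _parse_coins("10cp +101gp -2sp")
    # -> {"pp": 0, "gp": 101, "ep": 0, "sp": -2, "cp": 10, "total": ...}
    #    where "total" is the value in gold pieces (about 100.9 gp here)
    _parse_coins("+101.2", include_total=False)
    # -> {"pp": 0, "gp": 101, "ep": 0, "sp": 2, "cp": 0}
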
|
|
129
194
|
"abs": abs,
|
|
130
195
|
"range": range,
|
|
131
196
|
"enumerate": enumerate,
|
|
132
|
-
"sorted": sorted,
|
|
133
|
-
"reversed": reversed,
|
|
134
197
|
"int": int,
|
|
135
198
|
"float": float,
|
|
136
199
|
"str": str,
|
|
137
|
-
"bool": bool,
|
|
138
200
|
"round": round,
|
|
139
201
|
"ceil": math.ceil,
|
|
140
202
|
"floor": math.floor,
|
|
@@ -417,6 +479,9 @@ class MockExecutor:
|
|
|
417
479
|
verify_cache_sig = sig_str
|
|
418
480
|
verify_cache_error = None
|
|
419
481
|
verify_cache_result = None
|
|
482
|
+
timeout = float(service_cfg.verify_timeout if service_cfg else AvraeServiceConfig.verify_timeout)
|
|
483
|
+
retries = int(service_cfg.verify_retries if service_cfg else AvraeServiceConfig.verify_retries)
|
|
484
|
+
retries = max(0, retries)
|
|
420
485
|
|
|
421
486
|
def _call_verify_api(signature: str) -> Dict[str, Any]:
|
|
422
487
|
base_url = (service_cfg.base_url if service_cfg else AvraeServiceConfig.base_url).rstrip("/")
|
|
@@ -424,10 +489,18 @@ class MockExecutor:
|
|
|
424
489
|
headers = {"Content-Type": "application/json"}
|
|
425
490
|
if service_cfg and service_cfg.token:
|
|
426
491
|
headers["Authorization"] = str(service_cfg.token)
|
|
427
|
-
|
|
428
|
-
|
|
429
|
-
|
|
430
|
-
|
|
492
|
+
last_exc: Exception | None = None
|
|
493
|
+
for attempt in range(retries + 1):
|
|
494
|
+
try:
|
|
495
|
+
resp = httpx.post(url, json={"signature": signature}, headers=headers, timeout=timeout)
|
|
496
|
+
break
|
|
497
|
+
except Exception as exc:
|
|
498
|
+
last_exc = exc
|
|
499
|
+
if attempt >= retries:
|
|
500
|
+
raise ValueError(f"Failed to verify signature: {exc}") from exc
|
|
501
|
+
continue
|
|
502
|
+
else: # pragma: no cover - defensive
|
|
503
|
+
raise ValueError(f"Failed to verify signature: {last_exc}") from last_exc
|
|
431
504
|
|
|
432
505
|
try:
|
|
433
506
|
payload = resp.json()
|
|
@@ -435,14 +508,17 @@ class MockExecutor:
|
|
|
435
508
|
raise ValueError("Failed to verify signature: invalid response body") from exc
|
|
436
509
|
|
|
437
510
|
if resp.status_code != 200:
|
|
438
|
-
message =
|
|
439
|
-
|
|
511
|
+
message = None
|
|
512
|
+
if isinstance(payload, dict):
|
|
513
|
+
message = payload.get("error") or payload.get("message")
|
|
514
|
+
detail = f"{message} (HTTP {resp.status_code})" if message else f"HTTP {resp.status_code}"
|
|
515
|
+
raise ValueError(f"Failed to verify signature: {detail}")
|
|
440
516
|
|
|
441
517
|
if not isinstance(payload, dict):
|
|
442
518
|
raise ValueError("Failed to verify signature: invalid response")
|
|
443
519
|
if payload.get("success") is not True:
|
|
444
520
|
message = payload.get("error")
|
|
445
|
-
raise ValueError(
|
|
521
|
+
raise ValueError(f"Failed to verify signature: {message or 'unsuccessful response'}")
|
|
446
522
|
|
|
447
523
|
data = payload.get("data")
|
|
448
524
|
if not isinstance(data, dict):
|
avrae_ls/server.py
CHANGED
@@ -18,7 +18,7 @@ from .alias_preview import render_alias_command, simulate_command
 from .parser import find_draconic_blocks
 from .signature_help import load_signatures, signature_help_for_code
 from .completions import gather_suggestions, completion_items_for_position, hover_for_position
-from .symbols import build_symbol_table, document_symbols, find_definition_range
+from .symbols import build_symbol_table, document_symbols, find_definition_range, find_references, range_for_word
 from .argument_parsing import apply_argument_parsing
 
 __version__ = "0.1.0"
@@ -142,6 +142,43 @@ def on_definition(server: AvraeLanguageServer, params: types.DefinitionParams):
     return types.Location(uri=params.text_document.uri, range=rng)
 
 
+@ls.feature(types.TEXT_DOCUMENT_REFERENCES)
+def on_references(server: AvraeLanguageServer, params: types.ReferenceParams):
+    doc = server.workspace.get_text_document(params.text_document.uri)
+    table = build_symbol_table(doc.source)
+    word = doc.word_at_position(params.position)
+    if not word or not table.lookup(word):
+        return []
+
+    ranges = find_references(table, doc.source, word, include_declaration=params.context.include_declaration)
+    return [types.Location(uri=params.text_document.uri, range=rng) for rng in ranges]
+
+
+@ls.feature(types.TEXT_DOCUMENT_PREPARE_RENAME)
+def on_prepare_rename(server: AvraeLanguageServer, params: types.PrepareRenameParams):
+    doc = server.workspace.get_text_document(params.text_document.uri)
+    table = build_symbol_table(doc.source)
+    word = doc.word_at_position(params.position)
+    if not word or not table.lookup(word):
+        return None
+    return range_for_word(doc.source, params.position)
+
+
+@ls.feature(types.TEXT_DOCUMENT_RENAME)
+def on_rename(server: AvraeLanguageServer, params: types.RenameParams):
+    doc = server.workspace.get_text_document(params.text_document.uri)
+    table = build_symbol_table(doc.source)
+    word = doc.word_at_position(params.position)
+    if not word or not table.lookup(word) or not params.new_name:
+        return None
+
+    ranges = find_references(table, doc.source, word, include_declaration=True)
+    if not ranges:
+        return None
+    edits = [types.TextEdit(range=rng, new_text=params.new_name) for rng in ranges]
+    return types.WorkspaceEdit(changes={params.text_document.uri: edits})
+
+
 @ls.feature(types.WORKSPACE_SYMBOL)
 def on_workspace_symbol(server: AvraeLanguageServer, params: types.WorkspaceSymbolParams):
     symbols: list[types.SymbolInformation] = []