avrae-ls 0.3.1__py3-none-any.whl → 0.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- avrae_ls/alias_preview.py +175 -9
- avrae_ls/api.py +229 -229
- avrae_ls/argparser.py +16 -3
- avrae_ls/code_actions.py +282 -0
- avrae_ls/codes.py +3 -0
- avrae_ls/completions.py +489 -78
- avrae_ls/config.py +61 -2
- avrae_ls/context.py +62 -1
- avrae_ls/diagnostics.py +267 -5
- avrae_ls/parser.py +7 -2
- avrae_ls/runtime.py +94 -15
- avrae_ls/server.py +52 -6
- avrae_ls/signature_help.py +56 -5
- avrae_ls/symbols.py +149 -33
- avrae_ls-0.4.1.dist-info/METADATA +86 -0
- avrae_ls-0.4.1.dist-info/RECORD +34 -0
- avrae_ls-0.3.1.dist-info/METADATA +0 -47
- avrae_ls-0.3.1.dist-info/RECORD +0 -32
- {avrae_ls-0.3.1.dist-info → avrae_ls-0.4.1.dist-info}/WHEEL +0 -0
- {avrae_ls-0.3.1.dist-info → avrae_ls-0.4.1.dist-info}/entry_points.txt +0 -0
- {avrae_ls-0.3.1.dist-info → avrae_ls-0.4.1.dist-info}/licenses/LICENSE +0 -0
- {avrae_ls-0.3.1.dist-info → avrae_ls-0.4.1.dist-info}/top_level.txt +0 -0
avrae_ls/config.py
CHANGED
@@ -3,12 +3,15 @@ from __future__ import annotations
 import json
 import logging
 import math
+import os
+import re
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Any, Dict, Iterable, Tuple
+from typing import Any, Dict, Iterable, Mapping, Tuple

 CONFIG_FILENAME = ".avraels.json"
 log = logging.getLogger(__name__)
+_ENV_VAR_PATTERN = re.compile(r"\$(\w+)|\$\{([^}]+)\}")


 class ConfigError(Exception):
@@ -25,6 +28,8 @@ class DiagnosticSettings:
 class AvraeServiceConfig:
     base_url: str = "https://api.avrae.io"
     token: str | None = None
+    verify_timeout: float = 5.0
+    verify_retries: int = 0


 @dataclass
@@ -360,6 +365,30 @@ class AvraeLSConfig:
         )


+def _expand_env_vars(data: Any, env: Mapping[str, str], missing_vars: set[str]) -> Any:
+    if isinstance(data, dict):
+        return {key: _expand_env_vars(value, env, missing_vars) for key, value in data.items()}
+    if isinstance(data, list):
+        return [_expand_env_vars(value, env, missing_vars) for value in data]
+    if isinstance(data, str):
+        def _replace(match: re.Match[str]) -> str:
+            var = match.group(1) or match.group(2) or ""
+            if var in env:
+                return env[var]
+            missing_vars.add(var)
+            return ""
+
+        return _ENV_VAR_PATTERN.sub(_replace, data)
+    return data
+
+
+def _coerce_optional_str(value: Any) -> str | None:
+    if value is None:
+        return None
+    value_str = str(value)
+    return value_str if value_str.strip() else None
+
+
 def load_config(workspace_root: Path) -> Tuple[AvraeLSConfig, Iterable[str]]:
     """Load `.avraels.json` from the workspace root, returning config and warnings."""
     path = workspace_root / CONFIG_FILENAME
@@ -374,12 +403,42 @@ def load_config(workspace_root: Path) -> Tuple[AvraeLSConfig, Iterable[str]]:
         return AvraeLSConfig.default(workspace_root), [warning]

     warnings: list[str] = []
+    env_missing: set[str] = set()
+    env = dict(os.environ)
+    env.setdefault("workspaceRoot", str(workspace_root))
+    env.setdefault("workspaceFolder", str(workspace_root))
+    raw = _expand_env_vars(raw, env, env_missing)
+    for var in sorted(env_missing):
+        warning = f"{CONFIG_FILENAME}: environment variable '{var}' is not set; substituting an empty string."
+        warnings.append(warning)
+        log.warning(warning)
+
     enable_gvar_fetch = bool(raw.get("enableGvarFetch", False))

     service_cfg = raw.get("avraeService") or {}
+    def _get_service_timeout(raw_timeout) -> float:
+        try:
+            timeout = float(raw_timeout)
+            if timeout > 0:
+                return timeout
+        except Exception:
+            pass
+        return AvraeServiceConfig.verify_timeout
+
+    def _get_service_retries(raw_retries) -> int:
+        try:
+            retries = int(raw_retries)
+            if retries >= 0:
+                return retries
+        except Exception:
+            pass
+        return AvraeServiceConfig.verify_retries
+
     service = AvraeServiceConfig(
         base_url=str(service_cfg.get("baseUrl") or AvraeServiceConfig.base_url),
-        token=service_cfg.get("token"),
+        token=_coerce_optional_str(service_cfg.get("token")),
+        verify_timeout=_get_service_timeout(service_cfg.get("verifySignatureTimeout")),
+        verify_retries=_get_service_retries(service_cfg.get("verifySignatureRetries")),
     )

     diag_cfg = raw.get("diagnostics") or {}
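The net effect of the config.py changes is that string values in `.avraels.json` may now reference environment variables ($VAR or ${VAR}), with `workspaceRoot`/`workspaceFolder` seeded as defaults. A minimal standalone sketch of that substitution, assuming the shapes above; the env-var names and values below are invented for illustration, only the config keys come from the diff:

from __future__ import annotations

import re
from typing import Any, Mapping

# Same pattern the diff adds to config.py: $VAR or ${VAR}.
_ENV_VAR_PATTERN = re.compile(r"\$(\w+)|\$\{([^}]+)\}")


def expand(data: Any, env: Mapping[str, str], missing: set[str]) -> Any:
    # Walk dicts and lists recursively; substitute inside strings; leave other values untouched.
    if isinstance(data, dict):
        return {key: expand(value, env, missing) for key, value in data.items()}
    if isinstance(data, list):
        return [expand(value, env, missing) for value in data]
    if isinstance(data, str):
        def _replace(match: re.Match[str]) -> str:
            var = match.group(1) or match.group(2) or ""
            if var in env:
                return env[var]
            missing.add(var)  # unknown variables become "" and trigger a warning in load_config
            return ""
        return _ENV_VAR_PATTERN.sub(_replace, data)
    return data


# Hypothetical .avraels.json content after json.load(); keys follow the diff, env-var names are made up.
raw = {
    "enableGvarFetch": True,
    "avraeService": {
        "baseUrl": "$AVRAE_BASE_URL",
        "token": "${AVRAE_TOKEN}",
        "verifySignatureTimeout": 10,
    },
}
missing: set[str] = set()
print(expand(raw, {"AVRAE_TOKEN": "example-token"}, missing))
print(missing)  # {'AVRAE_BASE_URL'} -> substituted with "" and warned about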
avrae_ls/context.py
CHANGED
@@ -33,11 +33,12 @@ class ContextBuilder:

     def build(self, profile_name: str | None = None) -> ContextData:
         profile = self._select_profile(profile_name)
+        combat = self._ensure_me_combatant(profile)
         merged_vars = self._merge_character_cvars(profile.character, self._load_var_files().merge(profile.vars))
         self._gvar_resolver.seed(merged_vars.gvars)
         return ContextData(
             ctx=dict(profile.ctx),
-            combat=
+            combat=combat,
             character=dict(profile.character),
             vars=merged_vars,
         )
@@ -69,6 +70,66 @@ class ContextBuilder:
         merged = merged.merge(VarSources(cvars=builtin_cvars))
         return merged

+    def _ensure_me_combatant(self, profile: ContextProfile) -> Dict[str, Any]:
+        combat = dict(profile.combat or {})
+        combatants = list(combat.get("combatants") or [])
+        me = combat.get("me")
+        author_id = (profile.ctx.get("author") or {}).get("id")
+
+        def _matches_author(combatant: Dict[str, Any]) -> bool:
+            try:
+                return author_id is not None and str(combatant.get("controller")) == str(author_id)
+            except Exception:
+                return False
+
+        # Use an existing combatant controlled by the author if me is missing.
+        if me is None:
+            for existing in combatants:
+                if _matches_author(existing):
+                    me = existing
+                    break
+
+        # If still missing, synthesize a combatant from the character sheet.
+        if me is None and profile.character:
+            me = {
+                "name": profile.character.get("name", "Player"),
+                "id": "cmb_player",
+                "controller": author_id,
+                "group": None,
+                "race": profile.character.get("race"),
+                "monster_name": None,
+                "is_hidden": False,
+                "init": profile.character.get("stats", {}).get("dexterity", 10),
+                "initmod": 0,
+                "type": "combatant",
+                "note": "Mock combatant for preview",
+                "effects": [],
+                "stats": profile.character.get("stats") or {},
+                "levels": profile.character.get("levels") or profile.character.get("class_levels") or {},
+                "skills": profile.character.get("skills") or {},
+                "saves": profile.character.get("saves") or {},
+                "resistances": profile.character.get("resistances") or {},
+                "spellbook": profile.character.get("spellbook") or {},
+                "attacks": profile.character.get("attacks") or [],
+                "max_hp": profile.character.get("max_hp"),
+                "hp": profile.character.get("hp"),
+                "temp_hp": profile.character.get("temp_hp"),
+                "ac": profile.character.get("ac"),
+                "creature_type": profile.character.get("creature_type"),
+            }
+
+        if me is not None:
+            combat["me"] = me
+            if not any(c is me for c in combatants) and not any(_matches_author(c) for c in combatants):
+                combatants.insert(0, me)
+            combat["combatants"] = combatants
+            if "current" not in combat or combat.get("current") is None:
+                combat["current"] = me
+        else:
+            combat["combatants"] = combatants
+
+        return combat
+

 class GVarResolver:
     def __init__(self, config: AvraeLSConfig):
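For preview purposes, `_ensure_me_combatant` guarantees that `combat.me` is populated: an explicit `me` wins, otherwise a combatant controlled by `ctx.author.id` is reused, otherwise one is synthesized from the character sheet. A condensed sketch of that fallback order, with toy data and a reduced field set (the real synthesized combatant carries the full stat block shown in the diff):

from typing import Any, Dict, Optional


def pick_me(ctx: Dict[str, Any], combat: Dict[str, Any], character: Dict[str, Any]) -> Optional[Dict[str, Any]]:
    # Condensed fallback order from _ensure_me_combatant:
    # explicit combat["me"] -> combatant controlled by ctx.author.id -> synthetic sheet-based combatant.
    me = combat.get("me")
    author_id = (ctx.get("author") or {}).get("id")
    if me is None:
        me = next(
            (
                c
                for c in combat.get("combatants") or []
                if author_id is not None and str(c.get("controller")) == str(author_id)
            ),
            None,
        )
    if me is None and character:
        me = {
            "name": character.get("name", "Player"),
            "id": "cmb_player",
            "controller": author_id,
            "type": "combatant",
        }
    return me


# Toy profile data; field names follow the diff, values are invented.
ctx = {"author": {"id": 1234}}
combat = {"combatants": [{"name": "Goblin", "controller": 999}]}
character = {"name": "Talia"}
print(pick_me(ctx, combat, character))  # no "me" and no matching controller -> synthetic combatant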
avrae_ls/diagnostics.py
CHANGED
@@ -1,14 +1,17 @@
 from __future__ import annotations

 import ast
-import logging
 import inspect
-
+import logging
+from typing import Any, Dict, Iterable, List, Sequence, Set

 import draconic
 from lsprotocol import types

+from .alias_preview import simulate_command
+from .codes import MISSING_GVAR_CODE, UNDEFINED_NAME_CODE, UNSUPPORTED_IMPORT_CODE
 from .argument_parsing import apply_argument_parsing
+from .completions import _infer_type_map, _resolve_type_name, _type_meta
 from .config import DiagnosticSettings
 from .context import ContextData, GVarResolver
 from .parser import find_draconic_blocks
@@ -40,6 +43,10 @@ class DiagnosticProvider:
         source = apply_argument_parsing(source)
         blocks = find_draconic_blocks(source)
         if not blocks:
+            plain = _plain_command_diagnostics(source)
+            if plain is not None:
+                diagnostics.extend(plain)
+            return diagnostics
         diagnostics.extend(await self._analyze_code(source, ctx_data, gvar_resolver))
         return diagnostics

@@ -78,6 +85,9 @@ class DiagnosticProvider:
         diagnostics.extend(_check_imports(body, self._settings.semantic_level))
         diagnostics.extend(_check_call_args(body, self._builtin_signatures, self._settings.semantic_level))
         diagnostics.extend(_check_private_method_calls(body))
+        diagnostics.extend(
+            _check_api_misuse(body, code, ctx_data, self._settings.semantic_level)
+        )
         if line_shift:
             diagnostics = _shift_diagnostics(diagnostics, line_shift, 0)
         return diagnostics
@@ -146,6 +156,8 @@ class DiagnosticProvider:
                     node,
                     f"'{node.id}' may be undefined in this scope",
                     severity_level,
+                    code=UNDEFINED_NAME_CODE,
+                    data={"name": node.id},
                 )
             )

@@ -233,6 +245,8 @@ async def _check_gvars(
                     arg_node,
                     f"Unknown gvar '{gvar_id}'",
                     settings.semantic_level,
+                    code=MISSING_GVAR_CODE,
+                    data={"gvar": gvar_id},
                 )
             )

@@ -282,7 +296,215 @@ def _check_private_method_calls(body: Sequence[ast.AST]) -> List[types.Diagnosti
     return diagnostics


-def
+def _check_api_misuse(
+    body: Sequence[ast.AST],
+    code: str,
+    ctx_data: ContextData,
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    """Heuristics for common API mistakes (list vs scalar, missing context, property calls)."""
+    diagnostics: list[types.Diagnostic] = []
+    module = ast.Module(body=list(body), type_ignores=[])
+    parent_map = _build_parent_map(module)
+    assigned_names = _collect_assigned_names(module)
+    type_map = _diagnostic_type_map(code)
+    context_seen: set[str] = set()
+
+    for node in ast.walk(module):
+        if isinstance(node, ast.Call):
+            diagnostics.extend(_context_call_diagnostics(node, ctx_data, severity_level, context_seen))
+            diagnostics.extend(_property_call_diagnostics(node, type_map, code, severity_level))
+        if isinstance(node, ast.Attribute):
+            diagnostics.extend(_uncalled_context_attr_diagnostics(node, assigned_names, severity_level))
+            diagnostics.extend(_iterable_attr_diagnostics(node, parent_map, type_map, code, severity_level))
+    return diagnostics
+
+
+def _context_call_diagnostics(
+    node: ast.Call,
+    ctx_data: ContextData,
+    severity_level: str,
+    seen: set[str],
+) -> List[types.Diagnostic]:
+    diagnostics: list[types.Diagnostic] = []
+    if isinstance(node.func, ast.Name):
+        if node.func.id == "character" and not ctx_data.character and "character" not in seen:
+            seen.add("character")
+            diagnostics.append(
+                _make_diagnostic(
+                    node.func,
+                    "No character context configured; character() will raise at runtime.",
+                    severity_level,
+                )
+            )
+        elif node.func.id == "combat" and not ctx_data.combat and "combat" not in seen:
+            seen.add("combat")
+            diagnostics.append(
+                _make_diagnostic(
+                    node.func,
+                    "No combat context configured; combat() will return None.",
+                    severity_level,
+                )
+            )
+    return diagnostics
+
+
+def _property_call_diagnostics(
+    node: ast.Call,
+    type_map: Dict[str, str],
+    code: str,
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    if not isinstance(node.func, ast.Attribute):
+        return []
+    base_type = _resolve_expr_type(node.func.value, type_map, code)
+    if not base_type:
+        return []
+    meta = _type_meta(base_type)
+    attr = node.func.attr
+    if attr in meta.methods or attr not in meta.attrs:
+        return []
+    receiver = _expr_to_str(node.func.value) or base_type
+    return [
+        _make_diagnostic(
+            node.func,
+            f"'{attr}' on {receiver} is a property; drop the parentheses.",
+            severity_level,
+        )
+    ]
+
+
+def _uncalled_context_attr_diagnostics(
+    node: ast.Attribute,
+    assigned_names: Set[str],
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    if isinstance(node.value, ast.Name) and node.value.id in {"character", "combat"} and node.value.id not in assigned_names:
+        call_hint = f"{node.value.id}()"
+        return [
+            _make_diagnostic(
+                node.value,
+                f"Call {call_hint} before accessing '{node.attr}'.",
+                severity_level,
+            )
+        ]
+    return []
+
+
+def _iterable_attr_diagnostics(
+    node: ast.Attribute,
+    parent_map: Dict[ast.AST, ast.AST],
+    type_map: Dict[str, str],
+    code: str,
+    severity_level: str,
+) -> List[types.Diagnostic]:
+    parent = parent_map.get(node)
+    if parent is None:
+        return []
+    if isinstance(parent, ast.Subscript) and parent.value is node:
+        return []
+
+    base_type = _resolve_expr_type(node.value, type_map, code)
+    if not base_type:
+        return []
+    meta = _type_meta(base_type)
+    attr_meta = meta.attrs.get(node.attr)
+    if not attr_meta:
+        return []
+
+    is_collection = bool(attr_meta.element_type) or attr_meta.type_name in {"list", "dict"}
+    if not is_collection:
+        return []
+
+    expr_label = _expr_to_str(node) or node.attr
+    element_label = attr_meta.element_type or "items"
+    container_label = attr_meta.type_name or "collection"
+
+    if isinstance(parent, ast.Attribute) and parent.value is node:
+        next_attr = parent.attr
+        message = f"'{expr_label}' is a {container_label} of {element_label}; index or iterate before accessing '{next_attr}'."
+        return [_make_diagnostic(node, message, severity_level)]
+
+    if isinstance(parent, ast.Call) and parent.func is node:
+        message = f"'{expr_label}' is a {container_label} of {element_label}; index or iterate before calling it."
+        return [_make_diagnostic(node, message, severity_level)]
+
+    return []
+
+
+def _diagnostic_type_map(code: str) -> Dict[str, str]:
+    mapping = _infer_type_map(code)
+    if mapping:
+        return mapping
+    wrapped, _ = _wrap_draconic(code)
+    return _infer_type_map(wrapped)
+
+
+def _resolve_expr_type(expr: ast.AST, type_map: Dict[str, str], code: str) -> str:
+    expr_text = _expr_to_str(expr)
+    if not expr_text:
+        return ""
+    return _resolve_type_name(expr_text, code, type_map)
+
+
+def _expr_to_str(expr: ast.AST) -> str:
+    try:
+        return ast.unparse(expr)
+    except Exception:
+        return ""
+
+
+def _collect_assigned_names(module: ast.Module) -> Set[str]:
+    assigned: set[str] = set()
+
+    class Collector(ast.NodeVisitor):
+        def visit_Assign(self, node: ast.Assign):
+            for target in node.targets:
+                assigned.update(_names_in_target(target))
+            self.generic_visit(node)
+
+        def visit_AnnAssign(self, node: ast.AnnAssign):
+            assigned.update(_names_in_target(node.target))
+            self.generic_visit(node)
+
+        def visit_For(self, node: ast.For):
+            assigned.update(_names_in_target(node.target))
+            self.generic_visit(node)
+
+        def visit_AsyncFor(self, node: ast.AsyncFor):
+            assigned.update(_names_in_target(node.target))
+            self.generic_visit(node)
+
+        def visit_FunctionDef(self, node: ast.FunctionDef):
+            assigned.add(node.name)
+            for arg in node.args.args:
+                assigned.add(arg.arg)
+            self.generic_visit(node)
+
+        def visit_ClassDef(self, node: ast.ClassDef):
+            assigned.add(node.name)
+            self.generic_visit(node)
+
+    Collector().visit(module)
+    return assigned
+
+
+def _build_parent_map(root: ast.AST) -> Dict[ast.AST, ast.AST]:
+    parents: dict[ast.AST, ast.AST] = {}
+    for parent in ast.walk(root):
+        for child in ast.iter_child_nodes(parent):
+            parents[child] = parent
+    return parents
+
+
+def _make_diagnostic(
+    node: ast.AST,
+    message: str,
+    level: str,
+    *,
+    code: str | None = None,
+    data: Dict[str, Any] | None = None,
+) -> types.Diagnostic:
     severity = SEVERITY.get(level, types.DiagnosticSeverity.Warning)
     if hasattr(node, "lineno"):
         rng = _range_from_positions(
@@ -301,6 +523,8 @@ def _make_diagnostic(node: ast.AST, message: str, level: str) -> types.Diagnosti
         range=rng,
         severity=severity,
         source="avrae-ls",
+        code=code,
+        data=data,
     )

@@ -461,10 +685,27 @@ def _check_imports(body: Sequence[ast.AST], severity_level: str) -> List[types.D

     class Visitor(ast.NodeVisitor):
         def visit_Import(self, node: ast.Import):
-
+            module = node.names[0].name if node.names else None
+            diagnostics.append(
+                _make_diagnostic(
+                    node,
+                    "Imports are not supported in draconic aliases",
+                    severity_level,
+                    code=UNSUPPORTED_IMPORT_CODE,
+                    data={"module": module} if module else None,
+                )
+            )

         def visit_ImportFrom(self, node: ast.ImportFrom):
-            diagnostics.append(
+            diagnostics.append(
+                _make_diagnostic(
+                    node,
+                    "Imports are not supported in draconic aliases",
+                    severity_level,
+                    code=UNSUPPORTED_IMPORT_CODE,
+                    data={"module": node.module},
+                )
+            )

     visitor = Visitor()
     for stmt in body:
@@ -487,3 +728,24 @@ def _range_from_positions(
         character=max(((end_col_offset or col_offset or 1) - 1), 0),
     )
     return types.Range(start=start, end=end)
+
+
+def _plain_command_diagnostics(source: str) -> list[types.Diagnostic] | None:
+    """Handle simple commands (embed/echo) without draconic blocks."""
+    simulated = simulate_command(source)
+    if not simulated.command_name:
+        return None
+    if simulated.command_name == "embed":
+        if simulated.validation_error:
+            return [
+                types.Diagnostic(
+                    message=simulated.validation_error,
+                    range=_range_from_positions(1, 1, 1, 1),
+                    severity=SEVERITY["warning"],
+                    source="avrae-ls",
+                )
+            ]
+        return []
+    if simulated.command_name == "echo":
+        return []
+    return None
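The new `_check_api_misuse` pass combines several AST heuristics. As a standalone illustration of just one of them, here is a simplified version of the `_uncalled_context_attr_diagnostics` check; assignment tracking is reduced to plain `Assign` targets rather than the full Collector visitor used in the diff:

import ast


def find_uncalled_context_access(code: str) -> list[str]:
    # Simplified _uncalled_context_attr_diagnostics: flag `character.x` / `combat.x`
    # unless the bare name was rebound (e.g. `character = character()`).
    tree = ast.parse(code)
    assigned = {
        target.id
        for node in ast.walk(tree)
        if isinstance(node, ast.Assign)
        for target in node.targets
        if isinstance(target, ast.Name)
    }
    messages = []
    for node in ast.walk(tree):
        if (
            isinstance(node, ast.Attribute)
            and isinstance(node.value, ast.Name)
            and node.value.id in {"character", "combat"}
            and node.value.id not in assigned
        ):
            messages.append(f"Call {node.value.id}() before accessing '{node.attr}'.")
    return messages


print(find_uncalled_context_access("hp = character.hp"))             # flagged
print(find_uncalled_context_access("ch = character()\nhp = ch.hp"))  # clean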
avrae_ls/parser.py
CHANGED
@@ -22,12 +22,17 @@ def find_draconic_blocks(source: str) -> List[DraconicBlock]:
         raw = match.group(1)
         prefix = source[: match.start()]
         line_offset = prefix.count("\n")
-
+        # Column where draconic content starts on its first line
+        last_nl = prefix.rfind("\n")
+        start_col = match.start(1) - (last_nl + 1 if last_nl != -1 else 0)
+        char_offset = start_col
         # Trim leading blank lines inside the block while tracking the line shift
         while raw.startswith("\n"):
             raw = raw[1:]
             line_offset += 1
-
+            char_offset = 0
+        line_count = raw.count("\n") + 1 if raw else 1
+        blocks.append(DraconicBlock(code=raw, line_offset=line_offset, char_offset=char_offset, line_count=line_count))
     return blocks

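The parser change records where a draconic block starts on its first line so diagnostics can be shifted by column as well as by line. A sketch of the same bookkeeping on an assumed Avrae-style {{ ... }} block pattern (the actual regex used by find_draconic_blocks is not shown in this diff):

import re

# Assumed block delimiter: inline draconic between {{ and }}.
BLOCK = re.compile(r"\{\{(.*?)\}\}", re.DOTALL)


def block_start_positions(source: str) -> list[tuple[int, int]]:
    # Reproduces the new bookkeeping: line_offset counts newlines before the match,
    # start_col is the column of the first character inside the block.
    positions = []
    for match in BLOCK.finditer(source):
        prefix = source[: match.start()]
        line_offset = prefix.count("\n")
        last_nl = prefix.rfind("\n")
        start_col = match.start(1) - (last_nl + 1 if last_nl != -1 else 0)
        positions.append((line_offset, start_col))
    return positions


print(block_start_positions("!alias test echo {{get('x')}} and {{1 + 1}}"))  # [(0, 19), (0, 36)]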