yuho 5.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- yuho/__init__.py +16 -0
- yuho/ast/__init__.py +196 -0
- yuho/ast/builder.py +926 -0
- yuho/ast/constant_folder.py +280 -0
- yuho/ast/dead_code.py +199 -0
- yuho/ast/exhaustiveness.py +503 -0
- yuho/ast/nodes.py +907 -0
- yuho/ast/overlap.py +291 -0
- yuho/ast/reachability.py +293 -0
- yuho/ast/scope_analysis.py +490 -0
- yuho/ast/transformer.py +490 -0
- yuho/ast/type_check.py +471 -0
- yuho/ast/type_inference.py +425 -0
- yuho/ast/visitor.py +239 -0
- yuho/cli/__init__.py +14 -0
- yuho/cli/commands/__init__.py +1 -0
- yuho/cli/commands/api.py +431 -0
- yuho/cli/commands/ast_viz.py +334 -0
- yuho/cli/commands/check.py +218 -0
- yuho/cli/commands/config.py +311 -0
- yuho/cli/commands/contribute.py +122 -0
- yuho/cli/commands/diff.py +487 -0
- yuho/cli/commands/explain.py +240 -0
- yuho/cli/commands/fmt.py +253 -0
- yuho/cli/commands/generate.py +316 -0
- yuho/cli/commands/graph.py +410 -0
- yuho/cli/commands/init.py +120 -0
- yuho/cli/commands/library.py +656 -0
- yuho/cli/commands/lint.py +503 -0
- yuho/cli/commands/lsp.py +36 -0
- yuho/cli/commands/preview.py +377 -0
- yuho/cli/commands/repl.py +444 -0
- yuho/cli/commands/serve.py +44 -0
- yuho/cli/commands/test.py +528 -0
- yuho/cli/commands/transpile.py +121 -0
- yuho/cli/commands/wizard.py +370 -0
- yuho/cli/completions.py +182 -0
- yuho/cli/error_formatter.py +193 -0
- yuho/cli/main.py +1064 -0
- yuho/config/__init__.py +46 -0
- yuho/config/loader.py +235 -0
- yuho/config/mask.py +194 -0
- yuho/config/schema.py +147 -0
- yuho/library/__init__.py +84 -0
- yuho/library/index.py +328 -0
- yuho/library/install.py +699 -0
- yuho/library/lockfile.py +330 -0
- yuho/library/package.py +421 -0
- yuho/library/resolver.py +791 -0
- yuho/library/signature.py +335 -0
- yuho/llm/__init__.py +45 -0
- yuho/llm/config.py +75 -0
- yuho/llm/factory.py +123 -0
- yuho/llm/prompts.py +146 -0
- yuho/llm/providers.py +383 -0
- yuho/llm/utils.py +470 -0
- yuho/lsp/__init__.py +14 -0
- yuho/lsp/code_action_handler.py +518 -0
- yuho/lsp/completion_handler.py +85 -0
- yuho/lsp/diagnostics.py +100 -0
- yuho/lsp/hover_handler.py +130 -0
- yuho/lsp/server.py +1425 -0
- yuho/mcp/__init__.py +10 -0
- yuho/mcp/server.py +1452 -0
- yuho/parser/__init__.py +8 -0
- yuho/parser/source_location.py +108 -0
- yuho/parser/wrapper.py +311 -0
- yuho/testing/__init__.py +48 -0
- yuho/testing/coverage.py +274 -0
- yuho/testing/fixtures.py +263 -0
- yuho/transpile/__init__.py +52 -0
- yuho/transpile/alloy_transpiler.py +546 -0
- yuho/transpile/base.py +100 -0
- yuho/transpile/blocks_transpiler.py +338 -0
- yuho/transpile/english_transpiler.py +470 -0
- yuho/transpile/graphql_transpiler.py +404 -0
- yuho/transpile/json_transpiler.py +217 -0
- yuho/transpile/jsonld_transpiler.py +250 -0
- yuho/transpile/latex_preamble.py +161 -0
- yuho/transpile/latex_transpiler.py +406 -0
- yuho/transpile/latex_utils.py +206 -0
- yuho/transpile/mermaid_transpiler.py +357 -0
- yuho/transpile/registry.py +275 -0
- yuho/verify/__init__.py +43 -0
- yuho/verify/alloy.py +352 -0
- yuho/verify/combined.py +218 -0
- yuho/verify/z3_solver.py +1155 -0
- yuho-5.0.0.dist-info/METADATA +186 -0
- yuho-5.0.0.dist-info/RECORD +91 -0
- yuho-5.0.0.dist-info/WHEEL +4 -0
- yuho-5.0.0.dist-info/entry_points.txt +2 -0
|
@@ -0,0 +1,518 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Code action handler for Yuho LSP.
|
|
3
|
+
|
|
4
|
+
Provides quick fixes and refactorings for Yuho code.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import List, Optional, Tuple, TYPE_CHECKING
|
|
8
|
+
import re
|
|
9
|
+
|
|
10
|
+
try:
|
|
11
|
+
from lsprotocol import types as lsp
|
|
12
|
+
except ImportError:
|
|
13
|
+
raise ImportError(
|
|
14
|
+
"LSP dependencies not installed. Install with: pip install yuho[lsp]"
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
if TYPE_CHECKING:
|
|
18
|
+
from yuho.lsp.server import DocumentState
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def get_type_conversion(from_type: str, to_type: str) -> Optional[str]:
    """Return the Yuho conversion function name for a type pair, or None.

    Args:
        from_type: Source type name (case-insensitive).
        to_type: Target type name (case-insensitive).

    Returns:
        The converter function name, or None when no conversion is known.
    """
    # Map of (source type, target type) -> converter name, all lowercase.
    converters = {
        ("int", "float"): "to_float",
        ("float", "int"): "to_int",
        ("string", "int"): "parse_int",
        ("string", "float"): "parse_float",
        ("int", "string"): "to_string",
        ("float", "string"): "to_string",
        ("bool", "string"): "to_string",
    }
    key = (from_type.lower(), to_type.lower())
    return converters.get(key)
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def get_line_text(source: str, line: int) -> str:
    """Return the text of the given zero-based line, or "" when out of range."""
    if line < 0:
        return ""
    try:
        return source.splitlines()[line]
    except IndexError:
        return ""
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def extract_undefined_symbol(message: str) -> Optional[str]:
    """Pull the symbol name out of an "undefined ..." diagnostic message.

    Returns None when the message does not mention an undefined symbol.
    """
    found = re.search(r"undefined[:\s]+['\"]?(\w+)['\"]?", message, re.IGNORECASE)
    return found.group(1) if found else None
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
def extract_match_arm_pattern(
    doc_state: "DocumentState", range_: lsp.Range
) -> Optional[Tuple[str, lsp.Range]]:
    """
    Pull the pattern text out of the "case <pattern> =>" arm under the cursor.

    Returns:
        (pattern_text, pattern_range) when the cursor line holds a non-trivial
        case pattern, otherwise None.
    """
    line_no = range_.start.line
    text = get_line_text(doc_state.source, line_no)

    # Arm shape: optional indent, "case", the pattern, then "=>".
    arm = re.match(r'^(\s*)case\s+(.+?)\s*=>', text)
    if arm is None:
        return None

    leading_ws = arm.group(1)
    pattern = arm.group(2).strip()

    # Wildcards, booleans and bare identifiers are not worth extracting.
    if pattern in ('_', 'true', 'false') or re.match(r'^\w+$', pattern):
        return None

    # The pattern begins right after the indent plus the "case " keyword.
    start_col = len(leading_ws) + len("case ")
    end_col = start_col + len(pattern)
    span = lsp.Range(
        start=lsp.Position(line=line_no, character=start_col),
        end=lsp.Position(line=line_no, character=end_col),
    )
    return (pattern, span)
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def get_existing_pattern_names(doc_state: "DocumentState") -> set:
    """Collect the names of all `pattern <name> = ...` declarations in the document."""
    decl = re.compile(r'^\s*pattern\s+(\w+)\s*=')
    matches = (decl.match(line) for line in doc_state.source.splitlines())
    return {m.group(1) for m in matches if m}
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
def suggest_pattern_name(pattern_text: str, doc_state: "DocumentState") -> str:
    """
    Propose a snake_case name for a pattern being extracted.

    Args:
        pattern_text: The pattern source text being extracted.
        doc_state: Document state, used to avoid colliding with existing names.

    Returns:
        A unique snake_case name suggestion.
    """
    parts = []

    # A leading "TypeName {" contributes the lowercased type name.
    type_head = re.match(r'^(\w+)\s*\{', pattern_text)
    if type_head:
        parts.append(type_head.group(1).lower())

    # Up to two quoted string literals contribute sanitized fragments,
    # inserted at the front so they lead the suggested name.
    for literal in re.findall(r'"([^"]+)"', pattern_text)[:2]:
        fragment = re.sub(r'[^a-zA-Z0-9]', '_', literal.lower())
        fragment = re.sub(r'_+', '_', fragment).strip('_')
        if fragment and len(fragment) <= 20:
            parts.insert(0, fragment)

    # Comparison operators hint at a size-based prefix.
    if '>' in pattern_text or '>=' in pattern_text:
        parts.insert(0, "large")
    elif '<' in pattern_text or '<=' in pattern_text:
        parts.insert(0, "small")

    # Fall back to a generic name when nothing meaningful was found.
    if not parts:
        parts = ["extracted_pattern"]

    candidate = "_".join(parts)

    # Append a numeric suffix until the name is unused in the document.
    taken = get_existing_pattern_names(doc_state)
    stem = candidate
    suffix = 1
    while candidate in taken:
        candidate = f"{stem}_{suffix}"
        suffix += 1

    return candidate
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def get_struct_field_names(doc_state: "DocumentState", type_name: str) -> List[str]:
    """Resolve the field names of a struct, preferring the AST over raw source.

    Falls back to a textual scrape of the struct body when the AST is missing
    or yields no named fields. Returns an empty list when nothing is found.
    """
    # Preferred path: walk the parsed AST's type definitions.
    if doc_state.ast:
        for type_def in doc_state.ast.type_defs:
            if getattr(type_def, 'name', None) != type_name:
                continue
            names = [
                field.name
                for field in getattr(type_def, 'fields', [])
                if hasattr(field, 'name')
            ]
            if names:
                return names

    # Fallback: scrape the struct body out of the raw source text.
    body_re = re.compile(
        rf'struct\s+{re.escape(type_name)}\s*\{{([^}}]+)\}}',
        re.DOTALL,
    )
    found = body_re.search(doc_state.source)
    if not found:
        return []
    # Each "<name> :" inside the body is a field declaration.
    return re.findall(r'(\w+)\s*:', found.group(1))
|
|
175
|
+
|
|
176
|
+
|
|
177
|
+
def get_struct_literal_info(
    doc_state: "DocumentState", range_: lsp.Range
) -> Optional[Tuple[lsp.Range, str]]:
    """
    Build the explicit-form rewrite for a positional struct literal at cursor.

    Returns:
        (literal_range, explicit_form) for the literal under the cursor, or None.
    """
    line_no = range_.start.line
    text = get_line_text(doc_state.source, line_no)
    cursor = range_.start.character

    # Positional literal shape: TypeName(arg1, arg2, ...)
    for hit in re.finditer(r'([A-Z]\w*)\s*\(([^)]+)\)', text):
        if not (hit.start() <= cursor <= hit.end()):
            continue

        type_name = hit.group(1)
        args = [part.strip() for part in hit.group(2).split(',')]

        fields = get_struct_field_names(doc_state, type_name)
        if not fields:
            # Unknown struct: fall back to generic placeholder field names.
            fields = [f"field{i + 1}" for i in range(len(args))]

        # More args than known fields: cannot map positions to names.
        if len(args) > len(fields):
            continue

        assignments = ", ".join(
            f"{name}: {value}" for name, value in zip(fields, args)
        )
        rewritten = f"{type_name} {{ {assignments} }}"

        span = lsp.Range(
            start=lsp.Position(line=line_no, character=hit.start()),
            end=lsp.Position(line=line_no, character=hit.end()),
        )
        return (span, rewritten)

    return None
|
|
220
|
+
|
|
221
|
+
|
|
222
|
+
def get_inline_variable_info(
    doc_state: "DocumentState", range_: lsp.Range
) -> Optional[Tuple[str, str, List[lsp.Range]]]:
    """
    Get information needed to inline a variable at cursor.

    Identifies the word under the cursor, finds its `let x = ...` or `x := ...`
    definition by scanning the document top-down, then collects every other
    occurrence of the name as a usage site.

    Returns:
        Tuple of (var_name, var_value, list of usage ranges) or None when the
        cursor is not on a word or no definition is found.
    """
    line = range_.start.line
    char = range_.start.character
    line_text = get_line_text(doc_state.source, line)

    # Find word boundaries
    # Walk left then right from the cursor over identifier characters.
    word_start = char
    while word_start > 0 and (line_text[word_start - 1].isalnum() or line_text[word_start - 1] == '_'):
        word_start -= 1

    word_end = char
    while word_end < len(line_text) and (line_text[word_end].isalnum() or line_text[word_end] == '_'):
        word_end += 1

    # Empty span means the cursor is not on an identifier.
    if word_start == word_end:
        return None

    var_name = line_text[word_start:word_end]

    # Look for variable definition
    # The first `let` or `:=` binding of the name wins (top-down scan).
    var_value = None
    definition_line = -1

    lines = doc_state.source.splitlines()
    for i, src_line in enumerate(lines):
        let_match = re.match(rf'^\s*let\s+{re.escape(var_name)}\s*=\s*(.+)$', src_line)
        if let_match:
            var_value = let_match.group(1).strip()
            definition_line = i
            break

        assign_match = re.match(rf'^\s*{re.escape(var_name)}\s*:=\s*(.+)$', src_line)
        if assign_match:
            var_value = assign_match.group(1).strip()
            definition_line = i
            break

    if not var_value:
        return None

    # Find all usages
    # Whole-word matches only, excluding the definition line itself.
    usage_ranges = []
    var_pattern = re.compile(rf'\b{re.escape(var_name)}\b')

    for line_num, src_line in enumerate(lines):
        if line_num == definition_line:
            continue

        for match in var_pattern.finditer(src_line):
            # Skip re-bindings: a `let ` before the match, or a `:=` at/after it.
            # NOTE(review): the `:=` test scans from the match start, so a later
            # `:=` on the same line also suppresses the usage — confirm intended.
            prefix = src_line[:match.start()]
            if 'let ' in prefix or ':=' in src_line[match.start():]:
                continue

            usage_ranges.append(lsp.Range(
                start=lsp.Position(line=line_num, character=match.start()),
                end=lsp.Position(line=line_num, character=match.end()),
            ))

    return (var_name, var_value, usage_ranges)
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def find_similar_variants(doc_state: "DocumentState", typo: str) -> List[str]:
    """Rank known enum variant names by similarity to a misspelled name.

    Returns variant names scoring above the similarity threshold, best first;
    empty list when no AST is available.
    """
    if not doc_state.ast:
        return []

    # Gather every variant name declared by any enum-like type definition.
    candidates = []
    for type_def in doc_state.ast.type_defs:
        if hasattr(type_def, 'variants'):
            candidates.extend(variant.name for variant in type_def.variants)

    def score(a: str, b: str) -> int:
        # Cheap heuristic: exact > prefix > substring > shared characters.
        x, y = a.lower(), b.lower()
        if x == y:
            return 100
        if x.startswith(y) or y.startswith(x):
            return 80
        if x in y or y in x:
            return 60
        shared = sum(1 for ch in x if ch in y)
        return int(shared * 100 / max(len(a), len(b)))

    ranked = sorted(candidates, key=lambda name: -score(name, typo))
    return [name for name in ranked if score(name, typo) > 40]
|
|
317
|
+
|
|
318
|
+
|
|
319
|
+
def get_code_actions(
    doc_state: Optional["DocumentState"],
    uri: str,
    range_: lsp.Range,
    context: lsp.CodeActionContext,
) -> List[lsp.CodeAction]:
    """Provide code actions (quick fixes, refactorings).

    Quick fixes are derived from the diagnostics attached to *context*;
    refactorings are derived from the source text around *range_* when a
    parsed AST is available.

    Args:
        doc_state: Current document state, or None if the document is unknown.
        uri: Document URI, used as the key in workspace edits.
        range_: The cursor/selection range the client requested actions for.
        context: LSP code-action context carrying the active diagnostics.

    Returns:
        List of applicable code actions (possibly empty).
    """
    actions: List[lsp.CodeAction] = []

    if not doc_state:
        return actions

    # Check diagnostics for quick fixes.
    # Matching is done on the lowercased message text, not diagnostic codes.
    for diagnostic in context.diagnostics:
        msg = diagnostic.message.lower()

        # Fix for undefined symbol: insert an import at the top of the file.
        if "undefined" in msg:
            word = extract_undefined_symbol(diagnostic.message)
            if word:
                actions.append(lsp.CodeAction(
                    title=f"Add import for '{word}'",
                    kind=lsp.CodeActionKind.QuickFix,
                    diagnostics=[diagnostic],
                    edit=lsp.WorkspaceEdit(
                        changes={
                            uri: [lsp.TextEdit(
                                # Zero-width range at 0:0 -> pure insertion.
                                range=lsp.Range(
                                    start=lsp.Position(line=0, character=0),
                                    end=lsp.Position(line=0, character=0),
                                ),
                                new_text=f"import {word}\n",
                            )],
                        },
                    ),
                ))

        # Fix for missing match arms: append a wildcard arm after the match.
        # (Precedence: "non-exhaustive" OR ("missing" AND "arm").)
        if "non-exhaustive" in msg or "missing" in msg and "arm" in msg:
            actions.append(lsp.CodeAction(
                title="Add wildcard pattern '_ =>'",
                kind=lsp.CodeActionKind.QuickFix,
                diagnostics=[diagnostic],
                edit=lsp.WorkspaceEdit(
                    changes={
                        uri: [lsp.TextEdit(
                            range=lsp.Range(
                                start=lsp.Position(
                                    line=diagnostic.range.end.line,
                                    character=0,
                                ),
                                end=lsp.Position(
                                    line=diagnostic.range.end.line,
                                    character=0,
                                ),
                            ),
                            new_text=" _ => pass\n",
                        )],
                    },
                ),
            ))

        # Fix for type mismatch: offer a conversion-function wrap.
        if "type mismatch" in msg or "expected" in msg and "got" in msg:
            match = re.search(r"expected\s+(\w+).*got\s+(\w+)", msg)
            if match:
                expected, got = match.groups()
                conversion = get_type_conversion(got, expected)
                if conversion:
                    # NOTE(review): `conversion` gates the action but no edit
                    # is attached, so applying this action changes nothing —
                    # likely an unfinished quick fix; confirm and wire an edit.
                    actions.append(lsp.CodeAction(
                        title=f"Convert to {expected}",
                        kind=lsp.CodeActionKind.QuickFix,
                        diagnostics=[diagnostic],
                    ))

        # Fix for missing struct fields: append a placeholder assignment.
        if "missing field" in msg:
            field_match = re.search(r"missing field[s]?\s*['\"]?(\w+)", msg)
            if field_match:
                field = field_match.group(1)
                actions.append(lsp.CodeAction(
                    title=f"Add missing field '{field}'",
                    kind=lsp.CodeActionKind.QuickFix,
                    diagnostics=[diagnostic],
                    edit=lsp.WorkspaceEdit(
                        changes={
                            uri: [lsp.TextEdit(
                                # Insert at the end of the diagnostic range.
                                range=lsp.Range(
                                    start=diagnostic.range.end,
                                    end=diagnostic.range.end,
                                ),
                                new_text=f", {field}: TODO",
                            )],
                        },
                    ),
                ))

        # Fix for enum variant typos: offer the closest known variant names.
        if "unknown variant" in msg or "invalid variant" in msg:
            variant_match = re.search(r"variant[:\s]+['\"]?(\w+)['\"]?", msg)
            if variant_match:
                typo = variant_match.group(1)
                suggestions = find_similar_variants(doc_state, typo)
                # Cap at three suggestions to keep the action menu short.
                for suggestion in suggestions[:3]:
                    actions.append(lsp.CodeAction(
                        title=f"Change to '{suggestion}'",
                        kind=lsp.CodeActionKind.QuickFix,
                        diagnostics=[diagnostic],
                        edit=lsp.WorkspaceEdit(
                            changes={
                                uri: [lsp.TextEdit(
                                    range=diagnostic.range,
                                    new_text=suggestion,
                                )],
                            },
                        ),
                    ))

    # Context-based refactorings (need a successfully parsed AST).
    if doc_state.ast:
        line_text = get_line_text(doc_state.source, range_.start.line)

        # If inside a match expression, offer to add case.
        # NOTE(review): a plain substring test — any line containing "match"
        # triggers this, not only actual match expressions.
        if "match" in line_text.lower():
            actions.append(lsp.CodeAction(
                title="Add match case",
                kind=lsp.CodeActionKind.Refactor,
                edit=lsp.WorkspaceEdit(
                    changes={
                        uri: [lsp.TextEdit(
                            range=lsp.Range(
                                start=lsp.Position(line=range_.end.line + 1, character=0),
                                end=lsp.Position(line=range_.end.line + 1, character=0),
                            ),
                            new_text=" case TODO => pass\n",
                        )],
                    },
                ),
            ))

        # Extract match arm to named pattern: one edit declares the pattern at
        # the top of the file, a second replaces the arm with the new name.
        pattern_info = extract_match_arm_pattern(doc_state, range_)
        if pattern_info:
            pattern_text, pattern_range = pattern_info
            suggested_name = suggest_pattern_name(pattern_text, doc_state)
            actions.append(lsp.CodeAction(
                title=f"Extract to named pattern '{suggested_name}'",
                kind=lsp.CodeActionKind.RefactorExtract,
                edit=lsp.WorkspaceEdit(
                    changes={
                        uri: [
                            lsp.TextEdit(
                                range=lsp.Range(
                                    start=lsp.Position(line=0, character=0),
                                    end=lsp.Position(line=0, character=0),
                                ),
                                new_text=f"pattern {suggested_name} = {pattern_text}\n\n",
                            ),
                            lsp.TextEdit(
                                range=pattern_range,
                                new_text=suggested_name,
                            ),
                        ],
                    },
                ),
            ))

        # Inline variable at cursor: replace every usage with the bound value.
        inline_info = get_inline_variable_info(doc_state, range_)
        if inline_info:
            var_name, var_value, usage_ranges = inline_info
            if usage_ranges:
                edits = [
                    lsp.TextEdit(range=usage_range, new_text=var_value)
                    for usage_range in usage_ranges
                ]
                actions.append(lsp.CodeAction(
                    title=f"Inline variable '{var_name}'",
                    kind=lsp.CodeActionKind.RefactorInline,
                    edit=lsp.WorkspaceEdit(changes={uri: edits}),
                ))

        # Convert struct literal to explicit form (named-field syntax).
        struct_info = get_struct_literal_info(doc_state, range_)
        if struct_info:
            struct_range, explicit_form = struct_info
            actions.append(lsp.CodeAction(
                title="Convert to explicit struct literal",
                kind=lsp.CodeActionKind.RefactorRewrite,
                edit=lsp.WorkspaceEdit(
                    changes={
                        uri: [lsp.TextEdit(
                            range=struct_range,
                            new_text=explicit_form,
                        )],
                    },
                ),
            ))

    return actions
|
|
@@ -0,0 +1,85 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Completion handler for Yuho LSP.
|
|
3
|
+
|
|
4
|
+
Provides code completion for keywords, types, and user-defined symbols.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import List, Dict, Optional, TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
from lsprotocol import types as lsp
|
|
11
|
+
except ImportError:
|
|
12
|
+
raise ImportError(
|
|
13
|
+
"LSP dependencies not installed. Install with: pip install yuho[lsp]"
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
if TYPE_CHECKING:
|
|
17
|
+
from yuho.lsp.server import DocumentState
|
|
18
|
+
|
|
19
|
+
# Yuho keywords for completion.
# Mixes general language keywords with legal-domain keywords
# (statute, actus_reus, mens_rea, penalty, ...) specific to Yuho.
YUHO_KEYWORDS = [
    "struct", "fn", "match", "case", "consequence", "pass", "return",
    "statute", "definitions", "elements", "penalty", "illustration",
    "import", "from", "actus_reus", "mens_rea", "circumstance",
    "imprisonment", "fine", "supplementary", "TRUE", "FALSE",
]

# Yuho built-in types, offered as completion items alongside keywords.
YUHO_TYPES = [
    "int", "float", "bool", "string", "money", "percent", "date", "duration", "void",
]
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def get_completions(
    doc_state: Optional["DocumentState"],
    uri: str,
    position: lsp.Position,
) -> lsp.CompletionList:
    """Build the completion list offered at the given position.

    Always includes language keywords and built-in types; adds struct,
    function, and statute symbols when the document has a parsed AST.
    """
    items: List[lsp.CompletionItem] = []

    # Language keywords are always offered.
    items.extend(
        lsp.CompletionItem(
            label=kw,
            kind=lsp.CompletionItemKind.Keyword,
            detail="keyword",
        )
        for kw in YUHO_KEYWORDS
    )

    # Built-in types are always offered too.
    items.extend(
        lsp.CompletionItem(
            label=typ,
            kind=lsp.CompletionItemKind.TypeParameter,
            detail="built-in type",
        )
        for typ in YUHO_TYPES
    )

    # Document-local symbols require a successfully parsed AST.
    if doc_state and doc_state.ast:
        # Struct names.
        for struct in doc_state.ast.type_defs:
            items.append(lsp.CompletionItem(
                label=struct.name,
                kind=lsp.CompletionItemKind.Struct,
                detail=f"struct {struct.name}",
            ))

        # Function names, with a parameter summary in the detail text.
        for func in doc_state.ast.function_defs:
            params = ", ".join(p.name for p in func.params)
            items.append(lsp.CompletionItem(
                label=func.name,
                kind=lsp.CompletionItemKind.Function,
                detail=f"fn {func.name}({params})",
            ))

        # Statute sections, labelled "S<number>".
        for statute in doc_state.ast.statutes:
            title = statute.title.value if statute.title else ""
            items.append(lsp.CompletionItem(
                label=f"S{statute.section_number}",
                kind=lsp.CompletionItemKind.Module,
                detail=f"statute {statute.section_number}: {title}",
            ))

    return lsp.CompletionList(is_incomplete=False, items=items)
|
yuho/lsp/diagnostics.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Diagnostics publishing for Yuho LSP.
|
|
3
|
+
|
|
4
|
+
Handles parsing errors and type checker errors conversion to LSP diagnostics.
|
|
5
|
+
"""
|
|
6
|
+
|
|
7
|
+
from typing import List, TYPE_CHECKING
|
|
8
|
+
|
|
9
|
+
try:
|
|
10
|
+
from lsprotocol import types as lsp
|
|
11
|
+
except ImportError:
|
|
12
|
+
raise ImportError(
|
|
13
|
+
"LSP dependencies not installed. Install with: pip install yuho[lsp]"
|
|
14
|
+
)
|
|
15
|
+
|
|
16
|
+
from yuho.parser.wrapper import ParseError
|
|
17
|
+
from yuho.ast.type_inference import TypeInferenceVisitor
|
|
18
|
+
from yuho.ast.type_check import TypeCheckVisitor, TypeErrorInfo
|
|
19
|
+
|
|
20
|
+
if TYPE_CHECKING:
|
|
21
|
+
from yuho.lsp.server import DocumentState
|
|
22
|
+
from yuho.ast import ModuleNode
|
|
23
|
+
|
|
24
|
+
import logging
|
|
25
|
+
|
|
26
|
+
logger = logging.getLogger(__name__)
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
def run_type_checker(ast: "ModuleNode") -> List[TypeErrorInfo]:
    """Run type inference then type checking on *ast*; return errors + warnings.

    Best-effort: any crash inside the analysis is logged and swallowed so a
    broken checker never takes diagnostics publishing down with it.
    """
    try:
        # Inference must run first so the checker sees resolved types.
        inference = TypeInferenceVisitor()
        ast.accept(inference)

        checker = TypeCheckVisitor(inference.result)
        ast.accept(checker)

        return checker.result.errors + checker.result.warnings
    except Exception as e:
        logger.warning(f"Type checking failed: {e}")
        return []
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
def parse_error_to_diagnostic(error: ParseError) -> lsp.Diagnostic:
    """Map a parser error onto an LSP Diagnostic.

    Parser locations are 1-based; LSP positions are 0-based, hence the -1s.
    """
    loc = error.location
    start = lsp.Position(line=loc.line - 1, character=loc.col - 1)
    end = lsp.Position(line=loc.end_line - 1, character=loc.end_col - 1)

    return lsp.Diagnostic(
        range=lsp.Range(start=start, end=end),
        message=error.message,
        severity=lsp.DiagnosticSeverity.Error,
        source="yuho",
    )
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def type_error_to_diagnostic(error: TypeErrorInfo) -> lsp.Diagnostic:
    """Map a TypeErrorInfo onto an LSP Diagnostic."""
    # Checker positions are 1-based, LSP is 0-based; clamp so a missing
    # (zero) position never yields a negative coordinate.
    row = max(0, error.line - 1)
    col = max(0, error.column - 1)

    if error.severity == "error":
        level = lsp.DiagnosticSeverity.Error
    else:
        level = lsp.DiagnosticSeverity.Warning

    # Highlight a single character: the checker reports no end position.
    return lsp.Diagnostic(
        range=lsp.Range(
            start=lsp.Position(line=row, character=col),
            end=lsp.Position(line=row, character=col + 1),
        ),
        message=error.message,
        severity=level,
        source="yuho-typecheck",
    )
|
|
83
|
+
|
|
84
|
+
|
|
85
|
+
def collect_diagnostics(doc_state: "DocumentState") -> List[lsp.Diagnostic]:
    """Gather parser and type-checker diagnostics for one document."""
    results: List[lsp.Diagnostic] = []

    # Syntax errors reported by the parser, when parsing ran.
    parse_result = doc_state.parse_result
    if parse_result and parse_result.errors:
        results.extend(parse_error_to_diagnostic(e) for e in parse_result.errors)

    # Semantic errors need a parsed AST to analyze.
    if doc_state.ast:
        results.extend(
            type_error_to_diagnostic(e) for e in run_type_checker(doc_state.ast)
        )

    return results
|