lean-lsp-mcp 0.16.1__tar.gz → 0.17.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {lean_lsp_mcp-0.16.1/src/lean_lsp_mcp.egg-info → lean_lsp_mcp-0.17.0}/PKG-INFO +3 -4
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/pyproject.toml +7 -20
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/instructions.py +3 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/loogle.py +99 -18
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/models.py +88 -3
- lean_lsp_mcp-0.17.0/src/lean_lsp_mcp/search_utils.py +236 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/server.py +92 -76
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/utils.py +34 -6
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0/src/lean_lsp_mcp.egg-info}/PKG-INFO +3 -4
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp.egg-info/SOURCES.txt +3 -1
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp.egg-info/requires.txt +2 -3
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_diagnostic_line_range.py +55 -41
- lean_lsp_mcp-0.17.0/tests/test_error_handling.py +74 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_file_caching.py +4 -1
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_search_tools.py +26 -17
- lean_lsp_mcp-0.17.0/tests/test_structured_output.py +124 -0
- lean_lsp_mcp-0.16.1/src/lean_lsp_mcp/search_utils.py +0 -142
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/LICENSE +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/README.md +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/setup.cfg +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/__init__.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/__main__.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/client_utils.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/file_utils.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp/outline_utils.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp.egg-info/dependency_links.txt +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp.egg-info/entry_points.txt +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/src/lean_lsp_mcp.egg-info/top_level.txt +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_editor_tools.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_logging.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_misc_tools.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_outline.py +0 -0
- {lean_lsp_mcp-0.16.1 → lean_lsp_mcp-0.17.0}/tests/test_project_tools.py +0 -0

--- lean_lsp_mcp-0.16.1/src/lean_lsp_mcp.egg-info/PKG-INFO
+++ lean_lsp_mcp-0.17.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: lean-lsp-mcp
-Version: 0.16.1
+Version: 0.17.0
 Summary: Lean Theorem Prover MCP
 Author-email: Oliver Dressler <hey@oli.show>
 License-Expression: MIT
@@ -8,9 +8,8 @@ Project-URL: Repository, https://github.com/oOo0oOo/lean-lsp-mcp
 Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: leanclient==0.6.1
-Requires-Dist: mcp[cli]==1.23.1
-Requires-Dist: mcp[cli]>=1.22.0
+Requires-Dist: leanclient==0.7.0
+Requires-Dist: mcp[cli]==1.24.0
 Requires-Dist: orjson>=3.11.1
 Provides-Extra: lint
 Requires-Dist: ruff>=0.2.0; extra == "lint"

--- lean_lsp_mcp-0.16.1/pyproject.toml
+++ lean_lsp_mcp-0.17.0/pyproject.toml
@@ -1,40 +1,27 @@
 [project]
 name = "lean-lsp-mcp"
-version = "0.16.1"
+version = "0.17.0"
 description = "Lean Theorem Prover MCP"
-authors = [{name="Oliver Dressler", email="hey@oli.show"}]
+authors = [{ name = "Oliver Dressler", email = "hey@oli.show" }]
 readme = "README.md"
 requires-python = ">=3.10"
 license = "MIT"
-dependencies = [
-    "leanclient==0.6.1",
-    "mcp[cli]==1.23.1", "mcp[cli]>=1.22.0",
-    "orjson>=3.11.1",
-]
+dependencies = ["leanclient==0.7.0", "mcp[cli]==1.24.0", "orjson>=3.11.1"]
 
 [project.urls]
 Repository = "https://github.com/oOo0oOo/lean-lsp-mcp"
 
 [project.optional-dependencies]
-lint = [
-    "ruff>=0.2.0",
-]
-dev = [
-    "ruff>=0.2.0",
-    "pytest>=8.3",
-    "anyio>=4.4",
-    "pytest-asyncio>=0.23",
-]
+lint = ["ruff>=0.2.0"]
+dev = ["ruff>=0.2.0", "pytest>=8.3", "anyio>=4.4", "pytest-asyncio>=0.23"]
 
 [tool.pytest.ini_options]
 asyncio_mode = "auto"
-markers = [
-    "slow: marks tests as slow (deselect with '-m \"not slow\"')",
-]
+markers = ["slow: marks tests as slow (deselect with '-m \"not slow\"')"]
 
 [tool.setuptools]
 packages = ["lean_lsp_mcp"]
-package-dir = {"" = "src"}
+package-dir = { "" = "src" }
 
 [build-system]
 requires = ["setuptools>=61.0"]

--- lean_lsp_mcp-0.16.1/src/lean_lsp_mcp/instructions.py
+++ lean_lsp_mcp-0.17.0/src/lean_lsp_mcp/instructions.py
@@ -33,4 +33,7 @@ After finding a name: lean_local_search to verify, lean_hover_info for signature
 
 ## Return Formats
 List tools return JSON arrays. Empty = `[]`.
+
+## Error Handling
+Check `isError` in responses: `true` means failure (timeout/LSP error), while `[]` with `isError: false` means no results found.
 """
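
The new `## Error Handling` note is the contract consumers are expected to follow. A minimal client-side sketch of that rule, assuming an MCP `CallToolResult`-style object with `isError` and `content` attributes (the helper name is illustrative, not part of this package):

```python
def interpret_tool_result(result) -> list:
    """Apply the documented rule: isError=True is a failure, [] is just 'no results'."""
    if result.isError:
        # Timeout or LSP error: surface it instead of treating it as an empty result.
        raise RuntimeError(f"lean-lsp-mcp tool failed: {result.content}")
    return result.content or []
```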

--- lean_lsp_mcp-0.16.1/src/lean_lsp_mcp/loogle.py
+++ lean_lsp_mcp-0.17.0/src/lean_lsp_mcp/loogle.py
@@ -3,6 +3,7 @@
 from __future__ import annotations
 
 import asyncio
+import hashlib
 import json
 import logging
 import os
@@ -34,7 +35,7 @@ def loogle_remote(query: str, num_results: int) -> list[LoogleResult] | str:
         f"https://loogle.lean-lang.org/json?q={urllib.parse.quote(query)}",
         headers={"User-Agent": "lean-lsp-mcp/0.1"},
    )
-    with urllib.request.urlopen(req, timeout=
+    with urllib.request.urlopen(req, timeout=10) as response:
        results = orjson.loads(response.read())
        if "hits" not in results:
            return "No results found."
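
The `timeout=10` change above applies to the hosted Loogle endpoint queried by `loogle_remote`. For reference, a standalone request against that endpoint looks roughly like this (the query string is an arbitrary example; only the `hits` key is assumed, as in the code above):

```python
import json
import urllib.parse
import urllib.request

query = urllib.parse.quote("Nat.succ")  # example query
req = urllib.request.Request(
    f"https://loogle.lean-lang.org/json?q={query}",
    headers={"User-Agent": "lean-lsp-mcp/0.1"},
)
with urllib.request.urlopen(req, timeout=10) as response:
    hits = json.loads(response.read()).get("hits", [])
print(f"{len(hits)} hits")
```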

@@ -52,18 +53,25 @@ def loogle_remote(query: str, num_results: int) -> list[LoogleResult] | str:
 
 
 class LoogleManager:
-    """Manages local loogle installation and async subprocess.
+    """Manages local loogle installation and async subprocess.
+
+    Args:
+        cache_dir: Directory for loogle repo and indices (default: ~/.cache/lean-lsp-mcp/loogle)
+        project_path: Optional Lean project path to index its .lake/packages dependencies
+    """
 
     REPO_URL = "https://github.com/nomeata/loogle.git"
     READY_SIGNAL = "Loogle is ready."
 
-    def __init__(self, cache_dir: Path | None = None):
+    def __init__(self, cache_dir: Path | None = None, project_path: Path | None = None):
         self.cache_dir = cache_dir or get_cache_dir()
         self.repo_dir = self.cache_dir / "repo"
         self.index_dir = self.cache_dir / "index"
+        self.project_path = project_path
         self.process: asyncio.subprocess.Process | None = None
         self._ready = False
         self._lock = asyncio.Lock()
+        self._extra_paths: list[Path] = []
 
     @property
     def binary_path(self) -> Path:
@@ -181,21 +189,54 @@ class LoogleManager:
             return False, err
         return True, ""
 
+    def _discover_project_paths(self) -> list[Path]:
+        """Find .lake/packages lib paths from the user's project."""
+        if not self.project_path:
+            return []
+        paths = []
+        # Check packages directory
+        lake_packages = self.project_path / ".lake" / "packages"
+        if lake_packages.exists():
+            for pkg_dir in lake_packages.iterdir():
+                if not pkg_dir.is_dir():
+                    continue
+                lib_path = pkg_dir / ".lake" / "build" / "lib" / "lean"
+                if lib_path.exists():
+                    paths.append(lib_path)
+        # Also add the project's own build output
+        project_lib = self.project_path / ".lake" / "build" / "lib" / "lean"
+        if project_lib.exists():
+            paths.append(project_lib)
+        return sorted(paths)
+
     def _get_index_path(self) -> Path:
-
+        base = f"mathlib-{self._get_mathlib_version()}"
+        if self._extra_paths:
+            # Include hash of extra paths for project-specific index
+            paths_str = ":".join(str(p) for p in sorted(self._extra_paths))
+            path_hash = hashlib.sha256(paths_str.encode()).hexdigest()[:8]
+            return self.index_dir / f"{base}-{path_hash}.idx"
+        return self.index_dir / f"{base}.idx"
 
     def _cleanup_old_indices(self) -> None:
-        """Remove old index files from previous mathlib versions.
+        """Remove old index files from previous mathlib versions.
+
+        Cleans up both mathlib-only indexes (mathlib-<version>.idx) and
+        project-specific indexes (mathlib-<version>-<hash>.idx) that don't
+        match the current mathlib version.
+        """
         if not self.index_dir.exists():
             return
-
+        current_mathlib = f"mathlib-{self._get_mathlib_version()}"
         for idx in self.index_dir.glob("*.idx"):
-
-
-
-
-
-
+            # Keep indexes with current mathlib version (both base and project-specific)
+            if idx.name.startswith(current_mathlib):
+                continue
+            try:
+                idx.unlink()
+                logger.info(f"Removed old index: {idx.name}")
+            except Exception:
+                pass
 
     def _build_index(self) -> Path | None:
         index_path = self._get_index_path()
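
The new `_get_index_path` keys each on-disk index to the mathlib version plus a short SHA-256 digest of the sorted extra library paths, which is what lets `_cleanup_old_indices` drop anything not starting with the current `mathlib-<version>` prefix. A standalone sketch of that naming scheme (version string and path below are invented for illustration):

```python
import hashlib
from pathlib import Path

def index_name(mathlib_version: str, extra_paths: list[Path]) -> str:
    # Base name is tied to the mathlib version; project paths add a short hash suffix.
    base = f"mathlib-{mathlib_version}"
    if not extra_paths:
        return f"{base}.idx"
    paths_str = ":".join(str(p) for p in sorted(extra_paths))
    path_hash = hashlib.sha256(paths_str.encode()).hexdigest()[:8]
    return f"{base}-{path_hash}.idx"

# e.g. "mathlib-4.14.0-<8 hex chars>.idx"
print(index_name("4.14.0", [Path(".lake/packages/batteries/.lake/build/lib/lean")]))
```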

@@ -205,17 +246,37 @@ class LoogleManager:
             return None
         self.index_dir.mkdir(parents=True, exist_ok=True)
         self._cleanup_old_indices()
-
-
-
-
-
+
+        # Build command with extra paths
+        cmd = [str(self.binary_path), "--write-index", str(index_path), "--json"]
+        for path in self._extra_paths:
+            cmd.extend(["--path", str(path)])
+        cmd.append("")  # Empty query for index building
+
+        if self._extra_paths:
+            logger.info(
+                f"Building search index with {len(self._extra_paths)} extra paths..."
             )
+        else:
+            logger.info("Building search index...")
+        try:
+            self._run(cmd, timeout=600)
             return index_path if index_path.exists() else None
         except Exception as e:
             logger.error(f"Index build error: {e}")
             return None
 
+    def set_project_path(self, project_path: Path | None) -> bool:
+        """Update project path and rediscover extra paths. Returns True if paths changed."""
+        self.project_path = project_path
+        new_paths = self._discover_project_paths()
+        if new_paths != self._extra_paths:
+            self._extra_paths = new_paths
+            if new_paths:
+                logger.info(f"Discovered {len(new_paths)} project library paths")
+            return True
+        return False
+
     def ensure_installed(self) -> bool:
         ok, err = self._check_prerequisites()
         if not ok:
@@ -223,6 +284,10 @@ class LoogleManager:
             return False
         if not self._clone_repo() or not self._build_loogle():
             return False
+        # Discover project paths before building index
+        self._extra_paths = self._discover_project_paths()
+        if self._extra_paths:
+            logger.info(f"Indexing {len(self._extra_paths)} project library paths")
         if not self._build_index():
             logger.warning("Index build failed, loogle will build on startup")
         return self.is_installed
@@ -234,10 +299,26 @@ class LoogleManager:
         if not ok:
             logger.error(f"Loogle environment check failed: {err}")
             return False
+
+        # Check if project paths changed and we need to rebuild index
+        if self.project_path:
+            new_paths = self._discover_project_paths()
+            if new_paths != self._extra_paths:
+                self._extra_paths = new_paths
+                # Build new index if paths changed
+                self._build_index()
+
         cmd = [str(self.binary_path), "--json", "--interactive"]
         if (idx := self._get_index_path()).exists():
             cmd.extend(["--read-index", str(idx)])
-
+        # Add extra paths for runtime search (in case not all are indexed)
+        for path in self._extra_paths:
+            cmd.extend(["--path", str(path)])
+
+        if self._extra_paths:
+            logger.info(f"Starting loogle with {len(self._extra_paths)} extra paths...")
+        else:
+            logger.info("Starting loogle subprocess...")
         try:
             self.process = await asyncio.create_subprocess_exec(
                 *cmd,
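
Taken together, the hunks above make the local Loogle index project-aware. A hedged usage sketch, using only names visible in this diff (`LoogleManager`, `ensure_installed`, `set_project_path`); the project paths are invented examples:

```python
from pathlib import Path

from lean_lsp_mcp.loogle import LoogleManager

manager = LoogleManager(project_path=Path("~/my-lean-project").expanduser())
if manager.ensure_installed():
    # Switching to another project rediscovers .lake/packages library paths;
    # True means the discovered paths changed, so a different project-specific
    # index (mathlib-<version>-<hash>.idx) applies from here on.
    changed = manager.set_project_path(Path("~/another-project").expanduser())
```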

--- lean_lsp_mcp-0.16.1/src/lean_lsp_mcp/models.py
+++ lean_lsp_mcp-0.17.0/src/lean_lsp_mcp/models.py
@@ -46,7 +46,15 @@ class DiagnosticMessage(BaseModel):
 
 class GoalState(BaseModel):
     line_context: str = Field(description="Source line where goals were queried")
-    goals: str = Field(
+    goals: Optional[List[str]] = Field(
+        None, description="Goal list at specified column position"
+    )
+    goals_before: Optional[List[str]] = Field(
+        None, description="Goals at line start (when column omitted)"
+    )
+    goals_after: Optional[List[str]] = Field(
+        None, description="Goals at line end (when column omitted)"
+    )
 
 
 class CompletionItem(BaseModel):
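
`GoalState.goals` changes from a plain string to optional goal lists, with `goals_before`/`goals_after` covering the column-omitted case. A small sketch of the two shapes this allows (import path follows the package layout; the goal strings are made up):

```python
from lean_lsp_mcp.models import GoalState

# Column given: only `goals` is populated.
at_column = GoalState(line_context="  exact h", goals=["⊢ p ∧ q"])

# Column omitted: goals at line start and line end are reported instead.
whole_line = GoalState(
    line_context="  exact h",
    goals_before=["⊢ p ∧ q"],
    goals_after=[],
)
```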

@@ -94,8 +102,8 @@ class FileOutline(BaseModel):
 
 class AttemptResult(BaseModel):
     snippet: str = Field(description="Code snippet that was tried")
-
-
+    goals: List[str] = Field(
+        default_factory=list, description="Goal list after applying snippet"
     )
     diagnostics: List[DiagnosticMessage] = Field(
         default_factory=list, description="Diagnostics for this attempt"
@@ -118,3 +126,80 @@ class RunResult(BaseModel):
 class DeclarationInfo(BaseModel):
     file_path: str = Field(description="Path to declaration file")
     content: str = Field(description="File content")
+
+
+# Wrapper models for list-returning tools
+# FastMCP flattens bare lists into separate TextContent blocks, causing serialization issues.
+# Wrapping in a model ensures proper JSON serialization.
+
+
+class DiagnosticsResult(BaseModel):
+    """Wrapper for diagnostic messages list."""
+
+    items: List[DiagnosticMessage] = Field(
+        default_factory=list, description="List of diagnostic messages"
+    )
+
+
+class CompletionsResult(BaseModel):
+    """Wrapper for completions list."""
+
+    items: List[CompletionItem] = Field(
+        default_factory=list, description="List of completion items"
+    )
+
+
+class MultiAttemptResult(BaseModel):
+    """Wrapper for multi-attempt results list."""
+
+    items: List[AttemptResult] = Field(
+        default_factory=list, description="List of attempt results"
+    )
+
+
+class LocalSearchResults(BaseModel):
+    """Wrapper for local search results list."""
+
+    items: List[LocalSearchResult] = Field(
+        default_factory=list, description="List of local search results"
+    )
+
+
+class LeanSearchResults(BaseModel):
+    """Wrapper for LeanSearch results list."""
+
+    items: List[LeanSearchResult] = Field(
+        default_factory=list, description="List of LeanSearch results"
+    )
+
+
+class LoogleResults(BaseModel):
+    """Wrapper for Loogle results list."""
+
+    items: List[LoogleResult] = Field(
+        default_factory=list, description="List of Loogle results"
+    )
+
+
+class LeanFinderResults(BaseModel):
+    """Wrapper for Lean Finder results list."""
+
+    items: List[LeanFinderResult] = Field(
+        default_factory=list, description="List of Lean Finder results"
+    )
+
+
+class StateSearchResults(BaseModel):
+    """Wrapper for state search results list."""
+
+    items: List[StateSearchResult] = Field(
+        default_factory=list, description="List of state search results"
+    )
+
+
+class PremiseResults(BaseModel):
+    """Wrapper for premise results list."""
+
+    items: List[PremiseResult] = Field(
+        default_factory=list, description="List of premise results"
+    )
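
The comment in the hunk explains why every list-returning tool now hands back a wrapper model instead of a bare list. A standalone illustration of that pattern (`ExampleItem`/`ExampleItems` are invented names, not part of lean-lsp-mcp):

```python
from typing import List

from pydantic import BaseModel, Field

class ExampleItem(BaseModel):
    name: str = Field(description="Declaration name")

class ExampleItems(BaseModel):
    items: List[ExampleItem] = Field(default_factory=list)

# One model -> one JSON object: {"items":[{"name":"Nat.add_comm"}]}
print(ExampleItems(items=[ExampleItem(name="Nat.add_comm")]).model_dump_json())
```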

--- /dev/null
+++ lean_lsp_mcp-0.17.0/src/lean_lsp_mcp/search_utils.py
@@ -0,0 +1,236 @@
+"""Utilities for Lean search tools."""
+
+from __future__ import annotations
+
+from collections.abc import Iterable
+from functools import lru_cache
+import platform
+import re
+import shutil
+import subprocess
+import threading
+from orjson import loads as _json_loads
+from pathlib import Path
+
+
+INSTALL_URL = "https://github.com/BurntSushi/ripgrep#installation"
+
+_PLATFORM_INSTRUCTIONS: dict[str, Iterable[str]] = {
+    "Windows": (
+        "winget install BurntSushi.ripgrep.MSVC",
+        "choco install ripgrep",
+    ),
+    "Darwin": ("brew install ripgrep",),
+    "Linux": (
+        "sudo apt-get install ripgrep",
+        "sudo dnf install ripgrep",
+    ),
+}
+
+
+def _create_ripgrep_process(command: list[str], *, cwd: str) -> subprocess.Popen[str]:
+    """Spawn ripgrep and return a process with line-streaming stdout.
+
+    Separated for test monkeypatching and to allow early termination once we
+    have enough matches.
+    """
+    return subprocess.Popen(
+        command,
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        text=True,
+        cwd=cwd,
+    )
+
+
+def check_ripgrep_status() -> tuple[bool, str]:
+    """Check whether ``rg`` is available on PATH and return status + message."""
+
+    if shutil.which("rg"):
+        return True, ""
+
+    system = platform.system()
+    platform_instructions = _PLATFORM_INSTRUCTIONS.get(
+        system, ("Check alternative installation methods.",)
+    )
+
+    lines = [
+        "ripgrep (rg) was not found on your PATH. The lean_local_search tool uses ripgrep for fast declaration search.",
+        "",
+        "Installation options:",
+        *(f" - {item}" for item in platform_instructions),
+        f"More installation options: {INSTALL_URL}",
+    ]
+
+    return False, "\n".join(lines)
+
+
+def lean_local_search(
+    query: str,
+    limit: int = 32,
+    project_root: Path | None = None,
+) -> list[dict[str, str]]:
+    """Search Lean declarations matching ``query`` using ripgrep; results include theorems, lemmas, defs, classes, instances, structures, inductives, abbrevs, and opaque decls."""
+    root = (project_root or Path.cwd()).resolve()
+
+    pattern = (
+        rf"^\s*(?:theorem|lemma|def|axiom|class|instance|structure|inductive|abbrev|opaque)\s+"
+        rf"(?:[A-Za-z0-9_'.]+\.)*{re.escape(query)}[A-Za-z0-9_'.]*(?:\s|:)"
+    )
+
+    command = [
+        "rg",
+        "--json",
+        "--no-ignore",
+        "--smart-case",
+        "--hidden",
+        "--color",
+        "never",
+        "--no-messages",
+        "-g",
+        "*.lean",
+        "-g",
+        "!.git/**",
+        "-g",
+        "!.lake/build/**",
+        pattern,
+        str(root),
+    ]
+
+    if lean_src := _get_lean_src_search_path():
+        command.append(lean_src)
+
+    process = _create_ripgrep_process(command, cwd=str(root))
+
+    matches: list[dict[str, str]] = []
+    stderr_text = ""
+    terminated_early = False
+    stderr_chunks: list[str] = []
+    stderr_chars = 0
+    stderr_truncated = False
+    max_stderr_chars = 100_000
+
+    def _drain_stderr(pipe) -> None:
+        nonlocal stderr_chars, stderr_truncated
+        try:
+            for err_line in pipe:
+                if stderr_chars < max_stderr_chars:
+                    stderr_chunks.append(err_line)
+                    stderr_chars += len(err_line)
+                else:
+                    stderr_truncated = True
+        except Exception:
+            return
+
+    stderr_thread: threading.Thread | None = None
+    if process.stderr is not None:
+        stderr_thread = threading.Thread(
+            target=_drain_stderr,
+            args=(process.stderr,),
+            name="lean-local-search-rg-stderr",
+            daemon=True,
+        )
+        stderr_thread.start()
+
+    try:
+        stdout = process.stdout
+        if stdout is None:
+            raise RuntimeError("ripgrep did not provide stdout pipe")
+
+        for line in stdout:
+            if not line or (event := _json_loads(line)).get("type") != "match":
+                continue
+
+            data = event["data"]
+            parts = data["lines"]["text"].lstrip().split(maxsplit=2)
+            if len(parts) < 2:
+                continue
+
+            decl_kind, decl_name = parts[0], parts[1].rstrip(":")
+            file_path = Path(data["path"]["text"])
+            abs_path = (
+                file_path if file_path.is_absolute() else (root / file_path).resolve()
+            )
+
+            try:
+                display_path = str(abs_path.relative_to(root))
+            except ValueError:
+                display_path = str(file_path)
+
+            matches.append({"name": decl_name, "kind": decl_kind, "file": display_path})
+
+            if len(matches) >= limit:
+                terminated_early = True
+                try:
+                    process.terminate()
+                except Exception:
+                    pass
+                break
+
+        try:
+            if terminated_early:
+                process.wait(timeout=5)
+            else:
+                process.wait()
+        except subprocess.TimeoutExpired:
+            process.kill()
+            process.wait()
+    finally:
+        if process.returncode is None:
+            try:
+                process.terminate()
+            except Exception:
+                pass
+            try:
+                process.wait(timeout=5)
+            except Exception:
+                try:
+                    process.kill()
+                except Exception:
+                    pass
+                try:
+                    process.wait(timeout=5)
+                except Exception:
+                    pass
+        if stderr_thread is not None:
+            stderr_thread.join(timeout=1)
+        if process.stdout is not None:
+            process.stdout.close()
+        if process.stderr is not None:
+            process.stderr.close()
+
+    if stderr_chunks:
+        stderr_text = "".join(stderr_chunks)
+        if stderr_truncated:
+            stderr_text += "\n[stderr truncated]"
+
+    returncode = process.returncode if process.returncode is not None else 0
+
+    if returncode not in (0, 1) and not matches:
+        error_msg = f"ripgrep exited with code {returncode}"
+        if stderr_text:
+            error_msg += f"\n{stderr_text}"
+        raise RuntimeError(error_msg)
+
+    return matches
+
+
+@lru_cache(maxsize=1)
+def _get_lean_src_search_path() -> str | None:
+    """Return the Lean stdlib directory, if available (cache once)."""
+    try:
+        completed = subprocess.run(
+            ["lean", "--print-prefix"], capture_output=True, text=True
+        )
+    except (FileNotFoundError, subprocess.CalledProcessError):
+        return None
+
+    prefix = completed.stdout.strip()
+    if not prefix:
+        return None
+
+    candidate = Path(prefix).expanduser().resolve() / "src"
+    if candidate.exists():
+        return str(candidate)
+
+    return None
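
A short usage sketch for the new module, using the two public entry points it defines (`check_ripgrep_status`, `lean_local_search`); the query and project root are placeholders:

```python
from pathlib import Path

from lean_lsp_mcp.search_utils import check_ripgrep_status, lean_local_search

ok, message = check_ripgrep_status()
if not ok:
    print(message)  # platform-specific install hints for ripgrep
else:
    # Declarations whose name starts with "add_comm" in *.lean files under the
    # project root (plus the Lean stdlib src when `lean --print-prefix` works).
    for hit in lean_local_search("add_comm", limit=10, project_root=Path(".")):
        print(hit["kind"], hit["name"], hit["file"])
```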