lean-explore 1.0.0-py3-none-any.whl → 1.0.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- lean_explore/cli/data_commands.py +126 -109
- lean_explore/extract/doc_gen4.py +1 -1
- lean_explore/extract/embeddings.py +1 -3
- lean_explore/extract/index.py +1 -2
- lean_explore/extract/package_utils.py +2 -2
- lean_explore/mcp/tools.py +1 -0
- lean_explore/search/engine.py +2 -6
- lean_explore/util/reranker_client.py +1 -3
- {lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/METADATA +1 -1
- {lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/RECORD +14 -14
- {lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/WHEEL +0 -0
- {lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/entry_points.txt +0 -0
- {lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/licenses/LICENSE +0 -0
- {lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/top_level.txt +0 -0
lean_explore/cli/data_commands.py
CHANGED

@@ -3,42 +3,26 @@
 """Manages local Lean Explore data toolchains.

 Provides CLI commands to download, install, and clean data files (database,
-FAISS index, etc.) from remote storage
+FAISS index, BM25 indexes, etc.) from remote storage.
 """

 import logging
 import shutil
-from
+from pathlib import Path

-import pooch
 import requests
 import typer
 from rich.console import Console
+from rich.progress import (
+    BarColumn,
+    DownloadColumn,
+    Progress,
+    TextColumn,
+    TransferSpeedColumn,
+)

 from lean_explore.config import Config

-
-class ManifestFileEntry(TypedDict):
-    """A file entry in the manifest's toolchain version."""
-
-    remote_name: str
-    local_name: str
-    sha256: str
-
-
-class ToolchainVersionInfo(TypedDict):
-    """Version information for a specific toolchain in the manifest."""
-
-    assets_base_path_r2: str
-    files: list[ManifestFileEntry]
-
-
-class Manifest(TypedDict):
-    """Remote data manifest structure."""
-
-    default_toolchain: str
-    toolchains: dict[str, ToolchainVersionInfo]
-
 logger = logging.getLogger(__name__)

 app = typer.Typer(
@@ -48,64 +32,79 @@ app = typer.Typer(
     no_args_is_help=True,
 )

+# Files required for the search engine (relative to version directory)
+REQUIRED_FILES: list[str] = [
+    "lean_explore.db",
+    "informalization_faiss.index",
+    "informalization_faiss_ids_map.json",
+    "bm25_ids_map.json",
+]
+
+# BM25 index directories and their contents
+BM25_DIRECTORIES: dict[str, list[str]] = {
+    "bm25_name_raw": [
+        "data.csc.index.npy",
+        "indices.csc.index.npy",
+        "indptr.csc.index.npy",
+        "nonoccurrence_array.index.npy",
+        "params.index.json",
+        "vocab.index.json",
+    ],
+    "bm25_name_spaced": [
+        "data.csc.index.npy",
+        "indices.csc.index.npy",
+        "indptr.csc.index.npy",
+        "nonoccurrence_array.index.npy",
+        "params.index.json",
+        "vocab.index.json",
+    ],
+}
+

 def _get_console() -> Console:
     """Create a Rich console instance for output."""
     return Console()


-def
-    """
+def _fetch_latest_version() -> str:
+    """Fetch the latest version identifier from remote storage.

     Returns:
-        The
+        The version string (e.g., "20260127_103630").
+
+    Raises:
+        ValueError: If the latest version cannot be fetched.
     """
-
+    latest_url = f"{Config.R2_ASSETS_BASE_URL}/assets/latest.txt"
     try:
-        response = requests.get(
+        response = requests.get(latest_url, timeout=10)
         response.raise_for_status()
-        return response.
+        return response.text.strip()
     except requests.exceptions.RequestException as error:
-        logger.error("Failed to fetch
-
-        return None
+        logger.error("Failed to fetch latest version: %s", error)
+        raise ValueError(f"Failed to fetch latest version: {error}") from error


-def
-    """
+def _download_file(url: str, destination: Path, progress: Progress) -> None:
+    """Download a file with progress tracking.

     Args:
-
-
-
-    Returns:
-        The resolved version string.
-
-    Raises:
-        ValueError: If the version cannot be resolved.
+        url: The URL to download from.
+        destination: The local path to save the file.
+        progress: Rich progress instance for tracking.
     """
-
-    resolved = manifest.get("default_toolchain")
-    if not resolved:
-        raise ValueError("No default_toolchain specified in manifest")
-    return resolved
-    return version
+    destination.parent.mkdir(parents=True, exist_ok=True)

+    response = requests.get(url, stream=True, timeout=300)
+    response.raise_for_status()

-
-
+    total_size = int(response.headers.get("content-length", 0))
+    task_id = progress.add_task(destination.name, total=total_size)

-
-
-
-
-        A dictionary mapping remote filenames to SHA256 checksums.
-    """
-    return {
-        file_entry["remote_name"]: f"sha256:{file_entry['sha256']}"
-        for file_entry in version_info.get("files", [])
-        if file_entry.get("remote_name") and file_entry.get("sha256")
-    }
+    with open(destination, "wb") as file:
+        for chunk in response.iter_content(chunk_size=8192):
+            file.write(chunk)
+            progress.update(task_id, advance=len(chunk))


 def _write_active_version(version: str) -> None:
@@ -139,53 +138,64 @@ def _cleanup_old_versions(current_version: str) -> None:


 def _install_toolchain(version: str | None = None) -> None:
-    """
+    """Install the data toolchain for the specified version.

-    Downloads
-
-
+    Downloads all required data files (database, FAISS index, BM25 indexes)
+    from remote storage. After successful installation, sets this version
+    as the active version and cleans up old versions.

     Args:
-        version: The version to install. If None,
+        version: The version to install. If None, fetches the latest version.

     Raises:
-        ValueError: If
+        ValueError: If version fetch fails or download errors occur.
     """
     console = _get_console()

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    if version:
+        resolved_version = version
+    else:
+        console.print("Fetching latest version...")
+        resolved_version = _fetch_latest_version()
+
+    console.print(f"Installing version: [bold]{resolved_version}[/bold]")
+
+    base_url = f"{Config.R2_ASSETS_BASE_URL}/assets/{resolved_version}"
+    cache_path = Config.CACHE_DIRECTORY / resolved_version
+
+    # Build list of all files to download
+    files_to_download: list[tuple[str, Path]] = []
+
+    for filename in REQUIRED_FILES:
+        url = f"{base_url}/{filename}"
+        destination = cache_path / filename
+        files_to_download.append((url, destination))
+
+    for directory_name, directory_files in BM25_DIRECTORIES.items():
+        for filename in directory_files:
+            url = f"{base_url}/{directory_name}/{filename}"
+            destination = cache_path / directory_name / filename
+            files_to_download.append((url, destination))
+
+    # Download all files with progress
+    with Progress(
+        TextColumn("[bold blue]{task.description}"),
+        BarColumn(),
+        DownloadColumn(),
+        TransferSpeedColumn(),
+        console=console,
+    ) as progress:
+        for url, destination in files_to_download:
+            if destination.exists():
+                logger.info("Skipping existing file: %s", destination.name)
+                continue
+            try:
+                _download_file(url, destination, progress)
+            except requests.exceptions.RequestException as error:
+                logger.error("Failed to download %s: %s", url, error)
+                raise ValueError(f"Failed to download {url}: {error}") from error
+
+    # Set this version as active and clean up old versions
     _write_active_version(resolved_version)
     _cleanup_old_versions(resolved_version)

@@ -208,29 +218,36 @@ def fetch(
         None,
         "--version",
         "-v",
-        help="Version to install (e.g., '
+        help="Version to install (e.g., '20260127_103630'). Defaults to latest.",
     ),
 ) -> None:
-    """
+    """Fetch and install the data toolchain from remote storage.

-    Downloads the database, FAISS index, and
-
+    Downloads the database, FAISS index, and BM25 indexes required for
+    local search. Automatically cleans up old cached versions.
     """
     _install_toolchain(version)


 @app.command("clean")
 def clean_data_toolchains() -> None:
-    """
+    """Remove all downloaded local data toolchains."""
     console = _get_console()

-
+    cache_exists = Config.CACHE_DIRECTORY.exists()
+    version_file = Config.CACHE_DIRECTORY.parent / "active_version"
+    version_exists = version_file.exists()
+
+    if not cache_exists and not version_exists:
         console.print("[yellow]No local data found to clean.[/yellow]")
         return

     if typer.confirm("Delete all cached data?", default=False, abort=True):
         try:
-
+            if cache_exists:
+                shutil.rmtree(Config.CACHE_DIRECTORY)
+            if version_exists:
+                version_file.unlink()
             console.print("[green]Data cache cleared.[/green]")
         except OSError as error:
             logger.error("Failed to clean cache directory: %s", error)
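A note on the new download path in data_commands.py: the old pooch/manifest flow is replaced by plain requests streaming plus a rich progress bar. The sketch below is a minimal, self-contained illustration of that pattern; the URL, destination, and function name are hypothetical and not part of the package.

    from pathlib import Path

    import requests
    from rich.progress import (
        BarColumn,
        DownloadColumn,
        Progress,
        TextColumn,
        TransferSpeedColumn,
    )


    def download_with_progress(url: str, destination: Path) -> None:
        """Stream a file to disk in chunks while updating a rich progress bar."""
        destination.parent.mkdir(parents=True, exist_ok=True)
        response = requests.get(url, stream=True, timeout=300)
        response.raise_for_status()
        total_size = int(response.headers.get("content-length", 0))
        with Progress(
            TextColumn("[bold blue]{task.description}"),
            BarColumn(),
            DownloadColumn(),
            TransferSpeedColumn(),
        ) as progress:
            task_id = progress.add_task(destination.name, total=total_size)
            with open(destination, "wb") as file:
                for chunk in response.iter_content(chunk_size=8192):
                    file.write(chunk)
                    progress.update(task_id, advance=len(chunk))


    if __name__ == "__main__":
        # Hypothetical URL; replace with a real asset URL to try it out.
        download_with_progress("https://example.com/data.bin", Path("/tmp/data.bin"))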
lean_explore/extract/doc_gen4.py
CHANGED

@@ -186,7 +186,7 @@ async def run_doc_gen4(

     config = PACKAGE_REGISTRY[package_name]
     workspace_path = Path("lean") / package_name
-    logger.info(f"\n{'='*50}\nPackage: {package_name}\n{'='*50}")
+    logger.info(f"\n{'=' * 50}\nPackage: {package_name}\n{'=' * 50}")

     if fresh:
         _clear_workspace_cache(workspace_path)
lean_explore/extract/embeddings.py
CHANGED

@@ -341,9 +341,7 @@ async def generate_embeddings(
     # Phase 2: Generate embeddings for remaining declarations
     logger.info("Phase 2: Generating embeddings for remaining declarations...")
     client = EmbeddingClient(model_name=model_name, max_length=max_seq_length)
-    logger.info(
-        f"Using {client.model_name} on {client.device}"
-    )
+    logger.info(f"Using {client.model_name} on {client.device}")

     total = len(remaining)
     total_embeddings = 0
lean_explore/extract/index.py
CHANGED

@@ -95,8 +95,7 @@ def _build_faiss_index(embeddings: np.ndarray, device: str) -> faiss.Index:
     nlist = max(256, int(np.sqrt(num_vectors)))

     logger.info(
-        f"Building FAISS IVF index for {num_vectors} vectors "
-        f"with {nlist} clusters..."
+        f"Building FAISS IVF index for {num_vectors} vectors with {nlist} clusters..."
     )

     # Use inner product (cosine similarity on normalized vectors)
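For context on the index being built here, the following is a minimal sketch of a FAISS IVF index over L2-normalized vectors using the same nlist heuristic and inner-product metric. The helper name and the random stand-in vectors are illustrative only; the package's _build_faiss_index also takes a device argument.

    import faiss
    import numpy as np


    def build_ivf_index(embeddings: np.ndarray) -> faiss.Index:
        """Train and populate an IVF index using inner product on unit vectors."""
        num_vectors, dim = embeddings.shape
        nlist = max(256, int(np.sqrt(num_vectors)))
        # Cosine similarity equals inner product once vectors are normalized.
        faiss.normalize_L2(embeddings)
        quantizer = faiss.IndexFlatIP(dim)
        index = faiss.IndexIVFFlat(quantizer, dim, nlist, faiss.METRIC_INNER_PRODUCT)
        index.train(embeddings)
        index.add(embeddings)
        return index


    # Toy usage with random vectors standing in for real embeddings.
    vectors = np.random.rand(100_000, 64).astype(np.float32)
    index = build_ivf_index(vectors)
    scores, ids = index.search(vectors[:1], 5)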
lean_explore/extract/package_utils.py
CHANGED

@@ -91,11 +91,11 @@ def update_lakefile_docgen_version(lakefile_path: Path, lean_version: str) -> None:
     content = lakefile_path.read_text()

     pattern = (
-        r
+        r"require «doc-gen4» from git\s+"
         r'"https://github\.com/leanprover/doc-gen4"(?:\s+@\s+"[^"]*")?'
     )
     replacement = (
-        f
+        f"require «doc-gen4» from git\n"
         f' "https://github.com/leanprover/doc-gen4" @ "{lean_version}"'
     )
     new_content = re.sub(pattern, replacement, content)
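To see what the rebuilt pattern and replacement do, here is a small standalone sketch; the lakefile snippet and the lean_version value are hypothetical.

    import re

    lean_version = "v4.15.0"  # hypothetical revision string
    content = (
        'require «doc-gen4» from git\n'
        '  "https://github.com/leanprover/doc-gen4" @ "v4.14.0"\n'
    )
    pattern = (
        r"require «doc-gen4» from git\s+"
        r'"https://github\.com/leanprover/doc-gen4"(?:\s+@\s+"[^"]*")?'
    )
    replacement = (
        f"require «doc-gen4» from git\n"
        f' "https://github.com/leanprover/doc-gen4" @ "{lean_version}"'
    )
    # Re-pins the doc-gen4 dependency to the requested revision.
    print(re.sub(pattern, replacement, content))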
lean_explore/mcp/tools.py
CHANGED

lean_explore/search/engine.py
CHANGED

@@ -232,9 +232,7 @@ class SearchEngine:
         Map of declaration ID to semantic similarity score.
         """
         embedding_response = await self.embedding_client.embed([query], is_query=True)
-        query_embedding = np.array(
-            [embedding_response.embeddings[0]], dtype=np.float32
-        )
+        query_embedding = np.array([embedding_response.embeddings[0]], dtype=np.float32)

         import faiss as faiss_module


@@ -581,9 +579,7 @@ class SearchEngine:
         declarations_map = self._filter_by_packages(declarations_map, packages)
         # Filter boosted_scores to only include filtered declarations
         boosted_scores = [
-            (cid, score)
-            for cid, score in boosted_scores
-            if cid in declarations_map
+            (cid, score) for cid, score in boosted_scores if cid in declarations_map
         ]
         logger.info(f"Filtered to {len(declarations_map)} in {packages}")

lean_explore/util/reranker_client.py
CHANGED

@@ -86,9 +86,7 @@ class RerankerClient:
         Formatted string for the reranker model.
         """
         return (
-            f"<Instruct>: {self.instruction}\n"
-            f"<Query>: {query}\n"
-            f"<Document>: {document}"
+            f"<Instruct>: {self.instruction}\n<Query>: {query}\n<Document>: {document}"
         )

     @torch.no_grad()
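The change above only collapses the reranker prompt template into a single f-string. Below is a tiny sketch of the resulting prompt shape with a hypothetical instruction, query, and document; the standalone helper name is illustrative, since the real template lives on RerankerClient.

    def format_reranker_input(instruction: str, query: str, document: str) -> str:
        """Assemble the single-string prompt used by the reranker template above."""
        return f"<Instruct>: {instruction}\n<Query>: {query}\n<Document>: {document}"


    prompt = format_reranker_input(
        "Judge whether the document answers the query.",
        "composition of continuous functions is continuous",
        "theorem Continuous.comp : Continuous g → Continuous f → Continuous (g ∘ f)",
    )
    print(prompt)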
{lean_explore-1.0.0.dist-info → lean_explore-1.0.1.dist-info}/RECORD
CHANGED

@@ -3,30 +3,30 @@ lean_explore/config.py,sha256=oZzGbpmOn1h2IZV6O9CNbw-zRasoOb8ejogkOqVp_BY,8464
 lean_explore/api/__init__.py,sha256=twql10w_SSqNbMD3tsCi0KYFp5A5n3UmUKSAfgWtwFc,341
 lean_explore/api/client.py,sha256=udvC28Sa797DxdfiAGRGgPpDMHPp3yN1ymkfwJ-wvi8,3741
 lean_explore/cli/__init__.py,sha256=JRpFfaP3DdPcPZ2MMIjJVw1RJ9gXLc8vBDa77favFlA,405
-lean_explore/cli/data_commands.py,sha256=
+lean_explore/cli/data_commands.py,sha256=bpPfzYKwKhVz4gy727KdMYyZjO6UqozHS2xXiBF2l58,8084
 lean_explore/cli/display.py,sha256=t7qfP8Cdw05KM-kfYmFEeWSHNzyYuBOcvx4aCb5lFv0,5338
 lean_explore/cli/main.py,sha256=feVxTmzH0FMZGP7Urb6X43atlESJIYXgC6DJylgiKp0,3937
 lean_explore/extract/__init__.py,sha256=ZpCPPHjFCRxyHwYxQNhGpbpd_XuZOkgB9qkxa9zSUD0,184
 lean_explore/extract/__main__.py,sha256=Aquy2g-WIU7YO39UZRq0EoHgJWW-uM5G0FSK2OwDuR8,12010
-lean_explore/extract/doc_gen4.py,sha256=
+lean_explore/extract/doc_gen4.py,sha256=5pvw-EdfDxg1NPxLpa6kQuoVUe1_TtKL1_hO13I0t78,6734
 lean_explore/extract/doc_parser.py,sha256=tb3y7409mn5DhPjR2prPh2Cm7QMf3VJy8qfir_-Y4z8,17511
-lean_explore/extract/embeddings.py,sha256=
+lean_explore/extract/embeddings.py,sha256=atZzaM2wZBSZflL2sTbStGUSd1WKjyex218fMxG_CWc,11875
 lean_explore/extract/github.py,sha256=1Zyyl2u1esULJ3KFHHL8F80zcxLDx1Fh8u_gxCmBTi8,3499
-lean_explore/extract/index.py,sha256=
+lean_explore/extract/index.py,sha256=ctkhXIPV-IV6bKa6TzSeaXNof3E6HZxrLsnFLxS_Fi0,10081
 lean_explore/extract/informalize.py,sha256=AtjRQUvvaL22yn_D8Ik0HS3Xv-s6U9mwTv0sWMWs8oM,22209
 lean_explore/extract/package_config.py,sha256=-09icflXgioPzpnXNPhpTPkKixxBmWCLNsxEY50Uq2g,1890
 lean_explore/extract/package_registry.py,sha256=N-M3WYry77P80pjt12lpFcCuPW1M9SgxiCd_N8-EoXw,1565
-lean_explore/extract/package_utils.py,sha256=
+lean_explore/extract/package_utils.py,sha256=SBD4gbCA5enldm2m3AoL4Dti1FsSs-ShQYeYMjyOADg,3400
 lean_explore/extract/types.py,sha256=Sp6sYuioTE_Gs0Z0lbq4h7OMyAnaZafdLV8UGtKn-zs,583
 lean_explore/mcp/__init__.py,sha256=YO0RM466ik2jQk8YMDITzkm3AHPtjEhn7Wm7rOusUXo,462
 lean_explore/mcp/app.py,sha256=PJ2HwX6VyTqKejuI1G8Ld4aO9XWp9hT5H8loaA5g0Lc,2173
 lean_explore/mcp/server.py,sha256=Lf3SCn8ghPNkZ3BybHh3VCXn91F-yX6RSRke1rvC7Pk,8234
-lean_explore/mcp/tools.py,sha256=
+lean_explore/mcp/tools.py,sha256=iOpJkezDDIBGfAKU5xfqVjrGQEP0MLGHHdkqHEnLPDE,4515
 lean_explore/models/__init__.py,sha256=G2Xeld_DADq-hhxm1K1CrEPeAM3ylHU2ckCh42Ogxro,321
 lean_explore/models/search_db.py,sha256=_a6B6FpqUevyHvW4KmNLeziiznIuxftpMUy0AtSDBJE,2673
 lean_explore/models/search_types.py,sha256=VcFGrK5Z12Cg9f2R-Y6GXGlh2SunyBAJaZ5I4DG0AUw,1442
 lean_explore/search/__init__.py,sha256=0k_iHe5xrurepznk7NzMYz10QFbK10ydMlpFlsuyFSc,1216
-lean_explore/search/engine.py,sha256=
+lean_explore/search/engine.py,sha256=e4F_wOChfY6aBWQbYXDpsE_gIENXzd-Vj08aRK5qrls,23591
 lean_explore/search/scoring.py,sha256=VkH-kpGheX14_tf8uJYBOp0nrG05_JJLmv7_0QdfAQk,4168
 lean_explore/search/service.py,sha256=6CWN-U5jxv7cTzc7ffitgzNMn3k59LfirpteC4xsvSE,1915
 lean_explore/search/tokenization.py,sha256=1EHd3dbJLwnmrj2SmdU1W8WoyCUnzhJB5gzmZLpWifs,1831

@@ -34,10 +34,10 @@ lean_explore/util/__init__.py,sha256=07QNedHV2KNBRMfQKzkw4nbgZn5-W5_YRtqpeUnSokg
 lean_explore/util/embedding_client.py,sha256=6XJGJrGTXAiefDr-E1j_SPHTTZMIJYi62PwXXLdSLDQ,3098
 lean_explore/util/logging.py,sha256=hF8YPi-1I6DdC1B_yROXA6u5GG14IIhD0Nym2FfqgRA,649
 lean_explore/util/openrouter_client.py,sha256=C_0HLO5o1seYjGl2zn6897i2onK7CdI6XxtE3cWb3Os,1926
-lean_explore/util/reranker_client.py,sha256=
-lean_explore-1.0.
-lean_explore-1.0.
-lean_explore-1.0.
-lean_explore-1.0.
-lean_explore-1.0.
-lean_explore-1.0.
+lean_explore/util/reranker_client.py,sha256=kLCTGPMQuphjwAj0PPi9KXpSzDP7o9JRQJpTbmWGiMs,6074
+lean_explore-1.0.1.dist-info/licenses/LICENSE,sha256=l4QLw1kIvEOjUktmmKm4dycK1E249Qs2s2AQTYbMXpY,11354
+lean_explore-1.0.1.dist-info/METADATA,sha256=Cuj-elh89YOPKaM_bf7jxvsumb1nQpm_sytbB8bF0ak,17084
+lean_explore-1.0.1.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+lean_explore-1.0.1.dist-info/entry_points.txt,sha256=FuKSRE7GmI9B_kM-xoiWEJj2dQ4upqhHnw8qH1vcjW8,59
+lean_explore-1.0.1.dist-info/top_level.txt,sha256=h51BKWrFvB7iym-IlaNAAHX5MZfA8Gmg-aDuXGo0fQ8,13
+lean_explore-1.0.1.dist-info/RECORD,,

File without changes
File without changes
File without changes
File without changes