scry_run-0.1.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scry_run/__init__.py +102 -0
- scry_run/backends/__init__.py +6 -0
- scry_run/backends/base.py +65 -0
- scry_run/backends/claude.py +404 -0
- scry_run/backends/frozen.py +85 -0
- scry_run/backends/registry.py +72 -0
- scry_run/cache.py +441 -0
- scry_run/cli/__init__.py +137 -0
- scry_run/cli/apps.py +396 -0
- scry_run/cli/cache.py +342 -0
- scry_run/cli/config_cmd.py +84 -0
- scry_run/cli/env.py +27 -0
- scry_run/cli/init.py +375 -0
- scry_run/cli/run.py +71 -0
- scry_run/config.py +141 -0
- scry_run/console.py +52 -0
- scry_run/context.py +298 -0
- scry_run/generator.py +698 -0
- scry_run/home.py +60 -0
- scry_run/logging.py +171 -0
- scry_run/meta.py +1852 -0
- scry_run/packages.py +175 -0
- scry_run-0.1.0.dist-info/METADATA +282 -0
- scry_run-0.1.0.dist-info/RECORD +26 -0
- scry_run-0.1.0.dist-info/WHEEL +4 -0
- scry_run-0.1.0.dist-info/entry_points.txt +2 -0
@@ -0,0 +1,72 @@
"""Backend registry and auto-detection."""

import os
from typing import Optional, Type

from scry_run.backends.base import GeneratorBackend
from scry_run.console import backend_selected


# Registry of available backends
_backends: dict[str, Type[GeneratorBackend]] = {}

# Auto-detection order (first available wins)
AUTO_DETECT_ORDER = ["claude"]


def register_backend(name: str, backend_cls: Type[GeneratorBackend]) -> None:
    """Register a backend class.

    Args:
        name: Backend name (e.g., "cli", "api")
        backend_cls: The backend class
    """
    _backends[name] = backend_cls


def get_backend(
    name: Optional[str] = None,
    model: Optional[str] = None,
) -> GeneratorBackend:
    """Get a backend instance.

    Args:
        name: Backend name to use. If None, auto-detect.
            Options: "claude", "frozen", "auto"
        model: Model name to use (backend-specific)

    Returns:
        Configured backend instance

    Raises:
        ValueError: If specified backend not found or unavailable
    """
    # Import backends here to trigger registration
    from scry_run.backends import claude, frozen

    # Check env var first
    env_backend = os.environ.get("SCRY_BACKEND", "").lower()
    if env_backend and name is None:
        name = env_backend

    # Auto-detect: try backends in priority order
    if name is None or name == "auto":
        for backend_name in AUTO_DETECT_ORDER:
            if backend_name in _backends and _backends[backend_name].is_available():
                backend_selected(backend_name, model, "auto-detected")
                return _backends[backend_name](model=model)
        raise ValueError("No available backend. Install claude CLI.")

    # Specific backend requested
    if name not in _backends:
        raise ValueError(f"Unknown backend: {name}. Available: {list(_backends.keys())}")

    backend_cls = _backends[name]
    if not backend_cls.is_available():
        raise ValueError(f"Backend '{name}' is not available.")

    reason = "from SCRY_BACKEND" if env_backend else "explicit"
    backend_selected(name, model, reason)
    return backend_cls(model=model)

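A brief usage sketch of the registry above (hedged: it assumes the claude backend registers itself when imported and that the `claude` CLI is installed; the "sonnet" model string is a placeholder, not a value taken from this package):

    from scry_run.backends.registry import get_backend

    backend = get_backend()                          # auto-detect: tries "claude" first
    backend = get_backend("claude", model="sonnet")  # explicit backend and model
    # When name is None, the SCRY_BACKEND environment variable takes precedence,
    # e.g. SCRY_BACKEND=frozen set before invoking the program.
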
scry_run/cache.py
ADDED
@@ -0,0 +1,441 @@
"""File-based cache for generated code."""

from __future__ import annotations

import json
import hashlib
from dataclasses import dataclass, asdict
from datetime import datetime
from pathlib import Path
from typing import Optional


@dataclass
class CacheEntry:
    """A single cached code entry."""

    class_name: str
    attr_name: str
    code: str
    code_type: str  # method, property, classmethod, staticmethod
    docstring: str
    dependencies: list[str]
    created_at: str
    checksum: str  # SHA256 of the code
    packages: list[str] | None = None  # PyPI packages

    def __post_init__(self):
        if self.packages is None:
            self.packages = []

    @classmethod
    def create(
        cls,
        class_name: str,
        attr_name: str,
        code: str,
        code_type: str = "method",
        docstring: str = "",
        dependencies: list[str] | None = None,
        packages: list[str] | None = None,
    ) -> CacheEntry:
        """Create a new cache entry with auto-generated metadata."""
        return cls(
            class_name=class_name,
            attr_name=attr_name,
            code=code,
            code_type=code_type,
            docstring=docstring,
            dependencies=dependencies or [],
            created_at=datetime.now().isoformat(),
            checksum=hashlib.sha256(code.encode()).hexdigest(),
            packages=packages or [],
        )


class ScryCache:
    """Manages the cache of generated code.

    Supports two modes:
    - Directory mode (legacy): Each class gets its own JSON file in a cache directory
    - Single-file mode: All classes in one JSON file

    Mode is auto-detected based on the path:
    - If path ends with .json -> single-file mode
    - If path is a directory (or no extension) -> directory mode
    """

    DEFAULT_CACHE_DIR = ".scry-run-cache"

    def __init__(self, cache_path: str | Path | None = None, read_only: bool = False):
        """Initialize cache with optional custom path.

        Args:
            cache_path: Path to cache directory or file.
                If it ends with .json, uses single-file mode.
                Otherwise uses directory mode.
                Defaults to the .scry-run-cache/ directory.
            read_only: If True, prevents any modification to the cache.
        """
        self.read_only = read_only

        if cache_path is None:
            # Legacy mode: directory
            self.cache_path = Path(self.DEFAULT_CACHE_DIR)
            self._file_mode = False
        else:
            self.cache_path = Path(cache_path)
            # File mode if path ends with .json
            self._file_mode = self.cache_path.suffix == ".json"

        # For backwards compatibility, expose cache_dir for directory mode
        if not self._file_mode:
            self.cache_dir = self.cache_path
            self._ensure_cache_dir()

    def _ensure_cache_dir(self) -> None:
        """Create cache directory if it doesn't exist (directory mode only)."""
        if not self._file_mode:
            self.cache_path.mkdir(parents=True, exist_ok=True)

    # === Single-file mode methods ===

    def _load_all(self) -> dict[str, dict[str, dict]]:
        """Load entire cache from single JSON file.

        Returns:
            Nested dict: {class_name: {attr_name: entry_data}}
        """
        if not self.cache_path.exists():
            return {}

        with open(self.cache_path, "r", encoding="utf-8") as f:
            return json.load(f)

    def _save_all(self, data: dict[str, dict[str, dict]]) -> None:
        """Save entire cache to single JSON file.

        Args:
            data: Nested dict: {class_name: {attr_name: entry_data}}
        """
        # Ensure parent directory exists
        self.cache_path.parent.mkdir(parents=True, exist_ok=True)

        with open(self.cache_path, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)

    # === Directory mode methods ===

    def _get_class_cache_path(self, class_name: str) -> Path:
        """Get the cache file path for a specific class (directory mode only)."""
        return self.cache_path / f"{class_name}.json"

    def _load_class_cache(self, class_name: str) -> dict[str, CacheEntry]:
        """Load all cached entries for a class.

        Works in both modes:
        - Directory mode: loads from {class_name}.json
        - Single-file mode: loads the class section from cache.json
        """
        if self._file_mode:
            all_data = self._load_all()
            class_data = all_data.get(class_name, {})
            return {
                attr_name: CacheEntry(**entry_data)
                for attr_name, entry_data in class_data.items()
            }

        # Directory mode
        cache_path = self._get_class_cache_path(class_name)

        if not cache_path.exists():
            return {}

        with open(cache_path, "r", encoding="utf-8") as f:
            data = json.load(f)

        return {
            attr_name: CacheEntry(**entry_data)
            for attr_name, entry_data in data.items()
        }

    def _save_class_cache(self, class_name: str, entries: dict[str, CacheEntry]) -> None:
        """Save all cached entries for a class.

        Works in both modes:
        - Directory mode: saves to {class_name}.json
        - Single-file mode: saves the class section to cache.json
        """
        data = {
            attr_name: asdict(entry)
            for attr_name, entry in entries.items()
        }

        if self._file_mode:
            all_data = self._load_all()
            all_data[class_name] = data
            self._save_all(all_data)
            return

        # Directory mode
        cache_path = self._get_class_cache_path(class_name)

        with open(cache_path, "w", encoding="utf-8") as f:
            json.dump(data, f, indent=2)

    def get(self, class_name: str, attr_name: str) -> Optional[CacheEntry]:
        """Get a cached code entry.

        Args:
            class_name: Name of the class
            attr_name: Name of the attribute

        Returns:
            CacheEntry if found, None otherwise
        """
        entries = self._load_class_cache(class_name)
        return entries.get(attr_name)

    def set(
        self,
        class_name: str,
        attr_name: str,
        code: str,
        code_type: str = "method",
        docstring: str = "",
        dependencies: list[str] | None = None,
        packages: list[str] | None = None,
    ) -> CacheEntry:
        """Cache a code entry.

        Args:
            class_name: Name of the class
            attr_name: Name of the attribute
            code: The generated code
            code_type: Type of code (method, property, etc.)
            docstring: Description of the code
            dependencies: List of import dependencies
            packages: List of PyPI packages required
        """
        if self.read_only:
            # In read-only mode, we pretend we set it but don't persist.
            # This allows runtime usage of generated code without saving.
            return CacheEntry.create(
                class_name=class_name,
                attr_name=attr_name,
                code=code,
                code_type=code_type,
                docstring=docstring,
                dependencies=dependencies,
                packages=packages,
            )

        entries = self._load_class_cache(class_name)

        entry = CacheEntry.create(
            class_name=class_name,
            attr_name=attr_name,
            code=code,
            code_type=code_type,
            docstring=docstring,
            dependencies=dependencies,
            packages=packages,
        )

        entries[attr_name] = entry
        self._save_class_cache(class_name, entries)

        return entry

    def delete(self, class_name: str, attr_name: str) -> bool:
        """Delete a cached entry.

        Args:
            class_name: Name of the class
            attr_name: Name of the attribute

        Returns:
            True if the entry was deleted, False if it didn't exist or the cache is read-only
        """
        if self.read_only:
            return False

        entries = self._load_class_cache(class_name)

        if attr_name not in entries:
            return False

        del entries[attr_name]

        if self._file_mode:
            # Single-file mode: update the entire file
            all_data = self._load_all()
            if entries:
                all_data[class_name] = {
                    attr_name: asdict(entry)
                    for attr_name, entry in entries.items()
                }
            else:
                # Remove empty class from file
                all_data.pop(class_name, None)
            self._save_all(all_data)
        else:
            # Directory mode
            if entries:
                self._save_class_cache(class_name, entries)
            else:
                # Remove empty cache file
                cache_path = self._get_class_cache_path(class_name)
                cache_path.unlink(missing_ok=True)

        return True

    def prune(
        self,
        class_name: Optional[str] = None,
        attr_name: Optional[str] = None,
    ) -> int:
        """Prune cache entries.

        Args:
            class_name: If provided, only prune entries for this class
            attr_name: If provided along with class_name, only prune this specific entry

        Returns:
            Number of entries pruned (0 if read-only)
        """
        if self.read_only:
            return 0

        pruned = 0

        if class_name and attr_name:
            # Prune a specific entry
            if self.delete(class_name, attr_name):
                pruned = 1
        elif class_name:
            # Prune all entries for a class
            entries = self._load_class_cache(class_name)
            pruned = len(entries)

            if self._file_mode:
                # Single-file mode: remove class from file
                all_data = self._load_all()
                all_data.pop(class_name, None)
                self._save_all(all_data)
            else:
                # Directory mode: remove class file
                cache_path = self._get_class_cache_path(class_name)
                cache_path.unlink(missing_ok=True)
        else:
            # Prune entire cache
            if self._file_mode:
                # Single-file mode: count entries and clear file
                all_data = self._load_all()
                for class_data in all_data.values():
                    pruned += len(class_data)
                self.cache_path.unlink(missing_ok=True)
            else:
                # Directory mode: remove all class files
                for cache_file in self.cache_path.glob("*.json"):
                    with open(cache_file, "r", encoding="utf-8") as f:
                        data = json.load(f)
                    pruned += len(data)
                    cache_file.unlink()

        return pruned

    def list_entries(self) -> list[CacheEntry]:
        """List all cached entries.

        Returns:
            List of all CacheEntry objects in the cache
        """
        entries = []

        if self._file_mode:
            # Single-file mode: load all classes from one file
            all_data = self._load_all()
            for class_name, class_data in all_data.items():
                for attr_name, entry_data in class_data.items():
                    entries.append(CacheEntry(**entry_data))
        else:
            # Directory mode: load from each class file
            for cache_file in self.cache_path.glob("*.json"):
                class_name = cache_file.stem
                class_entries = self._load_class_cache(class_name)
                entries.extend(class_entries.values())

        return entries

    def export(self) -> dict[str, dict[str, dict]]:
        """Export entire cache as a dictionary.

        Returns:
            Nested dict: {class_name: {attr_name: entry_data}}
        """
        if self._file_mode:
            # Single-file mode: just load and return the entire file
            return self._load_all()

        # Directory mode: aggregate from all class files
        result = {}

        for cache_file in self.cache_path.glob("*.json"):
            class_name = cache_file.stem
            with open(cache_file, "r", encoding="utf-8") as f:
                result[class_name] = json.load(f)

        return result

    def export_to_file(self, output_path: str | Path) -> None:
        """Export cache to a Python file with all generated code.

        Args:
            output_path: Path to write the Python file
        """
        output_path = Path(output_path)
        entries = self.list_entries()

        # Collect all dependencies
        all_deps = set()
        for entry in entries:
            all_deps.update(entry.dependencies)

        # Group entries by class
        by_class: dict[str, list[CacheEntry]] = {}
        for entry in entries:
            by_class.setdefault(entry.class_name, []).append(entry)

        lines = [
            '"""Generated code exported from scry-run cache."""',
            "",
        ]

        # Add imports
        if all_deps:
            for dep in sorted(all_deps):
                lines.append(dep)
            lines.append("")

        # Add code for each class
        for class_name, class_entries in sorted(by_class.items()):
            lines.append(f"# === {class_name} ===")
            lines.append("")

            for entry in sorted(class_entries, key=lambda e: e.attr_name):
                if entry.docstring:
                    lines.append(f"# {entry.docstring}")
                lines.append(f"# Type: {entry.code_type}")
                lines.append(entry.code)
                lines.append("")

        with open(output_path, "w", encoding="utf-8") as f:
            f.write("\n".join(lines))

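A short sketch of the two cache modes defined above (hedged: the paths, class name, and attribute name are illustrative, not taken from the package):

    from scry_run.cache import ScryCache

    dir_cache = ScryCache()                  # directory mode: .scry-run-cache/<ClassName>.json
    file_cache = ScryCache("scry.json")      # single-file mode: path ends with .json

    entry = file_cache.set(
        class_name="TextTools",              # hypothetical class and attribute
        attr_name="slugify",
        code="def slugify(self, text):\n    return text.lower().replace(' ', '-')",
        code_type="method",
        docstring="Convert text to a URL slug",
    )
    assert file_cache.get("TextTools", "slugify").checksum == entry.checksum
    file_cache.prune(class_name="TextTools")  # drops the whole class section, returns 1
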
scry_run/cli/__init__.py
ADDED
@@ -0,0 +1,137 @@
"""CLI entry point for scry-run."""

import sys

import click
from rich.console import Console

from scry_run.cli.init import init
from scry_run.cli.cache import cache
from scry_run.cli.run import run
from scry_run.cli.env import env
from scry_run.cli.apps import list_apps, which_app, rm_app, reset_app, info_app
from scry_run.cli.config_cmd import config_cmd
from scry_run.generator import CodeGenerator, ScryRunError

console = Console()


class DefaultGroup(click.Group):
    """Click Group that invokes a default command for unknown commands."""

    def __init__(self, *args, **kwargs):
        # Pop our custom option before Click sees it; it names the command to
        # fall back to when the requested command cannot be resolved.
        self.default_cmd_name = kwargs.pop("default_if_missing", None)
        super().__init__(*args, **kwargs)

    def resolve_command(self, ctx, args):
        try:
            # Try to resolve the command normally
            return super().resolve_command(ctx, args)
        except click.UsageError:
            # Command not found: fall back to the default command, if one is
            # configured and actually registered, handing it the remaining args.
            if self.default_cmd_name:
                cmd = self.get_command(ctx, self.default_cmd_name)
                if cmd:
                    return self.default_cmd_name, cmd, args

            # No usable default: re-raise the original error
            raise


@click.group(cls=DefaultGroup, default_if_missing="ask", context_settings={"ignore_unknown_options": True})
@click.version_option()
def main() -> None:
    """scry-run: LLM-powered dynamic code generation.

    Use a metaclass to automatically generate missing code via LLMs.
    """
    pass


@main.command(hidden=True, context_settings={"ignore_unknown_options": True})
@click.argument("prompt_parts", nargs=-1)
def ask(prompt_parts: tuple[str, ...]) -> None:
    """Default command: Ask the coding assistant."""
    ctx = click.get_current_context()

    # With ignore_unknown_options=True, Click leaves unrecognized options in
    # ctx.args (and in the parent context's args), while positional words land
    # in prompt_parts. Recombine everything into one free-form prompt; the
    # relative order of interspersed flags is not preserved, which is
    # acceptable for a natural-language prompt.
    parent_args = ctx.parent.args if ctx.parent else []
    current_args = ctx.args

    full_parts = list(parent_args) + list(prompt_parts) + list(current_args)

    if not full_parts:
        # No arguments at all: show the group's help and exit
        click.echo(ctx.parent.get_help())
        ctx.exit(0)

    prompt = " ".join(full_parts)

    try:
        generator = CodeGenerator()
        response = generator.generate_freeform(prompt)
        console.print(response)

    except ScryRunError as e:
        console.print(f"[red]Error:[/red] {e.message}", highlight=False)
        if e.hint:
            console.print(f"[dim]Hint:[/dim] {e.hint}", highlight=False)
        sys.exit(1)
    except Exception as e:
        console.print(f"[red]Error:[/red] {e}", highlight=False)
        sys.exit(1)


# Register commands
main.add_command(init)
main.add_command(init, name="new")  # Alias
main.add_command(cache)
main.add_command(run)
main.add_command(env)
main.add_command(list_apps, name="list")
main.add_command(which_app, name="which")
main.add_command(rm_app, name="rm")
main.add_command(reset_app, name="reset")
main.add_command(info_app, name="info")
main.add_command(config_cmd, name="config")


if __name__ == "__main__":
    main()

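For completeness, a hedged sketch of exercising the CLI group above with Click's test runner (the installed console-script name comes from entry_points.txt, which is not shown here, and actually running "ask" requires a working backend):

    from click.testing import CliRunner
    from scry_run.cli import main

    runner = CliRunner()
    runner.invoke(main, ["--version"])            # handled by @click.version_option()
    runner.invoke(main, ["cache", "--help"])      # a registered subcommand
    runner.invoke(main, ["write", "a", "haiku"])  # unknown command: DefaultGroup routes it to the hidden "ask"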