oasr-0.3.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- __init__.py +3 -0
- __main__.py +6 -0
- adapter.py +396 -0
- adapters/__init__.py +17 -0
- adapters/base.py +254 -0
- adapters/claude.py +82 -0
- adapters/codex.py +84 -0
- adapters/copilot.py +210 -0
- adapters/cursor.py +78 -0
- adapters/windsurf.py +83 -0
- cli.py +94 -0
- commands/__init__.py +6 -0
- commands/adapter.py +102 -0
- commands/add.py +302 -0
- commands/clean.py +155 -0
- commands/diff.py +180 -0
- commands/find.py +56 -0
- commands/help.py +51 -0
- commands/info.py +152 -0
- commands/list.py +110 -0
- commands/registry.py +303 -0
- commands/rm.py +128 -0
- commands/status.py +119 -0
- commands/sync.py +143 -0
- commands/update.py +417 -0
- commands/use.py +172 -0
- commands/validate.py +74 -0
- config.py +86 -0
- discovery.py +145 -0
- manifest.py +437 -0
- oasr-0.3.4.dist-info/METADATA +358 -0
- oasr-0.3.4.dist-info/RECORD +43 -0
- oasr-0.3.4.dist-info/WHEEL +4 -0
- oasr-0.3.4.dist-info/entry_points.txt +3 -0
- oasr-0.3.4.dist-info/licenses/LICENSE +187 -0
- oasr-0.3.4.dist-info/licenses/NOTICE +8 -0
- registry.py +173 -0
- remote.py +482 -0
- skillcopy/__init__.py +71 -0
- skillcopy/local.py +40 -0
- skillcopy/remote.py +98 -0
- tracking.py +181 -0
- validate.py +362 -0
commands/update.py
ADDED
@@ -0,0 +1,417 @@
+"""`oasr update` command - Update ASR tool from GitHub."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import subprocess
+import sys
+from pathlib import Path
+
+
+def find_asr_repo() -> Path | None:
+    """Find the ASR git repository path.
+
+    Returns:
+        Path to ASR repo, or None if not found.
+    """
+    # Try to find via current module location
+    try:
+        import cli
+
+        cli_file = Path(cli.__file__).resolve()
+
+        # Walk up to find .git directory
+        current = cli_file.parent
+        for _ in range(5):  # Max 5 levels up
+            if (current / ".git").exists():
+                return current
+            if current.parent == current:  # Reached root
+                break
+            current = current.parent
+    except Exception:
+        pass
+
+    return None
+
+
+def get_git_remote_url(repo_path: Path) -> str | None:
+    """Get the git remote URL.
+
+    Args:
+        repo_path: Path to git repository.
+
+    Returns:
+        Remote URL or None.
+    """
+    try:
+        result = subprocess.run(
+            ["git", "remote", "get-url", "origin"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=5,
+        )
+        if result.returncode == 0:
+            return result.stdout.strip()
+    except Exception:
+        pass
+    return None
+
+
+def get_current_commit(repo_path: Path) -> str | None:
+    """Get current git commit hash.
+
+    Args:
+        repo_path: Path to git repository.
+
+    Returns:
+        Commit hash or None.
+    """
+    try:
+        result = subprocess.run(
+            ["git", "rev-parse", "HEAD"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=5,
+        )
+        if result.returncode == 0:
+            return result.stdout.strip()
+    except Exception:
+        pass
+    return None
+
+
+def check_working_tree_clean(repo_path: Path) -> bool:
+    """Check if git working tree is clean.
+
+    Args:
+        repo_path: Path to git repository.
+
+    Returns:
+        True if clean, False if dirty.
+    """
+    try:
+        result = subprocess.run(
+            ["git", "status", "--porcelain"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=5,
+        )
+        return result.returncode == 0 and not result.stdout.strip()
+    except Exception:
+        return False
+
+
+def pull_updates(repo_path: Path) -> tuple[bool, str]:
+    """Pull updates from git remote.
+
+    Args:
+        repo_path: Path to git repository.
+
+    Returns:
+        Tuple of (success, message).
+    """
+    try:
+        result = subprocess.run(
+            ["git", "pull", "--ff-only"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=30,
+        )
+
+        if result.returncode == 0:
+            # Check if already up to date
+            if "Already up to date" in result.stdout or "Already up-to-date" in result.stdout:
+                return True, "already_up_to_date"
+            return True, "updated"
+        else:
+            return False, result.stderr.strip()
+    except subprocess.TimeoutExpired:
+        return False, "Update timed out"
+    except Exception as e:
+        return False, str(e)
+
+
+def get_changelog(repo_path: Path, old_commit: str, new_commit: str, max_lines: int = 10) -> list[str]:
+    """Get changelog between two commits.
+
+    Args:
+        repo_path: Path to git repository.
+        old_commit: Old commit hash.
+        new_commit: New commit hash.
+        max_lines: Maximum number of commits to show.
+
+    Returns:
+        List of commit messages.
+    """
+    try:
+        result = subprocess.run(
+            ["git", "log", "--oneline", f"{old_commit}..{new_commit}", f"-{max_lines}"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=5,
+        )
+
+        if result.returncode == 0 and result.stdout.strip():
+            return result.stdout.strip().split("\n")
+    except Exception:
+        pass
+    return []
+
+
+def get_stats(repo_path: Path, old_commit: str, new_commit: str) -> dict:
+    """Get statistics about changes.
+
+    Args:
+        repo_path: Path to git repository.
+        old_commit: Old commit hash.
+        new_commit: New commit hash.
+
+    Returns:
+        Dictionary with stats (commits, files, insertions, deletions).
+    """
+    stats = {"commits": 0, "files": 0, "insertions": 0, "deletions": 0}
+
+    try:
+        # Count commits
+        result = subprocess.run(
+            ["git", "rev-list", "--count", f"{old_commit}..{new_commit}"],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=5,
+        )
+        if result.returncode == 0:
+            stats["commits"] = int(result.stdout.strip())
+
+        # Get file stats
+        result = subprocess.run(
+            ["git", "diff", "--shortstat", old_commit, new_commit],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=5,
+        )
+        if result.returncode == 0 and result.stdout.strip():
+            # Parse: "5 files changed, 123 insertions(+), 45 deletions(-)"
+            output = result.stdout.strip()
+            if "file" in output:
+                parts = output.split(",")
+                for part in parts:
+                    if "file" in part:
+                        stats["files"] = int(part.split()[0])
+                    elif "insertion" in part:
+                        stats["insertions"] = int(part.split()[0])
+                    elif "deletion" in part:
+                        stats["deletions"] = int(part.split()[0])
+    except Exception:
+        pass
+
+    return stats
+
+
+def reinstall_asr(repo_path: Path) -> tuple[bool, str]:
+    """Reinstall ASR using uv or pip.
+
+    Args:
+        repo_path: Path to ASR repository.
+
+    Returns:
+        Tuple of (success, message).
+    """
+    # Try uv first
+    try:
+        result = subprocess.run(
+            ["uv", "pip", "install", "-e", "."],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=60,
+        )
+        if result.returncode == 0:
+            return True, "Reinstalled with uv"
+    except (subprocess.TimeoutExpired, FileNotFoundError):
+        pass
+
+    # Fall back to pip
+    try:
+        result = subprocess.run(
+            [sys.executable, "-m", "pip", "install", "-e", "."],
+            cwd=repo_path,
+            capture_output=True,
+            text=True,
+            timeout=60,
+        )
+        if result.returncode == 0:
+            return True, "Reinstalled with pip"
+        else:
+            return False, result.stderr.strip()
+    except Exception as e:
+        return False, str(e)
+
+
+def register(subparsers) -> None:
+    """Register the update command."""
+    p = subparsers.add_parser(
+        "update",
+        help="Update ASR tool from GitHub",
+    )
+    p.add_argument(
+        "--no-reinstall",
+        action="store_true",
+        help="Skip reinstallation step",
+    )
+    p.add_argument(
+        "--changelog",
+        type=int,
+        default=10,
+        metavar="N",
+        help="Number of changelog entries to show (default: 10)",
+    )
+    p.add_argument(
+        "--json",
+        action="store_true",
+        help="Output in JSON format",
+    )
+    p.add_argument(
+        "--quiet",
+        action="store_true",
+        help="Suppress info messages",
+    )
+    p.set_defaults(func=run)
+
+
+def run(args: argparse.Namespace) -> int:
+    """Run the update command."""
+    # Find ASR repository
+    repo_path = find_asr_repo()
+
+    if not repo_path:
+        if args.json:
+            print(json.dumps({"success": False, "error": "Could not find ASR git repository"}))
+        else:
+            print("✗ Could not find ASR git repository", file=sys.stderr)
+            print(" Make sure ASR is installed from git (git clone + pip install -e .)", file=sys.stderr)
+        return 1
+
+    if not args.quiet and not args.json:
+        print(f"Found ASR repository: {repo_path}")
+
+    # Check if it's a git repository
+    if not (repo_path / ".git").exists():
+        if args.json:
+            print(json.dumps({"success": False, "error": "Not a git repository"}))
+        else:
+            print(f"✗ {repo_path} is not a git repository", file=sys.stderr)
+        return 1
+
+    # Get remote URL
+    remote_url = get_git_remote_url(repo_path)
+    if remote_url and not args.quiet and not args.json:
+        print(f"Remote: {remote_url}")
+
+    # Check working tree
+    if not check_working_tree_clean(repo_path):
+        if args.json:
+            print(json.dumps({"success": False, "error": "Working tree has uncommitted changes"}))
+        else:
+            print("✗ Working tree has uncommitted changes", file=sys.stderr)
+            print(" Commit or stash your changes before updating", file=sys.stderr)
+        return 1
+
+    # Get current commit before update
+    old_commit = get_current_commit(repo_path)
+    if not old_commit:
+        if args.json:
+            print(json.dumps({"success": False, "error": "Could not get current commit"}))
+        else:
+            print("✗ Could not get current commit", file=sys.stderr)
+        return 1
+
+    # Pull updates
+    if not args.quiet and not args.json:
+        print("Pulling updates from GitHub...")
+
+    success, message = pull_updates(repo_path)
+
+    if not success:
+        if args.json:
+            print(json.dumps({"success": False, "error": f"Git pull failed: {message}"}))
+        else:
+            print(f"✗ Git pull failed: {message}", file=sys.stderr)
+        return 1
+
+    # Check if already up to date
+    if message == "already_up_to_date":
+        if args.json:
+            print(json.dumps({"success": True, "updated": False, "message": "Already up to date"}))
+        else:
+            print("✓ Already up to date")
+        return 0
+
+    # Get new commit
+    new_commit = get_current_commit(repo_path)
+    if not new_commit or new_commit == old_commit:
+        if args.json:
+            print(json.dumps({"success": True, "updated": False, "message": "No changes"}))
+        else:
+            print("✓ No changes")
+        return 0
+
+    # Get statistics
+    stats = get_stats(repo_path, old_commit, new_commit)
+
+    # Get changelog
+    changelog = get_changelog(repo_path, old_commit, new_commit, max_lines=args.changelog)
+
+    if args.json:
+        print(
+            json.dumps(
+                {
+                    "success": True,
+                    "updated": True,
+                    "old_commit": old_commit[:7],
+                    "new_commit": new_commit[:7],
+                    "stats": stats,
+                    "changelog": changelog,
+                },
+                indent=2,
+            )
+        )
+    else:
+        print(f"✓ Updated ASR from {old_commit[:7]} to {new_commit[:7]}")
+        print(f" {stats['commits']} commit(s), {stats['files']} file(s) changed", end="")
+        if stats["insertions"] > 0:
+            print(f", +{stats['insertions']}", end="")
+        if stats["deletions"] > 0:
+            print(f", -{stats['deletions']}", end="")
+        print()
+
+        if changelog:
+            print("\nRecent changes:")
+            for line in changelog:
+                print(f" {line}")
+
+    # Reinstall if requested
+    if not args.no_reinstall:
+        if not args.quiet and not args.json:
+            print("\nReinstalling ASR...")
+
+        success, message = reinstall_asr(repo_path)
+
+        if success:
+            if not args.quiet and not args.json:
+                print(f"✓ {message}")
+        else:
+            if args.json:
+                print(json.dumps({"warning": f"Reinstall failed: {message}"}), file=sys.stderr)
+            else:
+                print(f"⚠ Reinstall failed: {message}", file=sys.stderr)
+                print(" You may need to reinstall manually", file=sys.stderr)
+
+    return 0
commands/use.py
ADDED
@@ -0,0 +1,172 @@
+"""`asr use` command."""
+
+from __future__ import annotations
+
+import argparse
+import fnmatch
+import json
+import sys
+from pathlib import Path
+
+from registry import load_registry
+from skillcopy import copy_skill
+
+
+def register(subparsers) -> None:
+    p = subparsers.add_parser("use", help="Copy skill(s) to target directory")
+    p.add_argument("names", nargs="+", help="Skill name(s) or glob pattern(s) to copy")
+    p.add_argument(
+        "-d",
+        "--dir",
+        type=Path,
+        default=Path("."),
+        dest="output_dir",
+        help="Target directory (default: current)",
+    )
+    p.add_argument("--json", action="store_true", help="Output in JSON format")
+    p.add_argument("--quiet", action="store_true", help="Suppress info/warnings")
+    p.set_defaults(func=run)
+
+
+def _match_skills(patterns: list[str], entry_map: dict) -> tuple[list[str], list[str]]:
+    """Match skill names against patterns (exact or glob).
+
+    Returns:
+        Tuple of (matched_names, unmatched_patterns).
+    """
+    matched = set()
+    unmatched = []
+    all_names = list(entry_map.keys())
+
+    for pattern in patterns:
+        if pattern in entry_map:
+            matched.add(pattern)
+        elif any(c in pattern for c in "*?["):
+            # Glob pattern
+            matches = fnmatch.filter(all_names, pattern)
+            if matches:
+                matched.update(matches)
+            else:
+                unmatched.append(pattern)
+        else:
+            unmatched.append(pattern)
+
+    return list(matched), unmatched
+
+
+def run(args: argparse.Namespace) -> int:
+    entries = load_registry()
+    entry_map = {e.name: e for e in entries}
+
+    output_dir = args.output_dir.resolve()
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    copied = []
+    warnings = []
+
+    matched_names, unmatched = _match_skills(args.names, entry_map)
+
+    for pattern in unmatched:
+        warnings.append(f"No skills matched: {pattern}")
+
+    # Get manifests for tracking metadata
+    from manifest import load_manifest
+
+    # Separate remote and local skills for parallel processing
+    from skillcopy.remote import is_remote_source
+
+    remote_names = [name for name in matched_names if is_remote_source(entry_map[name].path)]
+    local_names = [name for name in matched_names if not is_remote_source(entry_map[name].path)]
+
+    # Handle remote skills with parallel fetching
+    if remote_names:
+        print(f"Fetching {len(remote_names)} remote skill(s)...", file=sys.stderr)
+        import threading
+        from concurrent.futures import ThreadPoolExecutor, as_completed
+
+        print_lock = threading.Lock()
+
+        def copy_remote_entry(name):
+            """Copy a remote skill with thread-safe progress."""
+            entry = entry_map[name]
+            dest = output_dir / name
+
+            try:
+                with print_lock:
+                    platform = (
+                        "GitHub" if "github.com" in entry.path else "GitLab" if "gitlab.com" in entry.path else "remote"
+                    )
+                    print(f" ↓ {name} (fetching from {platform}...)", file=sys.stderr, flush=True)
+
+                # Get manifest hash for tracking
+                manifest = load_manifest(name)
+                source_hash = manifest.content_hash if manifest else None
+
+                copy_skill(
+                    entry.path,
+                    dest,
+                    validate=False,
+                    show_progress=False,
+                    skill_name=name,
+                    inject_tracking=True,
+                    source_hash=source_hash,
+                )
+
+                with print_lock:
+                    print(f" ✓ {name} (downloaded)", file=sys.stderr)
+
+                return {"name": name, "src": entry.path, "dest": str(dest)}, None
+            except Exception as e:
+                with print_lock:
+                    print(f" ✗ {name} ({str(e)[:50]}...)", file=sys.stderr)
+                return None, f"Failed to copy {name}: {e}"
+
+        # Copy remote skills in parallel
+        with ThreadPoolExecutor(max_workers=4) as executor:
+            futures = {executor.submit(copy_remote_entry, name): name for name in remote_names}
+
+            for future in as_completed(futures):
+                result, error = future.result()
+                if result:
+                    copied.append(result)
+                if error:
+                    warnings.append(error)
+
+    # Handle local skills sequentially (fast anyway)
+    for name in sorted(local_names):
+        entry = entry_map[name]
+        dest = output_dir / name
+
+        try:
+            # Get manifest hash for tracking
+            manifest = load_manifest(name)
+            source_hash = manifest.content_hash if manifest else None
+
+            # Unified copy with tracking
+            copy_skill(entry.path, dest, validate=False, inject_tracking=True, source_hash=source_hash)
+            copied.append({"name": name, "src": entry.path, "dest": str(dest)})
+        except Exception as e:
+            warnings.append(f"Failed to copy {name}: {e}")
+
+    if not args.quiet:
+        for w in warnings:
+            print(f"⚠ {w}", file=sys.stderr)
+
+    if args.json:
+        print(
+            json.dumps(
+                {
+                    "copied": len(copied),
+                    "warnings": len(warnings),
+                    "skills": copied,
+                },
+                indent=2,
+            )
+        )
+    else:
+        for c in copied:
+            print(f"Copied: {c['name']} → {c['dest']}")
+        if copied:
+            print(f"\n{len(copied)} skill(s) copied to {output_dir}")
+
+    return 1 if warnings and not copied else 0
commands/validate.py
ADDED
@@ -0,0 +1,74 @@
+"""`asr validate` command."""
+
+from __future__ import annotations
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+from config import load_config
+from registry import load_registry
+from validate import validate_all, validate_skill
+
+
+def _print_validation_result(result) -> None:
+    print(f"{result.name}")
+    if result.valid and not result.warnings:
+        print(" ✓ Valid")
+    else:
+        for msg in result.all_messages:
+            print(f" {msg}")
+
+
+def register(subparsers) -> None:
+    p = subparsers.add_parser("validate", help="Validate skills")
+    p.add_argument("path", type=Path, nargs="?", help="Path to skill directory")
+    p.add_argument("--all", action="store_true", dest="validate_all", help="Validate all registered skills")
+    p.add_argument("--strict", action="store_true", help="Treat warnings as errors")
+    p.add_argument("--json", action="store_true", help="Output in JSON format")
+    p.add_argument("--quiet", action="store_true", help="Suppress info/warnings")
+    p.add_argument("--config", type=Path, help="Override config file path")
+    p.set_defaults(func=run)
+
+
+def run(args: argparse.Namespace) -> int:
+    config = load_config(args.config)
+    max_lines = config["validation"]["reference_max_lines"]
+
+    if args.validate_all:
+        entries = load_registry()
+        if not entries:
+            if args.json:
+                print("[]")
+            else:
+                print("No skills registered.")
+            return 0
+
+        results = validate_all(entries, reference_max_lines=max_lines)
+    elif args.path:
+        result = validate_skill(args.path.resolve(), reference_max_lines=max_lines)
+        results = [result]
+    else:
+        print("Error: Specify a path or use --all", file=sys.stderr)
+        return 2
+
+    if args.json:
+        print(json.dumps([r.to_dict() for r in results], indent=2))
+    else:
+        for result in results:
+            _print_validation_result(result)
+            print()
+
+    total_errors = sum(len(r.errors) for r in results)
+    total_warnings = sum(len(r.warnings) for r in results)
+
+    if not args.json and not args.quiet:
+        print(f"{len(results)} skill(s) validated: {total_errors} error(s), {total_warnings} warning(s)")
+
+    if total_errors > 0:
+        return 1
+    if args.strict and total_warnings > 0:
+        return 1
+
+    return 0