oasr 0.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- __init__.py +3 -0
- __main__.py +6 -0
- adapter.py +396 -0
- adapters/__init__.py +17 -0
- adapters/base.py +254 -0
- adapters/claude.py +82 -0
- adapters/codex.py +84 -0
- adapters/copilot.py +210 -0
- adapters/cursor.py +78 -0
- adapters/windsurf.py +83 -0
- agents/__init__.py +25 -0
- agents/base.py +96 -0
- agents/claude.py +25 -0
- agents/codex.py +25 -0
- agents/copilot.py +25 -0
- agents/opencode.py +25 -0
- agents/registry.py +57 -0
- cli.py +97 -0
- commands/__init__.py +6 -0
- commands/adapter.py +102 -0
- commands/add.py +435 -0
- commands/clean.py +30 -0
- commands/clone.py +178 -0
- commands/config.py +163 -0
- commands/diff.py +180 -0
- commands/exec.py +245 -0
- commands/find.py +56 -0
- commands/help.py +51 -0
- commands/info.py +152 -0
- commands/list.py +110 -0
- commands/registry.py +447 -0
- commands/rm.py +128 -0
- commands/status.py +119 -0
- commands/sync.py +143 -0
- commands/update.py +417 -0
- commands/use.py +45 -0
- commands/validate.py +74 -0
- config/__init__.py +119 -0
- config/defaults.py +40 -0
- config/schema.py +73 -0
- discovery.py +145 -0
- manifest.py +437 -0
- oasr-0.5.0.dist-info/METADATA +358 -0
- oasr-0.5.0.dist-info/RECORD +59 -0
- oasr-0.5.0.dist-info/WHEEL +4 -0
- oasr-0.5.0.dist-info/entry_points.txt +3 -0
- oasr-0.5.0.dist-info/licenses/LICENSE +187 -0
- oasr-0.5.0.dist-info/licenses/NOTICE +8 -0
- policy/__init__.py +50 -0
- policy/defaults.py +27 -0
- policy/enforcement.py +98 -0
- policy/profile.py +185 -0
- registry.py +173 -0
- remote.py +482 -0
- skillcopy/__init__.py +71 -0
- skillcopy/local.py +40 -0
- skillcopy/remote.py +98 -0
- tracking.py +181 -0
- validate.py +362 -0
__init__.py
ADDED
__main__.py
ADDED
adapter.py
ADDED
@@ -0,0 +1,396 @@

```python
"""Pure Python adapter generation - self-contained and portable.

This module provides standalone adapter generation for Cursor, Windsurf, and Codex
without dependencies on other CLI modules. Can be used as a library or run directly.

Usage:
    python -m skills.adapter --skills-root /path/to/skills --output-dir /path/to/project
    python -m skills.adapter --skills-root /path/to/skills --target cursor
"""

from __future__ import annotations

import argparse
import json
import sys
from collections.abc import Callable
from dataclasses import dataclass
from pathlib import Path

try:
    import yaml

    HAS_YAML = True
except ImportError:
    HAS_YAML = False


@dataclass
class SkillInfo:
    """Discovered skill information."""

    name: str
    description: str
    path: Path


def parse_frontmatter(content: str) -> dict[str, str]:
    """Parse YAML frontmatter from markdown content.

    Args:
        content: Markdown file content.

    Returns:
        Dictionary of frontmatter fields, empty if no valid frontmatter.
    """
    if not content.startswith("---"):
        return {}

    lines = content.split("\n")
    end_idx = -1

    for i, line in enumerate(lines[1:], start=1):
        if line.strip() == "---":
            end_idx = i
            break

    if end_idx == -1:
        return {}

    fm_text = "\n".join(lines[1:end_idx])

    if HAS_YAML:
        try:
            parsed = yaml.safe_load(fm_text)
            if isinstance(parsed, dict):
                return {k: str(v) if v else "" for k, v in parsed.items()}
        except yaml.YAMLError:
            pass

    frontmatter = {}
    for line in lines[1:end_idx]:
        if ":" in line and not line.startswith(" ") and not line.startswith("\t"):
            key, _, value = line.partition(":")
            frontmatter[key.strip()] = value.strip().strip('"').strip("'")

    return frontmatter


def find_skills(root: Path) -> list[SkillInfo]:
    """Recursively find all SKILL.md files and extract skill info.

    Args:
        root: Root directory to search.

    Returns:
        List of discovered skills with name, description, and path.
    """
    skills = []

    for skill_md in root.rglob("SKILL.md"):
        skill_dir = skill_md.parent

        try:
            content = skill_md.read_text(encoding="utf-8")
        except (OSError, UnicodeDecodeError):
            continue

        fm = parse_frontmatter(content)
        name = fm.get("name", skill_dir.name)
        description = fm.get("description", "")

        if not name:
            continue

        skills.append(
            SkillInfo(
                name=name,
                description=description,
                path=skill_dir.resolve(),
            )
        )

    return skills


def generate_cursor(skill: SkillInfo, output_dir: Path) -> Path:
    """Generate a Cursor command file.

    Args:
        skill: Skill information.
        output_dir: Target directory (will create .cursor/commands/).

    Returns:
        Path to the generated file.
    """
    target_dir = output_dir / ".cursor" / "commands"
    target_dir.mkdir(parents=True, exist_ok=True)

    output_file = target_dir / f"{skill.name}.md"

    content = f"""# {skill.name}

{skill.description}

This command delegates to the agent skill at `{skill.path}/`.

## Skill Location

- **Path:** `{skill.path}/`
- **Manifest:** `{skill.path}/SKILL.md`
"""

    output_file.write_text(content, encoding="utf-8")
    return output_file


def generate_windsurf(skill: SkillInfo, output_dir: Path) -> Path:
    """Generate a Windsurf workflow file.

    Args:
        skill: Skill information.
        output_dir: Target directory (will create .windsurf/workflows/).

    Returns:
        Path to the generated file.
    """
    target_dir = output_dir / ".windsurf" / "workflows"
    target_dir.mkdir(parents=True, exist_ok=True)

    output_file = target_dir / f"{skill.name}.md"

    desc_yaml = json.dumps(skill.description)

    content = f"""---
description: {desc_yaml}
auto_execution_mode: 1
---

# {skill.name}

This workflow delegates to the agent skill at `{skill.path}/`.

## Skill Location

- **Path:** `{skill.path}/`
- **Manifest:** `{skill.path}/SKILL.md`
"""

    output_file.write_text(content, encoding="utf-8")
    return output_file


def generate_codex(skill: SkillInfo, output_dir: Path) -> Path:
    """Generate a Codex skill file (placeholder, uses Cursor format).

    Args:
        skill: Skill information.
        output_dir: Target directory (will create .codex/skills/).

    Returns:
        Path to the generated file.
    """
    target_dir = output_dir / ".codex" / "skills"
    target_dir.mkdir(parents=True, exist_ok=True)

    output_file = target_dir / f"{skill.name}.md"

    content = f"""# {skill.name}

{skill.description}

This skill delegates to the agent skill at `{skill.path}/`.

## Skill Location

- **Path:** `{skill.path}/`
- **Manifest:** `{skill.path}/SKILL.md`
"""

    output_file.write_text(content, encoding="utf-8")
    return output_file


def cleanup_stale(
    output_dir: Path,
    subdir: str,
    valid_names: set[str],
    marker: str,
) -> list[Path]:
    """Remove stale generated files.

    Args:
        output_dir: Base output directory.
        subdir: Subdirectory path (e.g., ".cursor/commands").
        valid_names: Set of valid skill names to keep.
        marker: Text marker to identify generated files.

    Returns:
        List of removed file paths.
    """
    removed = []
    target_dir = output_dir / subdir

    if not target_dir.is_dir():
        return removed

    for file in target_dir.glob("*.md"):
        name = file.stem

        if name in valid_names:
            continue

        try:
            content = file.read_text(encoding="utf-8")
            if marker in content:
                file.unlink()
                removed.append(file)
        except (OSError, UnicodeDecodeError):
            pass

    return removed


ADAPTERS: dict[str, tuple[Callable[[SkillInfo, Path], Path], str, str]] = {
    "cursor": (generate_cursor, ".cursor/commands", "This command delegates to the agent skill at"),
    "windsurf": (generate_windsurf, ".windsurf/workflows", "This workflow delegates to the agent skill at"),
    "codex": (generate_codex, ".codex/skills", "This skill delegates to the agent skill at"),
}


def generate_adapters(
    skills_root: Path,
    output_dir: Path,
    targets: list[str] | None = None,
    exclude: set[str] | None = None,
    cleanup: bool = True,
) -> dict[str, dict]:
    """Generate adapter files for all skills.

    Args:
        skills_root: Root directory containing skills.
        output_dir: Output directory for generated files.
        targets: List of adapter targets (default: all).
        exclude: Set of skill names to exclude.
        cleanup: Whether to remove stale files.

    Returns:
        Dictionary with results per target.
    """
    if targets is None:
        targets = list(ADAPTERS.keys())

    if exclude is None:
        exclude = set()

    skills = find_skills(skills_root)
    skills = [s for s in skills if s.name not in exclude]
    valid_names = {s.name for s in skills}

    results = {}

    for target in targets:
        if target not in ADAPTERS:
            continue

        generator, subdir, marker = ADAPTERS[target]
        generated = []
        removed = []

        for skill in skills:
            path = generator(skill, output_dir)
            generated.append(path)

        if cleanup:
            removed = cleanup_stale(output_dir, subdir, valid_names, marker)

        results[target] = {
            "generated": [str(p) for p in generated],
            "removed": [str(p) for p in removed],
            "output_dir": str(output_dir / subdir),
        }

    return results


def main(argv: list[str] | None = None) -> int:
    """CLI entry point for standalone adapter generation."""
    parser = argparse.ArgumentParser(
        prog="skills.adapter",
        description="Generate IDE adapter files from SKILL.md files.",
    )
    parser.add_argument(
        "--skills-root",
        type=Path,
        required=True,
        help="Root directory containing skills",
    )
    parser.add_argument(
        "--output-dir",
        type=Path,
        default=Path("."),
        help="Output directory (default: current)",
    )
    parser.add_argument(
        "--target",
        choices=list(ADAPTERS.keys()),
        action="append",
        dest="targets",
        help="Target adapter(s) to generate (default: all)",
    )
    parser.add_argument(
        "--exclude",
        help="Comma-separated skill names to exclude",
    )
    parser.add_argument(
        "--no-cleanup",
        action="store_true",
        help="Don't remove stale files",
    )
    parser.add_argument(
        "--json",
        action="store_true",
        help="Output results as JSON",
    )
    parser.add_argument(
        "--quiet",
        action="store_true",
        help="Suppress output",
    )

    args = parser.parse_args(argv)

    skills_root = args.skills_root.resolve()
    output_dir = args.output_dir.resolve()

    if not skills_root.is_dir():
        print(f"Error: Not a directory: {skills_root}", file=sys.stderr)
        return 2

    exclude = set()
    if args.exclude:
        exclude = set(args.exclude.split(","))

    results = generate_adapters(
        skills_root=skills_root,
        output_dir=output_dir,
        targets=args.targets,
        exclude=exclude,
        cleanup=not args.no_cleanup,
    )

    if args.json:
        print(json.dumps(results, indent=2))
    elif not args.quiet:
        for target, info in results.items():
            gen_count = len(info["generated"])
            rem_count = len(info["removed"])
            print(f"{target}: Generated {gen_count} file(s) in {info['output_dir']}")
            if rem_count:
                print(f" Removed {rem_count} stale file(s)")

    return 0


if __name__ == "__main__":
    sys.exit(main())
```
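Per its docstring, `adapter.py` can also be used as a library by calling `generate_adapters()` directly. Below is a minimal sketch of that usage, assuming the module is importable as `adapter` (in an installed layout it may instead live at a package path such as `skills.adapter`); the skill names and paths are illustrative, not taken from the package:

```python
from pathlib import Path

# Assumption: module importable as `adapter`; adjust the import to your layout.
from adapter import generate_adapters

# Illustrative paths and skill names only.
results = generate_adapters(
    skills_root=Path("~/skills").expanduser(),
    output_dir=Path("."),
    targets=["cursor", "windsurf"],  # any subset of ADAPTERS keys
    exclude={"experimental-skill"},  # hypothetical skill name to skip
    cleanup=True,
)

for target, info in results.items():
    print(f"{target}: {len(info['generated'])} file(s) in {info['output_dir']}")
```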
adapters/__init__.py
ADDED
@@ -0,0 +1,17 @@

```python
"""Adapter modules for generating IDE-specific skill files."""

from adapters.base import BaseAdapter
from adapters.claude import ClaudeAdapter
from adapters.codex import CodexAdapter
from adapters.copilot import CopilotAdapter
from adapters.cursor import CursorAdapter
from adapters.windsurf import WindsurfAdapter

__all__ = [
    "BaseAdapter",
    "CursorAdapter",
    "WindsurfAdapter",
    "CodexAdapter",
    "CopilotAdapter",
    "ClaudeAdapter",
]
```
adapters/base.py
ADDED
@@ -0,0 +1,254 @@

```python
"""Base adapter interface for generating IDE-specific skill files."""

import sys
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Protocol

from skillcopy import copy_skill as copy_skill_unified


class SkillInfo(Protocol):
    """Protocol for skill information."""

    path: str
    name: str
    description: str


class BaseAdapter(ABC):
    """Abstract base class for adapters."""

    target_name: str = ""
    target_subdir: str = ""

    def resolve_output_dir(self, output_dir: Path) -> Path:
        """Resolve the actual output directory based on smart path detection.

        If output_dir ends with the target subdir pattern, use it directly.
        Otherwise, append the target subdir.

        Args:
            output_dir: User-specified output directory.

        Returns:
            Resolved output directory path.
        """
        output_str = str(output_dir)

        if output_str.endswith(self.target_subdir):
            return output_dir

        base_dir = self.target_subdir.rsplit("/", 1)[0]
        if output_str.endswith(base_dir):
            subdir_name = self.target_subdir.rsplit("/", 1)[1]
            return output_dir / subdir_name

        return output_dir / self.target_subdir

    @abstractmethod
    def generate(
        self, skill: SkillInfo, output_dir: Path, copy: bool = True, base_output_dir: Path | None = None
    ) -> Path:
        """Generate IDE-specific file for a skill.

        Args:
            skill: Skill information.
            output_dir: Resolved output directory for adapter files.
            copy: Always True (kept for backward compatibility).
            base_output_dir: Base output directory (for computing relative paths).

        Returns:
            Path to the generated file.
        """
        pass

    @abstractmethod
    def cleanup_stale(self, output_dir: Path, valid_names: set[str]) -> list[Path]:
        """Remove stale generated files.

        Args:
            output_dir: Output directory to clean.
            valid_names: Set of valid skill names (files to keep).

        Returns:
            List of removed file paths.
        """
        pass

    def get_skills_dir(self, output_dir: Path) -> Path:
        """Get the skills directory path for this adapter.

        Returns the sibling skills/ directory relative to the adapter output.
        E.g., for .windsurf/workflows/, returns .windsurf/skills/

        Args:
            output_dir: Base output directory.

        Returns:
            Path to the skills directory.
        """
        base = self.target_subdir.split("/")[0]  # e.g., ".windsurf" or ".github"
        return output_dir / base / "skills"

    def copy_skill(self, skill: SkillInfo, skills_dir: Path, show_progress: bool = False) -> Path:
        """Copy a skill to the local skills directory with tracking metadata.

        Uses unified copy interface (handles both local and remote).

        Args:
            skill: Skill to copy.
            skills_dir: Target skills directory.
            show_progress: If True, show progress for remote skills.

        Returns:
            Path to the copied skill directory.
        """
        dest = skills_dir / skill.name

        # Get the skill's content hash from manifest for tracking
        # If manifest doesn't exist, skip tracking (graceful degradation)
        inject_tracking = False
        source_hash = None

        try:
            from manifest import load_manifest

            manifest = load_manifest(skill.name)
            if manifest:
                source_hash = manifest.content_hash
                inject_tracking = True
        except Exception:
            # Gracefully skip tracking if manifest cannot be loaded
            pass

        return copy_skill_unified(
            skill.path,
            dest,
            validate=False,
            show_progress=show_progress,
            skill_name=skill.name,
            inject_tracking=inject_tracking,
            source_hash=source_hash,
        )

    def get_skill_path(self, skill: SkillInfo, output_dir: Path, copy: bool = True) -> str:
        """Get the skill path to use in generated files.

        Args:
            skill: Skill information.
            output_dir: Base output directory.
            copy: Always True (skills are always copied now).

        Returns:
            Relative path string to use in adapter output.
        """
        # Always return relative path to local copy
        return f"../skills/{skill.name}"

    def generate_all(
        self,
        skills: list[SkillInfo],
        output_dir: Path,
        exclude: set[str] | None = None,
        copy: bool = True,  # Always True, kept for backward compatibility
    ) -> tuple[list[Path], list[Path]]:
        """Generate files for all skills and cleanup stale ones.

        Args:
            skills: List of skills to generate.
            output_dir: Base output directory.
            exclude: Set of skill names to exclude.
            copy: Always True (kept for backward compatibility).

        Returns:
            Tuple of (generated files, removed stale files).
        """
        exclude = exclude or set()
        resolved_dir = self.resolve_output_dir(output_dir)
        resolved_dir.mkdir(parents=True, exist_ok=True)

        # Always copy skills (handles both local and remote)
        skills_dir = self.get_skills_dir(output_dir)
        skills_dir.mkdir(parents=True, exist_ok=True)

        # Separate remote and local skills
        from skillcopy.remote import is_remote_source

        remote_skills = [(s, skills_dir / s.name) for s in skills if s.name not in exclude and is_remote_source(s.path)]
        local_skills = [
            (s, skills_dir / s.name) for s in skills if s.name not in exclude and not is_remote_source(s.path)
        ]

        failed_skills = []

        # Copy remote skills in parallel
        if remote_skills:
            print(f"\nCopying {len(remote_skills)} remote skill(s)...", file=sys.stderr)
            import threading
            from concurrent.futures import ThreadPoolExecutor, as_completed

            # Thread-safe print lock
            print_lock = threading.Lock()

            def copy_remote_with_progress(skill, dest):
                """Copy a remote skill with thread-safe progress output."""
                try:
                    with print_lock:
                        platform = (
                            "GitHub"
                            if "github.com" in skill.path
                            else "GitLab"
                            if "gitlab.com" in skill.path
                            else "remote"
                        )
                        print(f" ↓ {skill.name} (fetching from {platform}...)", file=sys.stderr, flush=True)

                    self.copy_skill(skill, skills_dir, show_progress=False)  # We handle progress here

                    with print_lock:
                        print(f" ✓ {skill.name} (downloaded)", file=sys.stderr)

                    return skill.name, True, None
                except Exception as e:
                    with print_lock:
                        print(f" ✗ {skill.name} ({str(e)[:50]}...)", file=sys.stderr)
                    return skill.name, False, str(e)

            # Fetch remote skills in parallel (max 4 concurrent)
            with ThreadPoolExecutor(max_workers=4) as executor:
                futures = {
                    executor.submit(copy_remote_with_progress, skill, dest): skill.name for skill, dest in remote_skills
                }

                for future in as_completed(futures):
                    skill_name, success, error = future.result()
                    if not success:
                        failed_skills.append(skill_name)

        # Copy local skills sequentially (fast anyway)
        for skill, _dest in local_skills:
            try:
                self.copy_skill(skill, skills_dir, show_progress=False)
            except Exception as e:
                print(f"⚠ Warning: Failed to copy {skill.name}: {e}", file=sys.stderr)
                failed_skills.append(skill.name)

        generated = []
        valid_names = set()

        for skill in skills:
            if skill.name in exclude:
                continue

            # Skip skills that failed to copy
            if skill.name in failed_skills:
                continue

            valid_names.add(skill.name)
            path = self.generate(skill, resolved_dir, copy=True, base_output_dir=output_dir)
            generated.append(path)

        removed = self.cleanup_stale(resolved_dir, valid_names)

        return generated, removed
```
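The concrete adapters (`cursor.py`, `windsurf.py`, `codex.py`, `copilot.py`, `claude.py`) are not reproduced in this excerpt. The sketch below is a hypothetical minimal subclass, shown only to illustrate the `BaseAdapter` contract; the class name, target directory, and marker string are assumptions and do not reflect the package's actual implementations:

```python
from pathlib import Path

from adapters.base import BaseAdapter, SkillInfo


class ExampleAdapter(BaseAdapter):
    """Illustrative adapter writing to a made-up .example/commands/ directory."""

    target_name = "example"
    target_subdir = ".example/commands"
    _marker = "This command delegates to the agent skill at"  # assumed marker text

    def generate(
        self, skill: SkillInfo, output_dir: Path, copy: bool = True, base_output_dir: Path | None = None
    ) -> Path:
        output_dir.mkdir(parents=True, exist_ok=True)
        skill_path = self.get_skill_path(skill, output_dir, copy=copy)
        output_file = output_dir / f"{skill.name}.md"
        output_file.write_text(
            f"# {skill.name}\n\n{skill.description}\n\n{self._marker} `{skill_path}/`.\n",
            encoding="utf-8",
        )
        return output_file

    def cleanup_stale(self, output_dir: Path, valid_names: set[str]) -> list[Path]:
        removed = []
        for file in output_dir.glob("*.md"):
            if file.stem in valid_names:
                continue
            # Only delete files that carry the generated-file marker.
            if self._marker in file.read_text(encoding="utf-8"):
                file.unlink()
                removed.append(file)
        return removed
```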