oasr-0.3.4-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- __init__.py +3 -0
- __main__.py +6 -0
- adapter.py +396 -0
- adapters/__init__.py +17 -0
- adapters/base.py +254 -0
- adapters/claude.py +82 -0
- adapters/codex.py +84 -0
- adapters/copilot.py +210 -0
- adapters/cursor.py +78 -0
- adapters/windsurf.py +83 -0
- cli.py +94 -0
- commands/__init__.py +6 -0
- commands/adapter.py +102 -0
- commands/add.py +302 -0
- commands/clean.py +155 -0
- commands/diff.py +180 -0
- commands/find.py +56 -0
- commands/help.py +51 -0
- commands/info.py +152 -0
- commands/list.py +110 -0
- commands/registry.py +303 -0
- commands/rm.py +128 -0
- commands/status.py +119 -0
- commands/sync.py +143 -0
- commands/update.py +417 -0
- commands/use.py +172 -0
- commands/validate.py +74 -0
- config.py +86 -0
- discovery.py +145 -0
- manifest.py +437 -0
- oasr-0.3.4.dist-info/METADATA +358 -0
- oasr-0.3.4.dist-info/RECORD +43 -0
- oasr-0.3.4.dist-info/WHEEL +4 -0
- oasr-0.3.4.dist-info/entry_points.txt +3 -0
- oasr-0.3.4.dist-info/licenses/LICENSE +187 -0
- oasr-0.3.4.dist-info/licenses/NOTICE +8 -0
- registry.py +173 -0
- remote.py +482 -0
- skillcopy/__init__.py +71 -0
- skillcopy/local.py +40 -0
- skillcopy/remote.py +98 -0
- tracking.py +181 -0
- validate.py +362 -0
config.py
ADDED
@@ -0,0 +1,86 @@
+"""Configuration management for ~/.oasr/config.toml."""
+
+import sys
+from pathlib import Path
+from typing import Any
+
+if sys.version_info >= (3, 11):
+    import tomllib
+else:
+    import tomli as tomllib
+
+import tomli_w
+
+OASR_DIR = Path.home() / ".oasr"
+CONFIG_FILE = OASR_DIR / "config.toml"
+
+DEFAULT_CONFIG: dict[str, Any] = {
+    "validation": {
+        "reference_max_lines": 500,
+        "strict": False,
+    },
+    "adapter": {
+        "default_targets": ["cursor", "windsurf"],
+    },
+}
+
+
+def ensure_oasr_dir() -> Path:
+    """Ensure ~/.oasr/ directory exists."""
+    OASR_DIR.mkdir(parents=True, exist_ok=True)
+    return OASR_DIR
+
+
+# Legacy alias for backwards compatibility
+def ensure_skills_dir() -> Path:
+    """Legacy alias for ensure_oasr_dir()."""
+    return ensure_oasr_dir()
+
+
+def load_config(config_path: Path | None = None) -> dict[str, Any]:
+    """Load configuration from TOML file.
+
+    Args:
+        config_path: Override config file path. Defaults to ~/.oasr/config.toml.
+
+    Returns:
+        Configuration dictionary with defaults applied.
+    """
+    path = config_path or CONFIG_FILE
+
+    config = DEFAULT_CONFIG.copy()
+    config["validation"] = DEFAULT_CONFIG["validation"].copy()
+    config["adapter"] = DEFAULT_CONFIG["adapter"].copy()
+
+    if path.exists():
+        with open(path, "rb") as f:
+            loaded = tomllib.load(f)
+
+        if "validation" in loaded:
+            config["validation"].update(loaded["validation"])
+        if "adapter" in loaded:
+            config["adapter"].update(loaded["adapter"])
+
+    return config
+
+
+def save_config(config: dict[str, Any], config_path: Path | None = None) -> None:
+    """Save configuration to TOML file.
+
+    Args:
+        config: Configuration dictionary to save.
+        config_path: Override config file path. Defaults to ~/.oasr/config.toml.
+    """
+    path = config_path or CONFIG_FILE
+    ensure_oasr_dir()
+
+    with open(path, "wb") as f:
+        tomli_w.dump(config, f)
+
+
+def get_default_config() -> dict[str, Any]:
+    """Return a copy of the default configuration."""
+    config = DEFAULT_CONFIG.copy()
+    config["validation"] = DEFAULT_CONFIG["validation"].copy()
+    config["adapter"] = DEFAULT_CONFIG["adapter"].copy()
+    return config
discovery.py
ADDED
@@ -0,0 +1,145 @@
+"""Discovery module for finding SKILL.md files recursively."""
+
+from dataclasses import dataclass
+from pathlib import Path
+
+import yaml
+
+
+@dataclass
+class DiscoveredSkill:
+    """A discovered skill from filesystem."""
+
+    path: Path
+    name: str
+    description: str
+    raw_frontmatter: dict | None = None
+
+
+def parse_frontmatter(content: str) -> dict | None:
+    """Parse YAML frontmatter from markdown content.
+
+    Args:
+        content: Markdown file content.
+
+    Returns:
+        Parsed frontmatter dictionary, or None if not found/invalid.
+    """
+    if not content.startswith("---"):
+        return None
+
+    lines = content.split("\n")
+    end_idx = None
+
+    for i, line in enumerate(lines[1:], start=1):
+        if line.strip() == "---":
+            end_idx = i
+            break
+
+    if end_idx is None:
+        return None
+
+    frontmatter_text = "\n".join(lines[1:end_idx])
+
+    try:
+        return yaml.safe_load(frontmatter_text)
+    except yaml.YAMLError:
+        return None
+
+
+def extract_skill_info(skill_md_path: Path) -> tuple[str, str, dict | None]:
+    """Extract name and description from SKILL.md.
+
+    Args:
+        skill_md_path: Path to SKILL.md file.
+
+    Returns:
+        Tuple of (name, description, raw_frontmatter).
+        Name/description default to empty string if not found.
+    """
+    try:
+        content = skill_md_path.read_text(encoding="utf-8")
+    except (OSError, UnicodeDecodeError):
+        return "", "", None
+
+    frontmatter = parse_frontmatter(content)
+
+    if frontmatter is None:
+        return "", "", None
+
+    name = frontmatter.get("name", "")
+    description = frontmatter.get("description", "")
+
+    if isinstance(name, str):
+        name = name.strip()
+    else:
+        name = ""
+
+    if isinstance(description, str):
+        description = " ".join(description.split())
+    else:
+        description = ""
+
+    return name, description, frontmatter
+
+
+def find_skills(root: Path) -> list[DiscoveredSkill]:
+    """Find all skills recursively under a root directory.
+
+    Args:
+        root: Root directory to search.
+
+    Returns:
+        List of discovered skills.
+    """
+    skills = []
+    root = root.resolve()
+
+    if not root.is_dir():
+        return skills
+
+    for skill_md in root.rglob("SKILL.md"):
+        skill_dir = skill_md.parent
+        name, description, frontmatter = extract_skill_info(skill_md)
+
+        if not name:
+            name = skill_dir.name
+
+        skills.append(
+            DiscoveredSkill(
+                path=skill_dir,
+                name=name,
+                description=description,
+                raw_frontmatter=frontmatter,
+            )
+        )
+
+    return skills
+
+
+def discover_single(path: Path) -> DiscoveredSkill | None:
+    """Discover a single skill at a given path.
+
+    Args:
+        path: Path to skill directory (containing SKILL.md).
+
+    Returns:
+        Discovered skill, or None if not a valid skill.
+    """
+    path = path.resolve()
+    skill_md = path / "SKILL.md"
+
+    if not skill_md.exists():
+        return None
+
+    name, description, frontmatter = extract_skill_info(skill_md)
+
+    if not name:
+        name = path.name
+
+    return DiscoveredSkill(
+        path=path,
+        name=name,
+        description=description,
+        raw_frontmatter=frontmatter,
+    )
manifest.py
ADDED
@@ -0,0 +1,437 @@
+"""Skill manifest management for auditing and verification.
+
+Manifests track the state of registered skills, enabling:
+- Source verification (content hashing)
+- Change detection (modified files)
+- Existence validation (missing sources)
+- Audit trails (registration timestamps)
+"""
+
+from __future__ import annotations
+
+import hashlib
+import json
+import shutil
+from dataclasses import dataclass, field
+from datetime import datetime, timezone
+from pathlib import Path
+from typing import Literal
+
+from remote import check_remote_reachability, fetch_remote_to_temp
+from skillcopy.remote import is_remote_source
+
+MANIFESTS_DIR = "manifests"
+MANIFEST_SUFFIX = ".manifest.json"
+MANIFEST_VERSION = 1
+
+
+@dataclass
+class FileEntry:
+    """A single file in the manifest."""
+
+    path: str
+    hash: str
+    size: int
+
+    def to_dict(self) -> dict:
+        return {"path": self.path, "hash": self.hash, "size": self.size}
+
+    @classmethod
+    def from_dict(cls, data: dict) -> FileEntry:
+        return cls(path=data["path"], hash=data["hash"], size=data["size"])
+
+
+@dataclass
+class SkillManifest:
+    """Manifest for a registered skill."""
+
+    name: str
+    source_path: str
+    description: str
+    registered_at: str
+    content_hash: str
+    files: list[FileEntry] = field(default_factory=list)
+    version: int = MANIFEST_VERSION
+
+    def to_dict(self) -> dict:
+        return {
+            "version": self.version,
+            "name": self.name,
+            "source_path": self.source_path,
+            "description": self.description,
+            "registered_at": self.registered_at,
+            "content_hash": self.content_hash,
+            "files": [f.to_dict() for f in self.files],
+        }
+
+    @classmethod
+    def from_dict(cls, data: dict) -> SkillManifest:
+        return cls(
+            version=data.get("version", 1),
+            name=data["name"],
+            source_path=data["source_path"],
+            description=data["description"],
+            registered_at=data["registered_at"],
+            content_hash=data["content_hash"],
+            files=[FileEntry.from_dict(f) for f in data.get("files", [])],
+        )
+
+
+SkillStatus = Literal["valid", "modified", "missing", "orphaned", "untracked"]
+
+
+@dataclass
+class ManifestStatus:
+    """Status of a skill manifest check."""
+
+    name: str
+    status: SkillStatus
+    source_path: str | None = None
+    message: str = ""
+    changed_files: list[str] = field(default_factory=list)
+    added_files: list[str] = field(default_factory=list)
+    removed_files: list[str] = field(default_factory=list)
+
+    def to_dict(self) -> dict:
+        return {
+            "name": self.name,
+            "status": self.status,
+            "source_path": self.source_path,
+            "message": self.message,
+            "changed_files": self.changed_files,
+            "added_files": self.added_files,
+            "removed_files": self.removed_files,
+        }
+
+
+def hash_file(path: Path) -> str:
+    """Compute SHA256 hash of a file.
+
+    Args:
+        path: Path to the file.
+
+    Returns:
+        Hash string in format "sha256:<hex>".
+    """
+    hasher = hashlib.sha256()
+    try:
+        with open(path, "rb") as f:
+            for chunk in iter(lambda: f.read(8192), b""):
+                hasher.update(chunk)
+        return f"sha256:{hasher.hexdigest()}"
+    except OSError:
+        return "sha256:error"
+
+
+def hash_directory(path: Path) -> tuple[str, list[FileEntry]]:
+    """Compute content hash of a directory.
+
+    Creates a deterministic hash based on all file paths and their contents.
+
+    Args:
+        path: Path to the directory.
+
+    Returns:
+        Tuple of (content_hash, list of FileEntry).
+    """
+    entries = []
+    hasher = hashlib.sha256()
+
+    if not path.is_dir():
+        return "sha256:missing", []
+
+    files = sorted(path.rglob("*"))
+
+    for file_path in files:
+        if file_path.is_file():
+            rel_path = str(file_path.relative_to(path))
+            file_hash = hash_file(file_path)
+            file_size = file_path.stat().st_size
+
+            entries.append(
+                FileEntry(
+                    path=rel_path,
+                    hash=file_hash,
+                    size=file_size,
+                )
+            )
+
+            hasher.update(rel_path.encode("utf-8"))
+            hasher.update(file_hash.encode("utf-8"))
+
+    return f"sha256:{hasher.hexdigest()}", entries
+
+
+def get_manifests_dir(config_dir: Path | None = None) -> Path:
+    """Get the manifests directory path.
+
+    Args:
+        config_dir: Override config directory (default: ~/.oasr).
+
+    Returns:
+        Path to manifests directory.
+    """
+    if config_dir is None:
+        config_dir = Path.home() / ".oasr"
+    return config_dir / MANIFESTS_DIR
+
+
+def create_manifest(
+    name: str,
+    source_path: Path | str,
+    description: str,
+) -> SkillManifest:
+    """Create a new manifest for a skill.
+
+    Args:
+        name: Skill name.
+        source_path: Absolute path to skill directory or remote URL.
+        description: Skill description.
+
+    Returns:
+        New SkillManifest instance.
+    """
+    source_path_str = str(source_path)
+
+    # Handle remote sources
+    if is_remote_source(source_path_str):
+        temp_dir = fetch_remote_to_temp(source_path_str)
+        try:
+            content_hash, files = hash_directory(temp_dir)
+        finally:
+            shutil.rmtree(temp_dir, ignore_errors=True)
+    else:
+        # Handle local sources
+        content_hash, files = hash_directory(Path(source_path_str))
+
+    return SkillManifest(
+        name=name,
+        source_path=source_path_str,
+        description=description,
+        registered_at=datetime.now(timezone.utc).isoformat(),
+        content_hash=content_hash,
+        files=files,
+    )
+
+
+def save_manifest(manifest: SkillManifest, config_dir: Path | None = None) -> Path:
+    """Save a manifest to disk.
+
+    Args:
+        manifest: Manifest to save.
+        config_dir: Override config directory.
+
+    Returns:
+        Path to saved manifest file.
+    """
+    manifests_dir = get_manifests_dir(config_dir)
+    manifests_dir.mkdir(parents=True, exist_ok=True)
+
+    manifest_path = manifests_dir / f"{manifest.name}{MANIFEST_SUFFIX}"
+    manifest_path.write_text(
+        json.dumps(manifest.to_dict(), indent=2),
+        encoding="utf-8",
+    )
+
+    return manifest_path
+
+
+def load_manifest(name: str, config_dir: Path | None = None) -> SkillManifest | None:
+    """Load a manifest from disk.
+
+    Args:
+        name: Skill name.
+        config_dir: Override config directory.
+
+    Returns:
+        SkillManifest if found, None otherwise.
+    """
+    manifests_dir = get_manifests_dir(config_dir)
+    manifest_path = manifests_dir / f"{name}{MANIFEST_SUFFIX}"
+
+    if not manifest_path.is_file():
+        return None
+
+    try:
+        data = json.loads(manifest_path.read_text(encoding="utf-8"))
+        return SkillManifest.from_dict(data)
+    except (json.JSONDecodeError, KeyError, OSError):
+        return None
+
+
+def delete_manifest(name: str, config_dir: Path | None = None) -> bool:
+    """Delete a manifest from disk.
+
+    Args:
+        name: Skill name.
+        config_dir: Override config directory.
+
+    Returns:
+        True if deleted, False if not found.
+    """
+    manifests_dir = get_manifests_dir(config_dir)
+    manifest_path = manifests_dir / f"{name}{MANIFEST_SUFFIX}"
+
+    if manifest_path.is_file():
+        manifest_path.unlink()
+        return True
+    return False
+
+
+def list_manifests(config_dir: Path | None = None) -> list[str]:
+    """List all manifest names.
+
+    Args:
+        config_dir: Override config directory.
+
+    Returns:
+        List of skill names with manifests.
+    """
+    manifests_dir = get_manifests_dir(config_dir)
+
+    if not manifests_dir.is_dir():
+        return []
+
+    names = []
+    for path in manifests_dir.glob(f"*{MANIFEST_SUFFIX}"):
+        name = path.name[: -len(MANIFEST_SUFFIX)]
+        names.append(name)
+
+    return sorted(names)
+
+
+def check_manifest(manifest: SkillManifest) -> ManifestStatus:
+    """Check if a manifest matches its source.
+
+    Args:
+        manifest: Manifest to check.
+
+    Returns:
+        ManifestStatus with validation results.
+    """
+    source_path_str = manifest.source_path
+
+    # Handle remote sources
+    if is_remote_source(source_path_str):
+        # Check remote reachability
+        reachable, status_code, message = check_remote_reachability(source_path_str)
+
+        if not reachable:
+            if status_code in (404, 410):
+                return ManifestStatus(
+                    name=manifest.name,
+                    status="missing",
+                    source_path=source_path_str,
+                    message=f"Remote source not found: {message}",
+                )
+            else:
+                # Network error - assume valid (transient)
+                return ManifestStatus(
+                    name=manifest.name,
+                    status="valid",
+                    source_path=source_path_str,
+                    message=f"Cannot verify remote (network issue): {message}",
+                )
+
+        # Fetch current content to temp dir
+        try:
+            temp_dir = fetch_remote_to_temp(source_path_str)
+        except Exception as e:
+            return ManifestStatus(
+                name=manifest.name,
+                status="valid",
+                source_path=source_path_str,
+                message=f"Cannot fetch remote (assuming unchanged): {e}",
+            )
+
+        try:
+            current_hash, current_files = hash_directory(temp_dir)
+        finally:
+            shutil.rmtree(temp_dir, ignore_errors=True)
+    else:
+        # Handle local sources
+        source_path = Path(source_path_str)
+
+        if not source_path.exists():
+            return ManifestStatus(
+                name=manifest.name,
+                status="missing",
+                source_path=source_path_str,
+                message=f"Source path no longer exists: {source_path_str}",
+            )
+
+        current_hash, current_files = hash_directory(source_path)
+
+    # Compare hashes
+    if current_hash == manifest.content_hash:
+        return ManifestStatus(
+            name=manifest.name,
+            status="valid",
+            source_path=source_path_str,
+            message="Source matches manifest",
+        )
+
+    # Compute differences
+    current_file_map = {f.path: f for f in current_files}
+    manifest_file_map = {f.path: f for f in manifest.files}
+
+    changed = []
+    added = []
+    removed = []
+
+    for path, entry in current_file_map.items():
+        if path not in manifest_file_map:
+            added.append(path)
+        elif entry.hash != manifest_file_map[path].hash:
+            changed.append(path)
+
+    for path in manifest_file_map:
+        if path not in current_file_map:
+            removed.append(path)
+
+    return ManifestStatus(
+        name=manifest.name,
+        status="modified",
+        source_path=source_path_str,
+        message=f"Source modified: {len(changed)} changed, {len(added)} added, {len(removed)} removed",
+        changed_files=changed,
+        added_files=added,
+        removed_files=removed,
+    )
+
+
+def sync_manifest(manifest: SkillManifest, config_dir: Path | None = None) -> SkillManifest:
+    """Update a manifest to match current source state.
+
+    Args:
+        manifest: Existing manifest.
+        config_dir: Override config directory.
+
+    Returns:
+        Updated manifest (also saved to disk).
+    """
+    source_path_str = manifest.source_path
+
+    # Handle remote sources
+    if is_remote_source(source_path_str):
+        temp_dir = fetch_remote_to_temp(source_path_str)
+        try:
+            content_hash, files = hash_directory(temp_dir)
+        finally:
+            shutil.rmtree(temp_dir, ignore_errors=True)
+    else:
+        # Handle local sources
+        source_path = Path(source_path_str)
+        content_hash, files = hash_directory(source_path)
+
+    updated = SkillManifest(
+        name=manifest.name,
+        source_path=manifest.source_path,
+        description=manifest.description,
+        registered_at=manifest.registered_at,
+        content_hash=content_hash,
+        files=files,
+    )
+
+    save_manifest(updated, config_dir)
+    return updated