oasr 0.5.0 (oasr-0.5.0-py3-none-any.whl)
This diff shows the content of a publicly available package version released to one of the supported registries. It is provided for informational purposes only and reflects the package as it appears in its public registry.
- __init__.py +3 -0
- __main__.py +6 -0
- adapter.py +396 -0
- adapters/__init__.py +17 -0
- adapters/base.py +254 -0
- adapters/claude.py +82 -0
- adapters/codex.py +84 -0
- adapters/copilot.py +210 -0
- adapters/cursor.py +78 -0
- adapters/windsurf.py +83 -0
- agents/__init__.py +25 -0
- agents/base.py +96 -0
- agents/claude.py +25 -0
- agents/codex.py +25 -0
- agents/copilot.py +25 -0
- agents/opencode.py +25 -0
- agents/registry.py +57 -0
- cli.py +97 -0
- commands/__init__.py +6 -0
- commands/adapter.py +102 -0
- commands/add.py +435 -0
- commands/clean.py +30 -0
- commands/clone.py +178 -0
- commands/config.py +163 -0
- commands/diff.py +180 -0
- commands/exec.py +245 -0
- commands/find.py +56 -0
- commands/help.py +51 -0
- commands/info.py +152 -0
- commands/list.py +110 -0
- commands/registry.py +447 -0
- commands/rm.py +128 -0
- commands/status.py +119 -0
- commands/sync.py +143 -0
- commands/update.py +417 -0
- commands/use.py +45 -0
- commands/validate.py +74 -0
- config/__init__.py +119 -0
- config/defaults.py +40 -0
- config/schema.py +73 -0
- discovery.py +145 -0
- manifest.py +437 -0
- oasr-0.5.0.dist-info/METADATA +358 -0
- oasr-0.5.0.dist-info/RECORD +59 -0
- oasr-0.5.0.dist-info/WHEEL +4 -0
- oasr-0.5.0.dist-info/entry_points.txt +3 -0
- oasr-0.5.0.dist-info/licenses/LICENSE +187 -0
- oasr-0.5.0.dist-info/licenses/NOTICE +8 -0
- policy/__init__.py +50 -0
- policy/defaults.py +27 -0
- policy/enforcement.py +98 -0
- policy/profile.py +185 -0
- registry.py +173 -0
- remote.py +482 -0
- skillcopy/__init__.py +71 -0
- skillcopy/local.py +40 -0
- skillcopy/remote.py +98 -0
- tracking.py +181 -0
- validate.py +362 -0
tracking.py
ADDED
@@ -0,0 +1,181 @@
"""Skill tracking via metadata.oasr frontmatter injection.

This module handles injecting and extracting tracking metadata in SKILL.md files.
Tracking metadata is stored under the `metadata.oasr` field to comply with the
Open Agent Skill specification.
"""

from datetime import datetime, timezone
from pathlib import Path

import yaml


def inject_metadata(skill_path: Path, content_hash: str, source: str) -> bool:
    """Inject tracking metadata into SKILL.md frontmatter.

    Args:
        skill_path: Path to skill directory
        content_hash: SHA256 hash of the skill content
        source: Source path or URL of the skill

    Returns:
        True if metadata was injected, False if SKILL.md not found or injection failed

    Raises:
        OSError: If file cannot be read or written (permission, encoding issues)
    """
    skill_md = skill_path / "SKILL.md"
    if not skill_md.exists():
        return False

    try:
        content = skill_md.read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError) as e:
        raise OSError(f"Failed to read {skill_md}: {e}") from e

    # Parse existing frontmatter
    frontmatter, body = _split_frontmatter(content)

    if frontmatter is None:
        # No frontmatter exists - shouldn't happen for valid skills, but handle it
        return False

    # Validate frontmatter is a dict
    if not isinstance(frontmatter, dict):
        return False

    # Ensure metadata field exists
    if "metadata" not in frontmatter:
        frontmatter["metadata"] = {}
    elif not isinstance(frontmatter["metadata"], dict):
        # metadata exists but is not a dict - fix it
        frontmatter["metadata"] = {}

    # Inject oasr tracking metadata
    frontmatter["metadata"]["oasr"] = {
        "hash": content_hash,
        "source": str(source),
        "synced": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z"),
    }

    # Write back
    try:
        new_content = _serialize_frontmatter(frontmatter) + body
        skill_md.write_text(new_content, encoding="utf-8")
    except (OSError, UnicodeEncodeError) as e:
        raise OSError(f"Failed to write {skill_md}: {e}") from e

    return True


def extract_metadata(skill_path: Path) -> dict | None:
    """Extract tracking metadata from SKILL.md.

    Args:
        skill_path: Path to skill directory

    Returns:
        Dictionary with 'hash', 'source', 'synced' keys, or None if not tracked
        Returns None on any error (file not found, encoding issues, corrupted metadata)
    """
    skill_md = skill_path / "SKILL.md"
    if not skill_md.exists():
        return None

    try:
        content = skill_md.read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError):
        # Cannot read file - treat as untracked
        return None

    frontmatter, _ = _split_frontmatter(content)

    if frontmatter is None or not isinstance(frontmatter, dict):
        return None

    # Safely extract metadata.oasr
    metadata = frontmatter.get("metadata")
    if not isinstance(metadata, dict):
        return None

    oasr = metadata.get("oasr")
    if not isinstance(oasr, dict):
        return None

    # Validate required fields
    if "hash" not in oasr or "source" not in oasr:
        return None

    return oasr


def strip_tracking_metadata(frontmatter: dict) -> dict:
    """Remove metadata.oasr from frontmatter dictionary.

    This is used when comparing registry skills to avoid flagging
    tracking metadata as drift.

    Args:
        frontmatter: Frontmatter dictionary

    Returns:
        Copy of frontmatter with metadata.oasr removed
    """
    import copy

    cleaned = copy.deepcopy(frontmatter)

    if "metadata" in cleaned and isinstance(cleaned["metadata"], dict):
        cleaned["metadata"].pop("oasr", None)
        # Remove metadata field entirely if it's now empty
        if not cleaned["metadata"]:
            cleaned.pop("metadata")

    return cleaned


def _split_frontmatter(content: str) -> tuple[dict | None, str]:
    """Split markdown content into frontmatter and body.

    Args:
        content: Full markdown content

    Returns:
        Tuple of (frontmatter_dict, body_text)
    """
    if not content.startswith("---"):
        return None, content

    lines = content.split("\n")
    end_idx = None

    for i, line in enumerate(lines[1:], start=1):
        if line.strip() == "---":
            end_idx = i
            break

    if end_idx is None:
        return None, content

    frontmatter_text = "\n".join(lines[1:end_idx])
    body_text = "\n".join(lines[end_idx + 1 :])

    try:
        frontmatter = yaml.safe_load(frontmatter_text)
        return frontmatter, body_text
    except yaml.YAMLError:
        return None, content


def _serialize_frontmatter(frontmatter: dict) -> str:
    """Serialize frontmatter dictionary back to YAML with delimiters.

    Args:
        frontmatter: Frontmatter dictionary

    Returns:
        YAML string with --- delimiters
    """
    yaml_str = yaml.safe_dump(frontmatter, default_flow_style=False, allow_unicode=True, sort_keys=False)
    return f"---\n{yaml_str}---\n"
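For orientation, here is a minimal usage sketch of the two public entry points above. It assumes the module is importable as `tracking` (the flat imports elsewhere in the package, such as `from discovery import parse_frontmatter` in validate.py, suggest top-level modules), a hypothetical skill directory `skills/demo-skill` whose SKILL.md already has frontmatter, and a stand-in hashlib digest rather than whatever content hash oasr itself computes:

from hashlib import sha256
from pathlib import Path

from tracking import extract_metadata, inject_metadata  # assumed import path

skill_dir = Path("skills/demo-skill")  # hypothetical skill directory
# Stand-in for oasr's own content hash: SHA256 of SKILL.md bytes.
content_hash = sha256((skill_dir / "SKILL.md").read_bytes()).hexdigest()

# Inject metadata.oasr into the SKILL.md frontmatter, then read it back.
if inject_metadata(skill_dir, content_hash, source="github.com/example/skills"):
    tracked = extract_metadata(skill_dir)
    print(tracked)  # e.g. {'hash': '...', 'source': '...', 'synced': '...Z'}
else:
    print("SKILL.md missing or has no frontmatter; nothing injected")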
validate.py
ADDED
@@ -0,0 +1,362 @@
"""Validation module for skill structure and frontmatter."""

import re
from dataclasses import dataclass, field
from enum import Enum
from pathlib import Path

from discovery import parse_frontmatter

KEBAB_CASE_PATTERN = re.compile(r"^[a-z]+(-[a-z]+)*$")


class Severity(Enum):
    """Validation message severity."""

    ERROR = "error"
    WARNING = "warning"
    INFO = "info"


@dataclass
class ValidationMessage:
    """A validation message."""

    code: str
    severity: Severity
    message: str
    file: str | None = None

    def __str__(self) -> str:
        prefix = {
            Severity.ERROR: "✗",
            Severity.WARNING: "⚠",
            Severity.INFO: "ℹ",
        }[self.severity]

        if self.file:
            return f"{prefix} {self.code}: {self.message} ({self.file})"
        return f"{prefix} {self.code}: {self.message}"


@dataclass
class ValidationResult:
    """Result of validating a skill."""

    name: str
    path: str
    valid: bool
    errors: list[ValidationMessage] = field(default_factory=list)
    warnings: list[ValidationMessage] = field(default_factory=list)
    info: list[ValidationMessage] = field(default_factory=list)

    @property
    def all_messages(self) -> list[ValidationMessage]:
        """All messages sorted by severity."""
        return self.errors + self.warnings + self.info

    def to_dict(self) -> dict:
        """Convert to dictionary for JSON output."""
        return {
            "name": self.name,
            "path": self.path,
            "valid": self.valid,
            "errors": [{"code": m.code, "message": m.message, "file": m.file} for m in self.errors],
            "warnings": [{"code": m.code, "message": m.message, "file": m.file} for m in self.warnings],
            "info": [{"code": m.code, "message": m.message, "file": m.file} for m in self.info],
        }


def validate_skill(
    path: Path,
    reference_max_lines: int = 500,
    check_exists: bool = True,
    skip_name_match: bool = False,
) -> ValidationResult:
    """Validate a skill directory.

    Args:
        path: Path to skill directory.
        reference_max_lines: Maximum lines for reference files (W007).
        check_exists: If True, check if path exists (for I001).
        skip_name_match: If True, skip W002 directory name check (for remote skills).

    Returns:
        ValidationResult with all messages.
    """
    path = path.resolve()
    result = ValidationResult(
        name=path.name,
        path=str(path),
        valid=True,
    )

    if check_exists and not path.exists():
        result.info.append(
            ValidationMessage(
                code="I001",
                severity=Severity.INFO,
                message="Registered skill path no longer exists",
            )
        )
        result.valid = False
        return result

    if not path.is_dir():
        result.errors.append(
            ValidationMessage(
                code="E001",
                severity=Severity.ERROR,
                message="Path is not a directory",
            )
        )
        result.valid = False
        return result

    skill_md = path / "SKILL.md"

    if not skill_md.exists():
        result.errors.append(
            ValidationMessage(
                code="E001",
                severity=Severity.ERROR,
                message="Missing SKILL.md file",
            )
        )
        result.valid = False
        return result

    try:
        content = skill_md.read_text(encoding="utf-8")
    except (OSError, UnicodeDecodeError) as e:
        result.errors.append(
            ValidationMessage(
                code="E002",
                severity=Severity.ERROR,
                message=f"Cannot read SKILL.md: {e}",
            )
        )
        result.valid = False
        return result

    frontmatter = parse_frontmatter(content)

    if frontmatter is None:
        result.errors.append(
            ValidationMessage(
                code="E002",
                severity=Severity.ERROR,
                message="Malformed or missing YAML frontmatter in SKILL.md",
            )
        )
        result.valid = False
        return result

    name = frontmatter.get("name")
    if not name:
        result.errors.append(
            ValidationMessage(
                code="E003",
                severity=Severity.ERROR,
                message="Missing frontmatter field: name",
            )
        )
        result.valid = False
    elif not isinstance(name, str):
        result.errors.append(
            ValidationMessage(
                code="E003",
                severity=Severity.ERROR,
                message="Frontmatter field 'name' must be a string",
            )
        )
        result.valid = False
    else:
        result.name = name

        if not KEBAB_CASE_PATTERN.match(name):
            result.errors.append(
                ValidationMessage(
                    code="E005",
                    severity=Severity.ERROR,
                    message=f"Name '{name}' violates kebab-case format (must match ^[a-z]+(-[a-z]+)*$)",
                )
            )
            result.valid = False

        if not skip_name_match and name != path.name:
            result.warnings.append(
                ValidationMessage(
                    code="W002",
                    severity=Severity.WARNING,
                    message=f"Directory name '{path.name}' doesn't match frontmatter name '{name}'",
                )
            )

    description = frontmatter.get("description")
    if description is None:
        result.errors.append(
            ValidationMessage(
                code="E004",
                severity=Severity.ERROR,
                message="Missing frontmatter field: description",
            )
        )
        result.valid = False
    elif not isinstance(description, str):
        result.errors.append(
            ValidationMessage(
                code="E004",
                severity=Severity.ERROR,
                message="Frontmatter field 'description' must be a string",
            )
        )
        result.valid = False
    elif not description.strip():
        result.warnings.append(
            ValidationMessage(
                code="W001",
                severity=Severity.WARNING,
                message="Description is empty or whitespace-only",
            )
        )

    if " " in str(path) or any(c in str(path) for c in ["'", '"', "&", "|", ";", "$"]):
        result.warnings.append(
            ValidationMessage(
                code="W003",
                severity=Severity.WARNING,
                message="Skill path contains spaces or special characters",
            )
        )

    _check_directory_structure(path, result)
    _check_script_portability(path, result)
    _check_empty_files(path, result)
    _check_reference_lengths(path, result, reference_max_lines)

    return result


def _check_directory_structure(path: Path, result: ValidationResult) -> None:
    """Check if skill has only scripts/ directory."""
    subdirs = [d for d in path.iterdir() if d.is_dir()]
    subdir_names = {d.name for d in subdirs}

    if subdir_names == {"scripts"}:
        result.warnings.append(
            ValidationMessage(
                code="W004",
                severity=Severity.WARNING,
                message="Skill contains only scripts/ directory - consider using scripts-only utility",
            )
        )


def _check_script_portability(path: Path, result: ValidationResult) -> None:
    """Check for .sh without .ps1 and vice versa."""
    scripts_dir = path / "scripts"
    if not scripts_dir.is_dir():
        return

    sh_files = {f.stem for f in scripts_dir.glob("*.sh")}
    ps1_files = {f.stem for f in scripts_dir.glob("*.ps1")}

    sh_only = sh_files - ps1_files
    ps1_only = ps1_files - sh_files

    for name in sh_only:
        result.warnings.append(
            ValidationMessage(
                code="W006",
                severity=Severity.WARNING,
                message=f"scripts/{name}.sh has no accompanying {name}.ps1",
                file=f"scripts/{name}.sh",
            )
        )

    for name in ps1_only:
        result.warnings.append(
            ValidationMessage(
                code="W006",
                severity=Severity.WARNING,
                message=f"scripts/{name}.ps1 has no accompanying {name}.sh",
                file=f"scripts/{name}.ps1",
            )
        )


def _check_empty_files(path: Path, result: ValidationResult) -> None:
    """Check for empty files in references/, assets/, scripts/."""
    for dirname in ["references", "assets", "scripts"]:
        dir_path = path / dirname
        if not dir_path.is_dir():
            continue

        for file in dir_path.iterdir():
            if file.is_file():
                try:
                    if file.stat().st_size == 0:
                        result.warnings.append(
                            ValidationMessage(
                                code="W005",
                                severity=Severity.WARNING,
                                message=f"Empty file: {dirname}/{file.name}",
                                file=f"{dirname}/{file.name}",
                            )
                        )
                except OSError:
                    pass


def _check_reference_lengths(path: Path, result: ValidationResult, max_lines: int) -> None:
    """Check if reference files exceed line threshold."""
    refs_dir = path / "references"
    if not refs_dir.is_dir():
        return

    for file in refs_dir.iterdir():
        if not file.is_file() or not file.suffix == ".md":
            continue

        try:
            content = file.read_text(encoding="utf-8")
            line_count = content.count("\n") + 1

            if line_count > max_lines:
                result.warnings.append(
                    ValidationMessage(
                        code="W007",
                        severity=Severity.WARNING,
                        message=f"references/{file.name} exceeds {max_lines} lines ({line_count} lines)",
                        file=f"references/{file.name}",
                    )
                )
        except (OSError, UnicodeDecodeError):
            pass


def validate_all(
    entries: list,
    reference_max_lines: int = 500,
) -> list[ValidationResult]:
    """Validate all registered skills.

    Args:
        entries: List of SkillEntry objects.
        reference_max_lines: Maximum lines for reference files.

    Returns:
        List of validation results.
    """
    results = []

    for entry in entries:
        result = validate_skill(
            Path(entry.path),
            reference_max_lines=reference_max_lines,
            check_exists=True,
        )
        results.append(result)

    return results
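And a similar sketch for the validator, assuming the module is importable as `validate` and reusing the hypothetical `skills/demo-skill` directory from the tracking example; the ✗ / ⚠ / ℹ prefixes in the printed output come from ValidationMessage.__str__:

from pathlib import Path

from validate import validate_skill  # assumed import path for the module above

# Validate a single (hypothetical) skill directory with the default threshold.
result = validate_skill(Path("skills/demo-skill"), reference_max_lines=500)

# all_messages concatenates errors, warnings, and info; str() adds the severity prefix.
for message in result.all_messages:
    print(message)

print("valid:", result.valid)
print(result.to_dict())  # JSON-friendly form for machine-readable output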