skillscheck 0.1.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- skillscheck/__init__.py +0 -0
- skillscheck/agents/__init__.py +106 -0
- skillscheck/agents/_util.py +91 -0
- skillscheck/agents/claude.py +383 -0
- skillscheck/agents/codex.py +348 -0
- skillscheck/agents/copilot.py +50 -0
- skillscheck/agents/cursor.py +63 -0
- skillscheck/agents/gemini.py +87 -0
- skillscheck/agents/roo.py +122 -0
- skillscheck/agents/swival.py +66 -0
- skillscheck/agents/windsurf.py +48 -0
- skillscheck/checks/__init__.py +0 -0
- skillscheck/checks/disclosure.py +80 -0
- skillscheck/checks/quality.py +201 -0
- skillscheck/checks/spec.py +548 -0
- skillscheck/cli.py +113 -0
- skillscheck/mdutil.py +81 -0
- skillscheck/models.py +116 -0
- skillscheck/parser.py +99 -0
- skillscheck/tokenutil.py +25 -0
- skillscheck/validator.py +62 -0
- skillscheck-0.1.0.dist-info/METADATA +60 -0
- skillscheck-0.1.0.dist-info/RECORD +26 -0
- skillscheck-0.1.0.dist-info/WHEEL +4 -0
- skillscheck-0.1.0.dist-info/entry_points.txt +2 -0
- skillscheck-0.1.0.dist-info/licenses/LICENSE +21 -0
skillscheck/__init__.py
ADDED
|
File without changes
|
|
@@ -0,0 +1,106 @@
|
|
|
1
|
+
"""Agent adapters and cross-agent checks."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Protocol
|
|
7
|
+
|
|
8
|
+
from ..models import Diagnostic, Level, SkillInfo
|
|
9
|
+
|
|
10
|
+
from .claude import ClaudeAdapter
|
|
11
|
+
from .codex import CodexAdapter
|
|
12
|
+
from .copilot import CopilotAdapter
|
|
13
|
+
from .cursor import CursorAdapter
|
|
14
|
+
from .gemini import GeminiAdapter
|
|
15
|
+
from .roo import RooAdapter
|
|
16
|
+
from .swival import SwivalAdapter
|
|
17
|
+
from .windsurf import WindsurfAdapter
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class AgentAdapter(Protocol):
    """Structural (duck-typed) interface that every agent adapter satisfies.

    Instances are registered in ALL_ADAPTERS and selected via get_adapters().
    """

    # Short identifier used for name-based lookup (e.g. "claude").
    name: str
    # Documentation URL attached to diagnostics the adapter emits.
    source_url: str

    # True when repo_root shows evidence this agent is configured.
    def detect(self, repo_root: Path) -> bool: ...
    # Run the adapter's checks over the repo and the parsed skills.
    def check(self, repo_root: Path, skills: list[SkillInfo]) -> list[Diagnostic]: ...
    # Frontmatter keys this agent recognizes.
    def known_frontmatter_fields(self) -> set[str]: ...
    # Whether this agent permits list syntax for the tools field.
    def allows_tools_list_syntax(self) -> bool: ...
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
# Registry of every supported agent adapter, in reporting order.
# NOTE(review): the list-item ignore suggests the concrete adapters do not
# nominally satisfy AgentAdapter for the type checker — confirm intent.
ALL_ADAPTERS: list[AgentAdapter] = [
    ClaudeAdapter(),
    CodexAdapter(),
    CopilotAdapter(),
    CursorAdapter(),
    GeminiAdapter(),
    RooAdapter(),
    SwivalAdapter(),
    WindsurfAdapter(),
] # type: ignore[list-item]
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
def get_adapters(names: list[str] | None, repo_root: Path) -> list[AgentAdapter]:
    """Resolve requested adapter names to adapter instances.

    When *names* is None or the special ["all"], return every adapter
    that detects itself in *repo_root*. Otherwise return the adapters
    matching the given names; unknown names are silently dropped.
    """
    if names is None or names == ["all"]:
        return [adapter for adapter in ALL_ADAPTERS if adapter.detect(repo_root)]
    by_name = {adapter.name: adapter for adapter in ALL_ADAPTERS}
    return [by_name[n] for n in names if n in by_name]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def _read_json_metadata(path: Path) -> dict | None:
|
|
54
|
+
import json
|
|
55
|
+
|
|
56
|
+
try:
|
|
57
|
+
data = json.loads(path.read_text())
|
|
58
|
+
return data if isinstance(data, dict) else None
|
|
59
|
+
except (json.JSONDecodeError, OSError):
|
|
60
|
+
return None
|
|
61
|
+
|
|
62
|
+
|
|
63
|
+
def cross_agent_check(
    repo_root: Path, adapters: list[AgentAdapter]
) -> list[Diagnostic]:
    """Warn when shared metadata disagrees across agent config files.

    Only meaningful with more than one active adapter: compares the
    'name', 'version', and 'description' values found in each known
    per-agent config file and emits a WARNING per mismatched field.
    """
    if len(adapters) < 2:
        return []

    config_files = {
        "plugin.json": repo_root / ".claude-plugin" / "plugin.json",
        "gemini-extension.json": repo_root / "gemini-extension.json",
    }

    # field name -> {config label -> value found in that config}
    collected: dict[str, dict[str, str]] = {
        "name": {},
        "version": {},
        "description": {},
    }
    for label, path in config_files.items():
        data = _read_json_metadata(path)
        if data is None:
            continue
        for field in collected:
            if field in data:
                collected[field][label] = data[field]

    check_ids = {
        "name": "3c.name-mismatch",
        "version": "3c.version-mismatch",
        "description": "3c.description-mismatch",
    }
    diags: list[Diagnostic] = []
    for field, check_id in check_ids.items():
        values = collected[field]
        if len(set(values.values())) <= 1:
            continue
        detail = ", ".join(f"{k}={v!r}" for k, v in values.items())
        diags.append(
            Diagnostic(
                Level.WARNING,
                check_id,
                f"{field} mismatch across agent configs: {detail}",
                path=str(repo_root),
            )
        )

    return diags
|
|
@@ -0,0 +1,91 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import json
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from typing import Any
|
|
6
|
+
|
|
7
|
+
from ..models import Diagnostic, Level, SkillInfo
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def load_json_object(
    path: Path,
    prefix: str,
    label: str,
    source_url: str,
) -> tuple[dict | None, list[Diagnostic]]:
    """Load *path* as a JSON object, reporting problems as diagnostics.

    Returns ``(data, diags)``: *data* is the parsed dict, or None when
    the file is missing, unreadable, invalid JSON, or not a JSON object,
    in which case a single ERROR diagnostic explains why. Check ids are
    ``{prefix}.missing`` / ``{prefix}.invalid`` / ``{prefix}.type``.
    """

    def error(check_suffix: str, message: str) -> Diagnostic:
        # All failure modes share the same level/path/source_url plumbing.
        return Diagnostic(
            Level.ERROR,
            f"{prefix}.{check_suffix}",
            message,
            path=str(path),
            source_url=source_url,
        )

    if not path.exists():
        return None, [error("missing", f"{label} not found")]
    try:
        data = json.loads(path.read_text())
    except json.JSONDecodeError as e:
        return None, [error("invalid", f"invalid JSON: {e}")]
    except OSError as e:
        # exists() passed, but the read can still fail (permissions, races);
        # report it as a diagnostic instead of crashing the whole run.
        return None, [error("invalid", f"unreadable file: {e}")]
    if not isinstance(data, dict):
        return None, [error("type", f"{label} must be a JSON object")]
    return data, []
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
def check_field_types(
    skill: SkillInfo,
    fields: set[str],
    expected_type: type | tuple[type, ...],
    type_label: str,
    check_prefix: str,
    source_url: str,
    validator: Any = None,
) -> list[Diagnostic]:
    """Type-check optional frontmatter fields on a skill.

    Each field that is present (non-None) is validated either by
    *validator* (a predicate, when given) or by an isinstance() check
    against *expected_type*; every failure becomes an ERROR diagnostic
    with check id ``{check_prefix}.frontmatter.{field}-type``.
    Absent fields are skipped.
    """
    diags: list[Diagnostic] = []
    fm = skill.frontmatter or {}
    for field in fields:
        val = fm.get(field)
        if val is None:
            continue
        # A custom validator takes precedence over the isinstance check;
        # both failure paths report the identical diagnostic.
        ok = validator(val) if validator is not None else isinstance(val, expected_type)
        if not ok:
            diags.append(
                Diagnostic(
                    Level.ERROR,
                    f"{check_prefix}.frontmatter.{field}-type",
                    f"'{field}' must be {type_label}, got {type(val).__name__}",
                    path=skill.skill_md_path,
                    source_url=source_url,
                )
            )
    return diags
|
|
@@ -0,0 +1,383 @@
|
|
|
1
|
+
"""Claude Code agent adapter (3a)."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
from pathlib import Path
|
|
7
|
+
|
|
8
|
+
from ..models import Diagnostic, Level, SkillInfo
|
|
9
|
+
from ._util import load_json_object
|
|
10
|
+
|
|
11
|
+
# SKILL.md frontmatter keys the Claude adapter recognizes; returned by
# ClaudeAdapter.known_frontmatter_fields().
EXTENSION_FIELDS = {
    "disable-model-invocation",
    "user-invocable",
    "argument-hint",
    "model",
    "context",
    "agent",
    "hooks",
}
|
|
20
|
+
|
|
21
|
+
# Top-level keys accepted in .claude-plugin/plugin.json; any other key
# triggers the 3a.plugin-json.fields warning in _check_plugin_json().
PLUGIN_JSON_ALLOWED_FIELDS = {
    "name",
    "description",
    "version",
    "author",
    "keywords",
    "license",
    "repository",
    "homepage",
}
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class ClaudeAdapter:
    """Agent adapter for Claude Code plugins (check group 3a).

    Validates the ``.claude-plugin/`` metadata files (plugin.json and
    marketplace.json) and their mutual consistency. All check ids,
    levels, and messages match the published check list.
    """

    name = "claude"
    source_url = "https://code.claude.com/docs/en/skills"

    def detect(self, repo_root: Path) -> bool:
        """A repo targets Claude Code when it has a .claude-plugin/ dir."""
        return (repo_root / ".claude-plugin").is_dir()

    def known_frontmatter_fields(self) -> set[str]:
        return EXTENSION_FIELDS

    def allows_tools_list_syntax(self) -> bool:
        return False

    def check(self, repo_root: Path, skills: list[SkillInfo]) -> list[Diagnostic]:
        """Run all Claude-specific checks; *skills* is currently unused here."""
        diags: list[Diagnostic] = []
        plugin_dir = repo_root / ".claude-plugin"

        diags.extend(self._check_plugin_json(plugin_dir))
        diags.extend(self._check_marketplace_json(plugin_dir, root=repo_root))
        diags.extend(self._check_consistency(plugin_dir))
        return diags

    def _diag(self, level: Level, check_id: str, message: str, path: Path | str) -> Diagnostic:
        # Shared plumbing: every diagnostic from this adapter carries the
        # same source_url and a stringified path.
        return Diagnostic(
            level,
            check_id,
            message,
            path=str(path),
            source_url=self.source_url,
        )

    def _check_plugin_json(self, plugin_dir: Path) -> list[Diagnostic]:
        """Validate .claude-plugin/plugin.json structure and fields."""
        path = plugin_dir / "plugin.json"
        data, diags = load_json_object(
            path, "3a.plugin-json", "plugin.json", self.source_url
        )
        if data is None:
            return diags

        # Recommended top-level fields; absence is a warning, not an error.
        for field in ("name", "version", "description"):
            if field not in data:
                diags.append(
                    self._diag(
                        Level.WARNING,
                        f"3a.plugin-json.{field}",
                        f"plugin.json missing '{field}' field",
                        path,
                    )
                )

        author = data.get("author")
        if author is None:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.plugin-json.author",
                    "plugin.json missing 'author' field",
                    path,
                )
            )
        elif not isinstance(author, dict):
            diags.append(
                self._diag(
                    Level.ERROR,
                    "3a.plugin-json.author-type",
                    "plugin.json 'author' must be an object with a 'name' field",
                    path,
                )
            )
        elif "name" not in author:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.plugin-json.author-name",
                    "plugin.json 'author' object missing 'name' field",
                    path,
                )
            )

        keywords = data.get("keywords")
        if keywords is None:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.plugin-json.keywords",
                    "plugin.json missing 'keywords' field",
                    path,
                )
            )
        elif not isinstance(keywords, list):
            diags.append(
                self._diag(
                    Level.ERROR,
                    "3a.plugin-json.keywords-type",
                    "plugin.json 'keywords' must be an array of strings",
                    path,
                )
            )
        elif not all(isinstance(k, str) for k in keywords):
            diags.append(
                self._diag(
                    Level.ERROR,
                    "3a.plugin-json.keywords-items",
                    "plugin.json 'keywords' array must contain only strings",
                    path,
                )
            )

        bad_fields = set(data.keys()) - PLUGIN_JSON_ALLOWED_FIELDS
        if bad_fields:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.plugin-json.fields",
                    f"plugin.json has unrecognized fields: {', '.join(sorted(bad_fields))}",
                    path,
                )
            )

        return diags

    def _check_marketplace_json(self, plugin_dir: Path, root: Path) -> list[Diagnostic]:
        """Validate .claude-plugin/marketplace.json and its plugin entries."""
        path = plugin_dir / "marketplace.json"
        data, diags = load_json_object(
            path, "3a.marketplace-json", "marketplace.json", self.source_url
        )
        if data is None:
            return diags

        if "name" not in data:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.marketplace-json.name",
                    "marketplace.json missing 'name' field",
                    path,
                )
            )

        metadata = data.get("metadata")
        if metadata is None:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.marketplace-json.metadata",
                    "marketplace.json missing 'metadata' field",
                    path,
                )
            )
        elif not isinstance(metadata, dict):
            diags.append(
                self._diag(
                    Level.ERROR,
                    "3a.marketplace-json.metadata-type",
                    "marketplace.json 'metadata' must be an object",
                    path,
                )
            )
        elif "description" not in metadata:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.marketplace-json.metadata-desc",
                    "marketplace.json 'metadata' missing 'description' field",
                    path,
                )
            )

        owner = data.get("owner")
        if owner is None:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.marketplace-json.owner",
                    "marketplace.json missing 'owner' field",
                    path,
                )
            )
        elif not isinstance(owner, dict):
            diags.append(
                self._diag(
                    Level.ERROR,
                    "3a.marketplace-json.owner-type",
                    "marketplace.json 'owner' must be an object",
                    path,
                )
            )
        elif "name" not in owner:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.marketplace-json.owner-name",
                    "marketplace.json 'owner' missing 'name' field",
                    path,
                )
            )

        if "plugins" not in data:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.marketplace-json.plugins",
                    "marketplace.json missing 'plugins' array",
                    path,
                )
            )
        elif not isinstance(data["plugins"], list):
            diags.append(
                self._diag(
                    Level.ERROR,
                    "3a.marketplace-json.plugins-type",
                    "'plugins' must be an array",
                    path,
                )
            )
        else:
            diags.extend(self._check_plugin_entries(data["plugins"], path, root))

        return diags

    def _check_plugin_entries(
        self, plugins: list, path: Path, root: Path
    ) -> list[Diagnostic]:
        """Validate each entry of the marketplace 'plugins' array."""
        diags: list[Diagnostic] = []
        for i, plugin in enumerate(plugins):
            if not isinstance(plugin, dict):
                diags.append(
                    self._diag(
                        Level.ERROR,
                        "3a.marketplace-json.plugin-type",
                        f"plugins[{i}] must be an object",
                        path,
                    )
                )
                continue
            source = plugin.get("source")
            if source is None:
                diags.append(
                    self._diag(
                        Level.WARNING,
                        "3a.marketplace-json.plugin-source",
                        f"plugins[{i}] missing 'source' field",
                        path,
                    )
                )
            elif not isinstance(source, str):
                diags.append(
                    self._diag(
                        Level.ERROR,
                        "3a.marketplace-json.plugin-source-type",
                        f"plugins[{i}] 'source' must be a string, got {type(source).__name__}",
                        path,
                    )
                )
            elif not (root / source).resolve().is_dir():
                diags.append(
                    self._diag(
                        Level.ERROR,
                        "3a.marketplace-json.plugin-source-missing",
                        f"plugins[{i}] source '{source}' does not resolve to a directory",
                        path,
                    )
                )
        return diags

    def _check_consistency(self, plugin_dir: Path) -> list[Diagnostic]:
        """Cross-check plugin.json vs marketplace.json shared fields."""
        diags: list[Diagnostic] = []
        pj = plugin_dir / "plugin.json"
        mj = plugin_dir / "marketplace.json"

        if not pj.exists() or not mj.exists():
            return diags

        # Unreadable/invalid files are already reported by the per-file
        # checks above; bail out quietly instead of duplicating diagnostics.
        try:
            pj_data = json.loads(pj.read_text())
            mj_data = json.loads(mj.read_text())
        except (json.JSONDecodeError, OSError):
            return diags
        if not isinstance(pj_data, dict) or not isinstance(mj_data, dict):
            return diags

        pj_name = pj_data.get("name", "")
        mj_name = mj_data.get("name", "")
        if pj_name and mj_name and pj_name != mj_name:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.consistency.name",
                    f"name mismatch: plugin.json='{pj_name}' marketplace.json='{mj_name}'",
                    plugin_dir,
                )
            )

        # marketplace.json keeps its description nested under 'metadata'.
        pj_desc = pj_data.get("description", "")
        mj_meta = mj_data.get("metadata", {})
        mj_desc = mj_meta.get("description", "") if isinstance(mj_meta, dict) else ""
        if pj_desc and mj_desc and pj_desc != mj_desc:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.consistency.description",
                    "description mismatch: plugin.json vs marketplace.json metadata.description",
                    plugin_dir,
                )
            )

        pj_version = pj_data.get("version", "")
        mj_version = mj_data.get("version", "")
        if pj_version and mj_version and pj_version != mj_version:
            diags.append(
                self._diag(
                    Level.WARNING,
                    "3a.consistency.version",
                    f"version mismatch: plugin.json='{pj_version}' marketplace.json='{mj_version}'",
                    plugin_dir,
                )
            )

        return diags
|