plugin-scanner 1.4.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- codex_plugin_scanner/__init__.py +29 -0
- codex_plugin_scanner/action_runner.py +470 -0
- codex_plugin_scanner/checks/__init__.py +0 -0
- codex_plugin_scanner/checks/best_practices.py +238 -0
- codex_plugin_scanner/checks/claude.py +285 -0
- codex_plugin_scanner/checks/code_quality.py +115 -0
- codex_plugin_scanner/checks/ecosystem_common.py +34 -0
- codex_plugin_scanner/checks/gemini.py +196 -0
- codex_plugin_scanner/checks/manifest.py +501 -0
- codex_plugin_scanner/checks/manifest_support.py +61 -0
- codex_plugin_scanner/checks/marketplace.py +334 -0
- codex_plugin_scanner/checks/opencode.py +223 -0
- codex_plugin_scanner/checks/operational_security.py +346 -0
- codex_plugin_scanner/checks/security.py +447 -0
- codex_plugin_scanner/checks/skill_security.py +241 -0
- codex_plugin_scanner/cli.py +467 -0
- codex_plugin_scanner/config.py +76 -0
- codex_plugin_scanner/ecosystems/__init__.py +15 -0
- codex_plugin_scanner/ecosystems/base.py +20 -0
- codex_plugin_scanner/ecosystems/claude.py +112 -0
- codex_plugin_scanner/ecosystems/codex.py +94 -0
- codex_plugin_scanner/ecosystems/detect.py +46 -0
- codex_plugin_scanner/ecosystems/gemini.py +80 -0
- codex_plugin_scanner/ecosystems/opencode.py +184 -0
- codex_plugin_scanner/ecosystems/registry.py +41 -0
- codex_plugin_scanner/ecosystems/types.py +45 -0
- codex_plugin_scanner/integrations/__init__.py +5 -0
- codex_plugin_scanner/integrations/cisco_skill_scanner.py +200 -0
- codex_plugin_scanner/lint_fixes.py +105 -0
- codex_plugin_scanner/marketplace_support.py +100 -0
- codex_plugin_scanner/models.py +177 -0
- codex_plugin_scanner/path_support.py +46 -0
- codex_plugin_scanner/policy.py +140 -0
- codex_plugin_scanner/quality_artifact.py +91 -0
- codex_plugin_scanner/repo_detect.py +137 -0
- codex_plugin_scanner/reporting.py +376 -0
- codex_plugin_scanner/rules/__init__.py +6 -0
- codex_plugin_scanner/rules/registry.py +101 -0
- codex_plugin_scanner/rules/specs.py +26 -0
- codex_plugin_scanner/scanner.py +557 -0
- codex_plugin_scanner/submission.py +284 -0
- codex_plugin_scanner/suppressions.py +87 -0
- codex_plugin_scanner/trust_domain_scoring.py +22 -0
- codex_plugin_scanner/trust_helpers.py +207 -0
- codex_plugin_scanner/trust_mcp_scoring.py +116 -0
- codex_plugin_scanner/trust_models.py +85 -0
- codex_plugin_scanner/trust_plugin_scoring.py +180 -0
- codex_plugin_scanner/trust_scoring.py +52 -0
- codex_plugin_scanner/trust_skill_scoring.py +296 -0
- codex_plugin_scanner/trust_specs.py +286 -0
- codex_plugin_scanner/verification.py +964 -0
- codex_plugin_scanner/version.py +3 -0
- plugin_scanner-1.4.15.dist-info/METADATA +596 -0
- plugin_scanner-1.4.15.dist-info/RECORD +57 -0
- plugin_scanner-1.4.15.dist-info/WHEEL +4 -0
- plugin_scanner-1.4.15.dist-info/entry_points.txt +4 -0
- plugin_scanner-1.4.15.dist-info/licenses/LICENSE +120 -0
|
@@ -0,0 +1,964 @@
|
|
|
1
|
+
"""Runtime verification engine for plugin readiness checks."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import json
|
|
6
|
+
import os
|
|
7
|
+
import queue
|
|
8
|
+
import re
|
|
9
|
+
import subprocess
|
|
10
|
+
import threading
|
|
11
|
+
import urllib.error
|
|
12
|
+
import urllib.parse
|
|
13
|
+
import urllib.request
|
|
14
|
+
from contextlib import suppress
|
|
15
|
+
from dataclasses import dataclass, replace
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
|
|
18
|
+
from . import __version__
|
|
19
|
+
from .checks.manifest import load_manifest
|
|
20
|
+
from .checks.manifest_support import safe_manifest_path
|
|
21
|
+
from .marketplace_support import (
|
|
22
|
+
extract_marketplace_source,
|
|
23
|
+
load_marketplace_context,
|
|
24
|
+
marketplace_label,
|
|
25
|
+
validate_marketplace_path_requirements,
|
|
26
|
+
)
|
|
27
|
+
from .models import ScanSkipTarget
|
|
28
|
+
from .path_support import is_safe_relative_path
|
|
29
|
+
from .repo_detect import discover_scan_targets
|
|
30
|
+
|
|
31
|
+
# Matches a markdown inline link and captures the (target) portion of [text](target).
MARKDOWN_LINK_RE = re.compile(r"\[[^]]+\]\(([^)]+)\)")
# Interface metadata fields that must be present as non-empty strings for a
# plugin's interface block to be considered publishable (see _check_manifest).
INTERFACE_REQUIRED_FIELDS = (
    "displayName",
    "shortDescription",
    "developerName",
    "category",
)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@dataclass(frozen=True, slots=True)
class VerificationCase:
    """Outcome of a single readiness check for one plugin component."""

    component: str  # surface that was checked, e.g. "manifest", "mcp", "skills"
    name: str  # human-readable check name
    passed: bool  # whether the check succeeded
    message: str  # explanation included in reports
    classification: str = "pass"  # machine-readable category, e.g. "schema", "optional"
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@dataclass(frozen=True, slots=True)
class RuntimeTrace:
    """Captured transcript of a process spawned during verification."""

    component: str  # owning check family (produced by the mcp checks here)
    name: str  # trace label, e.g. "stdio lifecycle:<server>"
    command: tuple[str, ...]  # argv that was executed
    returncode: int | None  # exit code; None when the process never exited cleanly
    stdout: str  # request/response transcript plus any remaining stdout
    stderr: str  # captured standard error
    timed_out: bool = False  # True when the process exceeded its timeout
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
@dataclass(frozen=True, slots=True)
class VerificationResult:
    """Aggregate verification outcome for one plugin or a whole repository."""

    verify_pass: bool  # overall pass/fail across all cases
    cases: tuple[VerificationCase, ...]  # individual check outcomes
    workspace: str  # path that was verified
    traces: tuple[RuntimeTrace, ...] = ()  # runtime process transcripts
    scope: str = "plugin"  # "plugin" or "repository" (see build_verification_payload)
    plugin_name: str | None = None  # set on per-plugin results in repository scope
    plugin_results: tuple[VerificationResult, ...] = ()  # child results (repository scope)
    skipped_targets: tuple[ScanSkipTarget, ...] = ()  # targets skipped during discovery
    marketplace_file: str | None = None  # marketplace manifest path (repository scope)
|
|
71
|
+
|
|
72
|
+
|
|
73
|
+
def build_verification_payload(result: VerificationResult) -> dict[str, object]:
    """Serialize a :class:`VerificationResult` into a JSON-compatible dict.

    Every payload carries the overall verdict, workspace, scope, and the
    flattened case list.  Repository-scoped results additionally include a
    ``repository`` summary, the per-plugin verdicts, and any targets that
    were skipped during discovery.
    """
    case_entries: list[dict[str, object]] = []
    for item in result.cases:
        case_entries.append(
            {
                "component": item.component,
                "name": item.name,
                "passed": item.passed,
                "message": item.message,
                "classification": item.classification,
            }
        )
    payload: dict[str, object] = {
        "verify_pass": result.verify_pass,
        "workspace": result.workspace,
        "scope": result.scope,
        "cases": case_entries,
    }
    # Plugin-scoped results carry no repository section.
    if result.scope != "repository":
        return payload
    payload["repository"] = {
        "marketplaceFile": result.marketplace_file,
        "localPluginCount": len(result.plugin_results),
    }
    payload["plugins"] = [
        {
            "name": child.plugin_name,
            "workspace": child.workspace,
            "verify_pass": child.verify_pass,
        }
        for child in result.plugin_results
    ]
    payload["skippedTargets"] = [
        {
            "name": entry.name,
            "reason": entry.reason,
            "sourcePath": entry.source_path,
        }
        for entry in result.skipped_targets
    ]
    return payload
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def _read_json(path: Path) -> dict | list | None:
|
|
114
|
+
try:
|
|
115
|
+
return json.loads(path.read_text(encoding="utf-8"))
|
|
116
|
+
except json.JSONDecodeError:
|
|
117
|
+
return None
|
|
118
|
+
|
|
119
|
+
|
|
120
|
+
def _is_safe_relative_asset(plugin_dir: Path, value: str) -> bool:
    """Return True when *value* is a prefixed, existing path inside *plugin_dir*."""
    return is_safe_relative_path(plugin_dir, value, require_exists=True, require_prefix=True)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def _readline_with_timeout(stream, *, timeout: float, command: list[str], transcript: list[str]) -> str:
|
|
125
|
+
result_queue: queue.Queue[str | BaseException] = queue.Queue(maxsize=1)
|
|
126
|
+
|
|
127
|
+
def _reader() -> None:
|
|
128
|
+
try:
|
|
129
|
+
result_queue.put(stream.readline())
|
|
130
|
+
except BaseException as exc: # pragma: no cover - defensive worker handoff
|
|
131
|
+
result_queue.put(exc)
|
|
132
|
+
|
|
133
|
+
thread = threading.Thread(target=_reader, daemon=True)
|
|
134
|
+
thread.start()
|
|
135
|
+
try:
|
|
136
|
+
result = result_queue.get(timeout=timeout)
|
|
137
|
+
except queue.Empty as exc:
|
|
138
|
+
raise subprocess.TimeoutExpired(command, timeout, output="\n".join(transcript)) from exc
|
|
139
|
+
if isinstance(result, BaseException):
|
|
140
|
+
raise result
|
|
141
|
+
return result
|
|
142
|
+
|
|
143
|
+
|
|
144
|
+
def _check_manifest(plugin_dir: Path) -> list[VerificationCase]:
    """Validate ``.codex-plugin/plugin.json`` and its interface metadata.

    Hard failures (missing file, invalid JSON, non-object payload) short-circuit
    with a single failing case; otherwise a case is appended per aspect:
    required fields, interface metadata, capabilities, and declared assets.
    """
    manifest_path = plugin_dir / ".codex-plugin" / "plugin.json"
    if not manifest_path.exists():
        return [
            VerificationCase(
                "manifest",
                "plugin.json exists",
                False,
                ".codex-plugin/plugin.json is missing",
                "missing-manifest",
            )
        ]

    payload = _read_json(manifest_path)
    if payload is None:
        return [
            VerificationCase(
                "manifest",
                "plugin.json parses",
                False,
                "Invalid .codex-plugin/plugin.json",
                "invalid-json",
            )
        ]
    if not isinstance(payload, dict):
        return [
            VerificationCase(
                "manifest",
                "plugin.json shape",
                False,
                ".codex-plugin/plugin.json must be an object",
                "schema",
            )
        ]

    cases = [
        VerificationCase("manifest", "plugin.json parses", True, ".codex-plugin/plugin.json is valid JSON"),
    ]
    # Required top-level fields must be present as non-empty strings.
    missing_required = [
        field
        for field in ("name", "version", "description")
        if not isinstance(payload.get(field), str) or not payload.get(field)
    ]
    cases.append(
        VerificationCase(
            "manifest",
            "required fields",
            not missing_required,
            "All required manifest fields are present"
            if not missing_required
            else f"Missing required manifest fields: {', '.join(missing_required)}",
            "schema" if missing_required else "pass",
        )
    )

    # Interface metadata is optional; its absence passes with an "optional" tag.
    interface = payload.get("interface")
    if interface is None:
        cases.append(
            VerificationCase(
                "manifest",
                "interface metadata",
                True,
                "interface metadata not declared",
                "optional",
            )
        )
        return cases

    if not isinstance(interface, dict):
        cases.append(
            VerificationCase(
                "manifest",
                "interface metadata",
                False,
                "interface must be an object",
                "schema",
            )
        )
        return cases

    # When an interface block is declared, its publishable fields become mandatory.
    missing_interface = [
        field
        for field in INTERFACE_REQUIRED_FIELDS
        if not isinstance(interface.get(field), str) or not interface.get(field)
    ]
    cases.append(
        VerificationCase(
            "manifest",
            "interface metadata",
            not missing_interface,
            "interface metadata is publishable"
            if not missing_interface
            else f"Missing interface fields: {', '.join(missing_interface)}",
            "schema" if missing_interface else "pass",
        )
    )

    # Capabilities must be a non-empty array of non-empty strings.
    capabilities = interface.get("capabilities")
    capabilities_valid = (
        isinstance(capabilities, list)
        and bool(capabilities)
        and all(isinstance(item, str) and item for item in capabilities)
    )
    cases.append(
        VerificationCase(
            "manifest",
            "capability enumeration",
            capabilities_valid,
            "Capabilities are declared for discovery"
            if capabilities_valid
            else "interface.capabilities must be a non-empty string array",
            "schema" if not capabilities_valid else "pass",
        )
    )

    # Collect every declared asset reference (icon, logo, screenshots) and
    # verify each one resolves safely inside the plugin directory.
    asset_refs: list[str] = []
    for field in ("composerIcon", "logo"):
        value = interface.get(field)
        if isinstance(value, str) and value:
            asset_refs.append(value)
    screenshots = interface.get("screenshots")
    if isinstance(screenshots, list):
        asset_refs.extend(value for value in screenshots if isinstance(value, str) and value)
    missing_assets = [value for value in asset_refs if not _is_safe_relative_asset(plugin_dir, value)]
    cases.append(
        VerificationCase(
            "manifest",
            "interface assets",
            not missing_assets,
            "Declared interface assets resolve inside the plugin"
            if not missing_assets
            else f"Missing or unsafe interface assets: {', '.join(missing_assets)}",
            "asset-missing" if missing_assets else "pass",
        )
    )
    return cases
|
|
280
|
+
|
|
281
|
+
|
|
282
|
+
def _check_marketplace(plugin_dir: Path) -> list[VerificationCase]:
    """Validate the optional marketplace manifest for a plugin directory.

    Delegates loading to :func:`load_marketplace_context`; parse/shape errors
    from it become single failing cases, and a missing manifest passes as
    optional.  For a loaded manifest the name, plugin list, per-entry
    discoverability (legacy source refs / path requirements), and policy
    metadata are each reported as a case.
    """
    try:
        context = load_marketplace_context(plugin_dir)
    except json.JSONDecodeError:
        return [
            VerificationCase(
                "marketplace",
                "marketplace manifest parses",
                False,
                "Invalid marketplace manifest",
                "invalid-json",
            )
        ]
    except ValueError:
        return [
            VerificationCase(
                "marketplace",
                "marketplace manifest shape",
                False,
                "Marketplace manifest must be a JSON object",
                "schema",
            )
        ]

    # No manifest at all is fine — marketplace metadata is optional.
    if context is None:
        return [
            VerificationCase(
                "marketplace",
                "marketplace optional",
                True,
                "No marketplace manifest present",
                "optional",
            )
        ]

    file_label = marketplace_label(context)
    # Legacy-format manifests still pass, but their cases are tagged
    # "compatibility" instead of "pass".
    compatibility_message = " (legacy compatibility mode)" if context.legacy else ""
    cases = [
        VerificationCase(
            "marketplace",
            "marketplace manifest parses",
            True,
            f"{file_label} is valid JSON{compatibility_message}",
            "compatibility" if context.legacy else "pass",
        )
    ]
    has_name = isinstance(context.payload.get("name"), str) and bool(context.payload.get("name"))
    cases.append(
        VerificationCase(
            "marketplace",
            "marketplace name",
            has_name,
            "Marketplace name is declared" if has_name else f'{file_label} must declare a string "name"',
            "schema" if not has_name else ("compatibility" if context.legacy else "pass"),
        )
    )

    plugins = context.payload.get("plugins")
    if not isinstance(plugins, list) or not plugins:
        cases.append(
            VerificationCase(
                "marketplace",
                "plugins listed",
                False,
                "plugins array missing/empty",
                "schema",
            )
        )
        return cases

    cases.append(VerificationCase("marketplace", "plugins listed", True, "plugins found"))
    discovery_issues: list[str] = []
    policy_issues: list[str] = []
    for index, plugin in enumerate(plugins):
        if not isinstance(plugin, dict):
            discovery_issues.append(f"plugin[{index}] must be an object")
            continue
        # Legacy manifests must carry a resolvable source reference; otherwise
        # the entry's declared path requirements are validated instead.
        if context.legacy:
            source_ref, _source_path = extract_marketplace_source(plugin)
            if not source_ref:
                discovery_issues.append(f"plugin[{index}] missing source")
        else:
            issue = validate_marketplace_path_requirements(context, plugin)
            if issue is not None:
                discovery_issues.append(f"plugin[{index}] {issue}")
        # Every entry needs a policy object with installation/authentication
        # strings plus a category for listing purposes.
        policy = plugin.get("policy")
        if not isinstance(policy, dict):
            policy_issues.append(f"plugin[{index}] missing policy object")
            continue
        if not isinstance(policy.get("installation"), str) or not policy.get("installation"):
            policy_issues.append(f"plugin[{index}] missing policy.installation")
        if not isinstance(policy.get("authentication"), str) or not policy.get("authentication"):
            policy_issues.append(f"plugin[{index}] missing policy.authentication")
        if not isinstance(plugin.get("category"), str) or not plugin.get("category"):
            policy_issues.append(f"plugin[{index}] missing category")

    cases.append(
        VerificationCase(
            "marketplace",
            "discovery simulation",
            not discovery_issues,
            "Marketplace entries are discoverable" if not discovery_issues else "; ".join(discovery_issues),
            "schema" if discovery_issues else ("compatibility" if context.legacy else "pass"),
        )
    )
    cases.append(
        VerificationCase(
            "marketplace",
            "policy metadata",
            not policy_issues,
            "Marketplace policy metadata is complete" if not policy_issues else "; ".join(policy_issues),
            "schema" if policy_issues else ("compatibility" if context.legacy else "pass"),
        )
    )
    return cases
|
|
397
|
+
|
|
398
|
+
|
|
399
|
+
def _check_mcp_http(remotes: list[dict], *, online: bool) -> list[VerificationCase]:
    """Probe each remote MCP endpoint declared in ``.mcp.json``.

    Non-HTTPS schemes fail immediately.  With ``online=True`` a GET request is
    issued per URL: auth challenges (401/403) pass as "auth-required", 2xx/3xx
    pass as reachable, and anything else fails as a transport problem.  With
    ``online=False`` every probe is skipped and passes as "offline-skip".
    """
    cases: list[VerificationCase] = []
    for remote in remotes:
        url = str(remote.get("url", ""))
        if not url:
            continue
        parsed = urllib.parse.urlparse(url)
        # Only an explicit non-https scheme is rejected; scheme-less values
        # fall through to the probe (and fail there as a transport error).
        if parsed.scheme and parsed.scheme != "https":
            cases.append(
                VerificationCase("mcp", "remote scheme", False, f"Insecure scheme in {url}", "insecure-scheme")
            )
            continue
        if online:
            try:
                req = urllib.request.Request(url, method="GET")
                with urllib.request.urlopen(req, timeout=3) as resp:
                    # NOTE(review): the default opener raises HTTPError for
                    # 4xx responses, so this in-body 401/403 branch is
                    # defensive; the HTTPError handler below is the usual path.
                    if resp.status in (401, 403):
                        cases.append(
                            VerificationCase(
                                "mcp",
                                "remote auth",
                                True,
                                f"Auth required for {url}",
                                "auth-required",
                            )
                        )
                    elif 200 <= resp.status < 400:
                        cases.append(VerificationCase("mcp", "remote reachability", True, f"Reachable: {url}"))
                    else:
                        cases.append(
                            VerificationCase(
                                "mcp",
                                "remote reachability",
                                False,
                                f"HTTP {resp.status} for {url}",
                                "transport",
                            )
                        )
            except urllib.error.HTTPError as exc:
                # An auth challenge still proves the endpoint is alive.
                if exc.code in (401, 403):
                    cases.append(
                        VerificationCase(
                            "mcp",
                            "remote auth",
                            True,
                            f"Auth required for {url}",
                            "auth-required",
                        )
                    )
                else:
                    cases.append(
                        VerificationCase(
                            "mcp",
                            "remote reachability",
                            False,
                            f"HTTP error for {url}: {exc.code}",
                            "transport",
                        )
                    )
            except Exception as exc:
                # DNS failures, timeouts, TLS errors, malformed URLs, etc.
                cases.append(
                    VerificationCase(
                        "mcp",
                        "remote reachability",
                        False,
                        f"Transport failure for {url}: {exc}",
                        "transport",
                    )
                )
        else:
            cases.append(
                VerificationCase(
                    "mcp",
                    "remote reachability",
                    True,
                    f"Offline mode skipped: {url}",
                    "offline-skip",
                )
            )
    return cases
|
|
479
|
+
|
|
480
|
+
|
|
481
|
+
def _check_mcp_stdio(servers: dict) -> tuple[list[VerificationCase], list[RuntimeTrace]]:
    """Exercise each declared stdio MCP server through an initialize handshake.

    For every server with a command, the process is spawned and driven through
    the MCP lifecycle: ``initialize`` request, ``notifications/initialized``,
    then a ``*/list`` probe for each capability the server advertised.  Each
    exchange is recorded in a transcript that becomes a :class:`RuntimeTrace`.
    Spawn errors, timeouts, protocol failures, and successes each yield a
    distinct :class:`VerificationCase` classification.
    """
    cases: list[VerificationCase] = []
    traces: list[RuntimeTrace] = []
    for name, server in servers.items():
        cmd = server.get("command") if isinstance(server, dict) else None
        args = server.get("args", []) if isinstance(server, dict) and isinstance(server.get("args", []), list) else []
        # Entries without a command are silently skipped (nothing to run).
        if not cmd:
            continue
        command = [str(cmd), *[str(arg) for arg in args]]
        try:
            proc = subprocess.Popen(
                command,
                stdin=subprocess.PIPE,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                text=True,
                env=os.environ.copy(),
            )
        except Exception as exc:
            # Could not even start the process (missing binary, bad perms...).
            cases.append(VerificationCase("mcp", f"stdio spawn:{name}", False, str(exc), "spawn-failure"))
            traces.append(
                RuntimeTrace(
                    component="mcp",
                    name=f"stdio spawn:{name}",
                    command=tuple(command),
                    returncode=None,
                    stdout="",
                    stderr=str(exc),
                )
            )
            continue
        # Transcript of "> request" / "< response" lines for the trace.
        transcript: list[str] = []
        try:
            if proc.stdin is None or proc.stdout is None or proc.stderr is None:
                raise RuntimeError("stdio server did not expose all pipes")
            initialize_request = {
                "jsonrpc": "2.0",
                "id": 1,
                "method": "initialize",
                "params": {
                    "protocolVersion": "2024-11-05",
                    "capabilities": {"tools": {}, "resources": {}, "prompts": {}},
                    "clientInfo": {"name": "codex-plugin-scanner", "version": __version__},
                },
            }
            proc.stdin.write(json.dumps(initialize_request) + "\n")
            proc.stdin.flush()
            transcript.append("> " + json.dumps(initialize_request))

            initialize_response_line = _readline_with_timeout(
                proc.stdout,
                timeout=2,
                command=command,
                transcript=transcript,
            )
            if not initialize_response_line:
                raise RuntimeError("server did not respond to initialize")
            transcript.append("< " + initialize_response_line.strip())
            initialize_response = json.loads(initialize_response_line)
            result_payload = initialize_response.get("result")
            if not isinstance(result_payload, dict):
                raise RuntimeError("server returned an invalid initialize result")

            # Per MCP lifecycle, acknowledge initialization before probing.
            initialized_notification = {"jsonrpc": "2.0", "method": "notifications/initialized", "params": {}}
            proc.stdin.write(json.dumps(initialized_notification) + "\n")
            proc.stdin.flush()
            transcript.append("> " + json.dumps(initialized_notification))

            # Probe only the list endpoints the server claims to support.
            capabilities = result_payload.get("capabilities")
            probe_methods = (
                ("tools/list", "tools"),
                ("resources/list", "resources"),
                ("prompts/list", "prompts"),
            )
            request_id = 2
            if isinstance(capabilities, dict):
                for method, key in probe_methods:
                    if key not in capabilities:
                        continue
                    request = {"jsonrpc": "2.0", "id": request_id, "method": method, "params": {}}
                    request_id += 1
                    proc.stdin.write(json.dumps(request) + "\n")
                    proc.stdin.flush()
                    transcript.append("> " + json.dumps(request))
                    response_line = _readline_with_timeout(
                        proc.stdout,
                        timeout=2,
                        command=command,
                        transcript=transcript,
                    )
                    if not response_line:
                        raise RuntimeError(f"server did not respond to {method}")
                    transcript.append("< " + response_line.strip())
                    # Parse for validity only; the payload itself is not inspected.
                    json.loads(response_line)

            # Closing stdin signals EOF so a well-behaved server exits.
            proc.stdin.close()
            proc.wait(timeout=2)
            stdout = proc.stdout.read()
            stderr = proc.stderr.read()
            transcript_output = "\n".join(transcript)
            if stdout:
                transcript_output = f"{transcript_output}\n{stdout}".strip()
            traces.append(
                RuntimeTrace(
                    component="mcp",
                    name=f"stdio lifecycle:{name}",
                    command=tuple(command),
                    returncode=proc.returncode,
                    stdout=transcript_output,
                    stderr=stderr,
                )
            )
            if proc.returncode not in (0, None):
                cases.append(
                    VerificationCase(
                        "mcp",
                        f"stdio initialize:{name}",
                        False,
                        stderr or "non-zero exit",
                        "spawn-failure",
                    )
                )
            # Heuristic: any "error" in the transcript marks a protocol failure.
            elif "error" in transcript_output.lower():
                cases.append(
                    VerificationCase(
                        "mcp",
                        f"stdio initialize:{name}",
                        False,
                        transcript_output.strip(),
                        "protocol-failure",
                    )
                )
            else:
                cases.append(VerificationCase("mcp", f"stdio initialize:{name}", True, "initialize completed"))
        except subprocess.TimeoutExpired as exc:
            # Raised by _readline_with_timeout or proc.wait; kill and record.
            proc.kill()
            stdout = exc.stdout if isinstance(exc.stdout, str) else ""
            stderr = exc.stderr if isinstance(exc.stderr, str) else ""
            traces.append(
                RuntimeTrace(
                    component="mcp",
                    name=f"stdio timeout:{name}",
                    command=tuple(command),
                    returncode=None,
                    stdout=stdout,
                    stderr=stderr,
                    timed_out=True,
                )
            )
            cases.append(VerificationCase("mcp", f"stdio timeout:{name}", False, "process timed out", "timeout"))
        except Exception as exc:
            # Any other failure: reap the process and preserve the transcript.
            if proc.poll() is None:
                proc.kill()
                with suppress(Exception):
                    proc.wait(timeout=1)
            stdout = proc.stdout.read() if proc.stdout is not None else ""
            stderr = proc.stderr.read() if proc.stderr is not None else ""
            transcript_output = "\n".join(transcript)
            if stdout:
                transcript_output = f"{transcript_output}\n{stdout}".strip()
            traces.append(
                RuntimeTrace(
                    component="mcp",
                    name=f"stdio lifecycle:{name}",
                    command=tuple(command),
                    returncode=proc.returncode,
                    stdout=transcript_output,
                    stderr=stderr or str(exc),
                )
            )
            cases.append(VerificationCase("mcp", f"stdio run:{name}", False, str(exc), "spawn-failure"))
    return cases, traces
|
|
653
|
+
|
|
654
|
+
|
|
655
|
+
def _check_mcp(plugin_dir: Path, *, online: bool) -> tuple[list[VerificationCase], list[RuntimeTrace]]:
    """Validate ``.mcp.json`` and exercise its remote and stdio surfaces.

    A missing file passes as optional.  Otherwise the payload is parsed,
    shape-checked, and its ``remotes``/``mcpServers`` sections are handed to
    the HTTP and stdio checkers respectively.
    """
    config_path = plugin_dir / ".mcp.json"
    if not config_path.exists():
        return [VerificationCase("mcp", ".mcp.json optional", True, ".mcp.json not present", "optional")], []

    data = _read_json(config_path)
    if data is None:
        return [VerificationCase("mcp", ".mcp.json parses", False, "Invalid .mcp.json", "invalid-json")], []
    if not isinstance(data, dict):
        return [VerificationCase("mcp", ".mcp.json shape", False, ".mcp.json must be an object", "schema")], []

    results = [VerificationCase("mcp", ".mcp.json parses", True, ".mcp.json is valid JSON")]
    remote_list = data.get("remotes", [])
    server_map = data.get("mcpServers", {})
    # Bad section shapes are reported but then replaced with empty values so
    # the remaining checks can still run.
    if not isinstance(remote_list, list):
        results.append(VerificationCase("mcp", "remote list", False, "remotes must be an array", "schema"))
        remote_list = []
    if not isinstance(server_map, dict):
        results.append(VerificationCase("mcp", "server registry", False, "mcpServers must be an object", "schema"))
        server_map = {}
    results.extend(_check_mcp_http(remote_list, online=online))
    stdio_results, stdio_traces = _check_mcp_stdio(server_map)
    results.extend(stdio_results)
    # Only the parse case means the file declared nothing to verify.
    if len(results) == 1:
        results.append(VerificationCase("mcp", "mcp config", True, "No remote or stdio MCP surfaces declared"))
    return results, stdio_traces
|
|
681
|
+
|
|
682
|
+
|
|
683
|
+
def _check_skills(plugin_dir: Path) -> list[VerificationCase]:
    """Validate the skills directory declared in the plugin manifest.

    The ``skills`` manifest field must be a safe relative path to an existing
    directory containing at least one ``SKILL.md``.  Each skill file is then
    checked for YAML-style frontmatter (name/description) and for markdown
    links that resolve inside the plugin directory.
    """
    manifest = load_manifest(plugin_dir)
    if manifest is None:
        return [
            VerificationCase(
                "skills",
                "skills optional",
                True,
                "Manifest unavailable; skills verification skipped",
                "optional",
            )
        ]
    skills_root = manifest.get("skills")
    if not isinstance(skills_root, str) or not skills_root:
        return [VerificationCase("skills", "skills optional", True, "No skills field declared", "optional")]
    # Reject paths that would escape the plugin directory.
    if not safe_manifest_path(plugin_dir, skills_root):
        return [
            VerificationCase(
                "skills",
                "skills directory",
                False,
                f'Skills path "{skills_root}" must stay within the plugin and start with "./"',
                "schema",
            )
        ]

    skills_dir = plugin_dir / skills_root
    if not skills_dir.exists():
        return [
            VerificationCase(
                "skills",
                "skills directory",
                False,
                f'Skills directory "{skills_root}" not found',
                "missing-skill",
            )
        ]

    # sorted() keeps issue ordering deterministic across filesystems.
    skill_files = sorted(skills_dir.rglob("SKILL.md"))
    if not skill_files:
        return [VerificationCase("skills", "skill manifests", False, "No SKILL.md found", "missing-skill")]

    frontmatter_issues: list[str] = []
    reference_issues: list[str] = []
    for skill_file in skill_files:
        try:
            content = skill_file.read_text(encoding="utf-8")
        except OSError as exc:
            frontmatter_issues.append(f"{skill_file.relative_to(plugin_dir)} unreadable: {exc}")
            continue
        # Frontmatter is the text between the first two "---" delimiters.
        parts = content.split("---", 2)
        if len(parts) < 3:
            frontmatter_issues.append(str(skill_file.relative_to(plugin_dir)))
        else:
            frontmatter = parts[1]
            # Textual presence check only; the YAML is not actually parsed.
            if "name:" not in frontmatter or "description:" not in frontmatter:
                frontmatter_issues.append(str(skill_file.relative_to(plugin_dir)))
        # Every relative markdown link must stay inside the plugin and exist.
        for match in MARKDOWN_LINK_RE.finditer(content):
            target = match.group(1).strip()
            if not target or target.startswith(("#", "http://", "https://", "mailto:")):
                continue
            candidate = (skill_file.parent / target).resolve()
            try:
                candidate.relative_to(plugin_dir.resolve())
            except ValueError:
                # Link escapes the plugin directory.
                reference_issues.append(f"{skill_file.relative_to(plugin_dir)} -> {target}")
                continue
            if not candidate.exists():
                reference_issues.append(f"{skill_file.relative_to(plugin_dir)} -> {target}")

    return [
        VerificationCase(
            "skills",
            "skill manifests",
            True,
            f"{len(skill_files)} skill manifest(s) found",
        ),
        VerificationCase(
            "skills",
            "skill frontmatter",
            not frontmatter_issues,
            "All skill manifests contain frontmatter" if not frontmatter_issues else "; ".join(frontmatter_issues),
            "frontmatter" if frontmatter_issues else "pass",
        ),
        VerificationCase(
            "skills",
            "skill references",
            not reference_issues,
            "Skill references resolve within the plugin" if not reference_issues else "; ".join(reference_issues),
            "reference" if reference_issues else "pass",
        ),
    ]
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
def _check_apps(plugin_dir: Path) -> list[VerificationCase]:
    """Validate the optional ``.app.json`` app registry inside *plugin_dir*.

    Returns a list of verification cases: a parse case and, when an
    ``apps`` array is present, a schema case for its entries.  Each entry
    must be an object with a non-empty string ``name`` plus at least one
    non-empty string ``command`` or ``url``.
    """
    config_path = plugin_dir / ".app.json"
    if not config_path.exists():
        # The registry is optional; absence is a pass.
        return [VerificationCase("apps", "apps optional", True, ".app.json not present", "optional")]

    data = _read_json(config_path)
    if data is None:
        return [VerificationCase("apps", ".app.json parses", False, "Invalid .app.json", "invalid-json")]
    if not isinstance(data, dict):
        return [VerificationCase("apps", ".app.json shape", False, ".app.json must be an object", "schema")]

    registry = data.get("apps")
    if registry is None:
        # A config with no "apps" key is still a valid file.
        return [VerificationCase("apps", ".app.json parses", True, ".app.json valid")]
    if not isinstance(registry, list):
        return [VerificationCase("apps", "apps registry", False, ".app.json apps must be an array", "schema")]

    bad_indexes: list[str] = []
    for position, item in enumerate(registry):
        if not isinstance(item, dict):
            bad_indexes.append(str(position))
            continue
        app_name = item.get("name")
        if not isinstance(app_name, str) or not app_name:
            bad_indexes.append(str(position))
            continue
        launchers = (item.get("command"), item.get("url"))
        # At least one launcher field must be a non-empty string.
        if not any(isinstance(value, str) and value for value in launchers):
            bad_indexes.append(str(position))

    registry_case = VerificationCase(
        "apps",
        "apps registry",
        not bad_indexes,
        "App entries are valid" if not bad_indexes else f"Invalid app entries: {', '.join(bad_indexes)}",
        "schema" if bad_indexes else "pass",
    )
    return [VerificationCase("apps", ".app.json parses", True, ".app.json valid"), registry_case]
|
|
810
|
+
|
|
811
|
+
|
|
812
|
+
def _check_assets(plugin_dir: Path) -> list[VerificationCase]:
    """Flag zero-byte files anywhere under the optional ``assets`` directory."""
    assets_dir = plugin_dir / "assets"
    if not assets_dir.exists():
        # Assets are optional; a missing directory passes.
        return [VerificationCase("assets", "assets optional", True, "assets directory not present", "optional")]

    empty_files: list[str] = []
    for entry in assets_dir.rglob("*"):
        if entry.is_file() and entry.stat().st_size == 0:
            empty_files.append(entry.name)

    message = (
        "asset files are non-empty"
        if not empty_files
        else f"Zero-byte assets: {', '.join(empty_files)}"
    )
    return [
        VerificationCase(
            "assets",
            "asset size",
            not empty_files,
            message,
            "zero-byte" if empty_files else "pass",
        )
    ]
|
|
826
|
+
|
|
827
|
+
|
|
828
|
+
def _verify_single_plugin(plugin_dir: Path, *, online: bool) -> VerificationResult:
    """Run every component check against one plugin directory.

    ``online`` is forwarded to the MCP check, which is the only checker that
    may perform network activity and the only source of runtime traces.
    """
    root = plugin_dir.resolve()
    mcp_cases, mcp_traces = _check_mcp(root, online=online)

    collected: list[VerificationCase] = []
    collected.extend(_check_manifest(root))
    collected.extend(_check_marketplace(root))
    collected.extend(mcp_cases)
    collected.extend(_check_skills(root))
    collected.extend(_check_apps(root))
    collected.extend(_check_assets(root))

    return VerificationResult(
        verify_pass=all(item.passed for item in collected),
        cases=tuple(collected),
        workspace=str(root),
        traces=tuple(mcp_traces),
        scope="plugin",
    )
|
|
846
|
+
|
|
847
|
+
|
|
848
|
+
def _prefixed_cases(plugin_name: str, cases: tuple[VerificationCase, ...]) -> tuple[VerificationCase, ...]:
    """Return copies of *cases* with ``plugin_name`` prepended to each case name."""
    prefixed: list[VerificationCase] = []
    for item in cases:
        prefixed.append(
            VerificationCase(
                component=item.component,
                name=f"{plugin_name} · {item.name}",
                passed=item.passed,
                message=item.message,
                classification=item.classification,
            )
        )
    return tuple(prefixed)
|
|
859
|
+
|
|
860
|
+
|
|
861
|
+
def _prefixed_traces(plugin_name: str, traces: tuple[RuntimeTrace, ...]) -> tuple[RuntimeTrace, ...]:
    """Return copies of *traces* with ``plugin_name`` prepended to each trace name."""
    prefixed: list[RuntimeTrace] = []
    for item in traces:
        prefixed.append(
            RuntimeTrace(
                component=item.component,
                name=f"{plugin_name} · {item.name}",
                command=item.command,
                returncode=item.returncode,
                stdout=item.stdout,
                stderr=item.stderr,
                timed_out=item.timed_out,
            )
        )
    return tuple(prefixed)
|
|
874
|
+
|
|
875
|
+
|
|
876
|
+
def _verify_repository(repo_root: Path, *, online: bool) -> VerificationResult:
    """Verify every plugin discovered under *repo_root* plus the marketplace file.

    Each plugin's cases and traces are prefixed with the plugin name so the
    aggregated report stays readable.  The repository passes only when every
    case passes AND at least one plugin was discovered.
    """
    discovery = discover_scan_targets(repo_root)
    marketplace_cases = tuple(_check_marketplace(repo_root))

    per_plugin: list[VerificationResult] = []
    for target in discovery.local_plugins:
        result = _verify_single_plugin(target.plugin_dir, online=online)
        # Stamp the discovered name onto the per-plugin result.
        per_plugin.append(replace(result, plugin_name=target.name))
    plugin_results = tuple(per_plugin)

    plugin_cases: list[VerificationCase] = []
    plugin_traces: list[RuntimeTrace] = []
    for result in plugin_results:
        label = result.plugin_name or "plugin"
        plugin_cases.extend(_prefixed_cases(label, result.cases))
        plugin_traces.extend(_prefixed_traces(label, result.traces))

    combined = marketplace_cases + tuple(plugin_cases)
    overall = bool(plugin_results) and all(item.passed for item in combined)
    return VerificationResult(
        verify_pass=overall,
        cases=combined,
        workspace=str(repo_root),
        traces=tuple(plugin_traces),
        scope="repository",
        plugin_results=plugin_results,
        skipped_targets=discovery.skipped_targets,
        marketplace_file=str(discovery.marketplace_file) if discovery.marketplace_file else None,
    )
|
|
908
|
+
|
|
909
|
+
|
|
910
|
+
def verify_plugin(plugin_dir: str | Path, *, online: bool = False) -> VerificationResult:
    """Verify *plugin_dir*, dispatching to repository-wide verification when
    target discovery reports a repository scope."""
    root = Path(plugin_dir).resolve()
    if discover_scan_targets(root).scope == "repository":
        return _verify_repository(root, online=online)
    return _verify_single_plugin(root, online=online)
|
|
916
|
+
|
|
917
|
+
|
|
918
|
+
def build_doctor_report(plugin_dir: str | Path, component: str) -> dict[str, object]:
    """Assemble an offline doctor report for *component* (``"all"`` selects everything).

    Runs verification with ``online=False``, filters cases and runtime traces
    down to the requested component, and renders combined stdout/stderr logs
    plus timeout markers for the selected traces.
    """
    root = Path(plugin_dir).resolve()
    result = verify_plugin(root, online=False)

    # Cases limited to the requested component.
    selected_cases: list[dict[str, object]] = []
    for case in result.cases:
        if component != "all" and case.component != component:
            continue
        selected_cases.append(
            {
                "name": case.name,
                "passed": case.passed,
                "message": case.message,
                "classification": case.classification,
            }
        )

    # Traces: "all" and "mcp" include every trace; otherwise match the component.
    selected_traces: list[dict[str, object]] = []
    for trace in result.traces:
        if component not in {"all", "mcp"} and trace.component != component:
            continue
        selected_traces.append(
            {
                "name": trace.name,
                "command": list(trace.command),
                "returncode": trace.returncode,
                "stdout": trace.stdout,
                "stderr": trace.stderr,
                "timed_out": trace.timed_out,
            }
        )

    def _render_log(stream: str) -> str:
        # One "[name]\n$ command\noutput" section per trace with output on *stream*.
        sections = [
            f"[{entry['name']}]\n$ {' '.join(entry['command'])}\n{entry[stream]}".rstrip()
            for entry in selected_traces
            if entry[stream]
        ]
        return "\n\n".join(sections)

    stdout_log = _render_log("stdout")
    stderr_log = _render_log("stderr")
    timed_out_names = [entry["name"] for entry in selected_traces if entry["timed_out"]]

    return {
        "plugin_dir": str(root),
        "component": component,
        "verify_pass": result.verify_pass,
        "workspace": result.workspace,
        "cases": selected_cases,
        "runtime_traces": selected_traces,
        "stdout_log": f"{stdout_log}\n" if stdout_log else "",
        "stderr_log": f"{stderr_log}\n" if stderr_log else "",
        "timeout_markers": "none\n" if not timed_out_names else "\n".join(timed_out_names) + "\n",
    }
|