open-research-protocol 0.4.7 → 0.4.8
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +9 -0
- package/cli/orp.py +668 -43
- package/docs/ORP_REASONING_KERNEL_AGENT_PILOT.md +125 -0
- package/docs/ORP_REASONING_KERNEL_AGENT_REPLICATION.md +97 -0
- package/docs/ORP_REASONING_KERNEL_CANONICAL_CONTINUATION_PILOT.md +100 -0
- package/docs/ORP_REASONING_KERNEL_COMPARISON_PILOT.md +116 -0
- package/docs/ORP_REASONING_KERNEL_CONTINUATION_PILOT.md +86 -0
- package/docs/ORP_REASONING_KERNEL_EVALUATION_PLAN.md +261 -0
- package/docs/ORP_REASONING_KERNEL_EVIDENCE_MATRIX.md +131 -0
- package/docs/ORP_REASONING_KERNEL_EVOLUTION.md +123 -0
- package/docs/ORP_REASONING_KERNEL_PICKUP_PILOT.md +107 -0
- package/docs/ORP_REASONING_KERNEL_TECHNICAL_VALIDATION.md +140 -22
- package/docs/ORP_REASONING_KERNEL_V0_1.md +11 -0
- package/docs/benchmarks/orp_reasoning_kernel_agent_pilot_v0_1.json +796 -0
- package/docs/benchmarks/orp_reasoning_kernel_agent_replication_task_smoke.json +487 -0
- package/docs/benchmarks/orp_reasoning_kernel_agent_replication_v0_1.json +1927 -0
- package/docs/benchmarks/orp_reasoning_kernel_agent_replication_v0_2.json +10217 -0
- package/docs/benchmarks/orp_reasoning_kernel_canonical_continuation_task_smoke.json +174 -0
- package/docs/benchmarks/orp_reasoning_kernel_canonical_continuation_v0_1.json +598 -0
- package/docs/benchmarks/orp_reasoning_kernel_comparison_v0_1.json +688 -0
- package/docs/benchmarks/orp_reasoning_kernel_continuation_task_smoke.json +150 -0
- package/docs/benchmarks/orp_reasoning_kernel_continuation_v0_1.json +448 -0
- package/docs/benchmarks/orp_reasoning_kernel_pickup_v0_1.json +594 -0
- package/docs/benchmarks/orp_reasoning_kernel_v0_1_validation.json +769 -41
- package/examples/README.md +2 -0
- package/examples/kernel/comparison/comparison-corpus.json +337 -0
- package/examples/kernel/comparison/next-task-continuation.json +55 -0
- package/examples/kernel/corpus/operations/habanero-routing.checkpoint.kernel.yml +12 -0
- package/examples/kernel/corpus/operations/runner-routing.policy.kernel.yml +9 -0
- package/examples/kernel/corpus/product/project-home.decision.kernel.yml +11 -0
- package/examples/kernel/corpus/research/kernel-handoff.experiment.kernel.yml +16 -0
- package/examples/kernel/corpus/research/lane-drift.hypothesis.kernel.yml +11 -0
- package/examples/kernel/corpus/software/trace-widget.task.kernel.yml +13 -0
- package/examples/kernel/corpus/writing/kernel-launch.result.kernel.yml +12 -0
- package/package.json +4 -1
- package/scripts/orp-kernel-agent-pilot.py +673 -0
- package/scripts/orp-kernel-agent-replication.py +307 -0
- package/scripts/orp-kernel-benchmark.py +471 -2
- package/scripts/orp-kernel-canonical-continuation.py +381 -0
- package/scripts/orp-kernel-ci-check.py +138 -0
- package/scripts/orp-kernel-comparison.py +592 -0
- package/scripts/orp-kernel-continuation-pilot.py +384 -0
- package/scripts/orp-kernel-pickup.py +401 -0
- package/spec/v1/kernel-extension.schema.json +96 -0
- package/spec/v1/kernel-proposal.schema.json +115 -0
- package/spec/v1/kernel.schema.json +2 -1
package/cli/orp.py
CHANGED
|
@@ -111,6 +111,7 @@ ORP_PACKAGE_NAME = _tool_package_name()
|
|
|
111
111
|
DEFAULT_DISCOVER_PROFILE = "orp.profile.default.json"
|
|
112
112
|
DEFAULT_DISCOVER_SCAN_ROOT = "orp/discovery/github"
|
|
113
113
|
DEFAULT_HOSTED_BASE_URL = "https://orp.earth"
|
|
114
|
+
KERNEL_SCHEMA_VERSION = "1.0.0"
|
|
114
115
|
|
|
115
116
|
|
|
116
117
|
class HostedApiError(RuntimeError):
|
|
@@ -4802,6 +4803,11 @@ def _unique_strings(values: list[str]) -> list[str]:
|
|
|
4802
4803
|
return out
|
|
4803
4804
|
|
|
4804
4805
|
|
|
4806
|
+
def _slug_token(text: str, *, fallback: str = "item") -> str:
|
|
4807
|
+
token = re.sub(r"[^a-z0-9]+", "-", str(text or "").strip().lower()).strip("-")
|
|
4808
|
+
return token or fallback
|
|
4809
|
+
|
|
4810
|
+
|
|
4805
4811
|
def _resolve_config_paths(raw_paths: Any, repo_root: Path, vars_map: dict[str, str]) -> list[str]:
|
|
4806
4812
|
out: list[str] = []
|
|
4807
4813
|
if not isinstance(raw_paths, list):
|
|
@@ -5138,6 +5144,8 @@ def _about_payload() -> dict[str, Any]:
|
|
|
5138
5144
|
"config": "spec/v1/orp.config.schema.json",
|
|
5139
5145
|
"packet": "spec/v1/packet.schema.json",
|
|
5140
5146
|
"kernel": "spec/v1/kernel.schema.json",
|
|
5147
|
+
"kernel_proposal": "spec/v1/kernel-proposal.schema.json",
|
|
5148
|
+
"kernel_extension": "spec/v1/kernel-extension.schema.json",
|
|
5141
5149
|
"profile_pack": "spec/v1/profile-pack.schema.json",
|
|
5142
5150
|
"link_project": "spec/v1/link-project.schema.json",
|
|
5143
5151
|
"link_session": "spec/v1/link-session.schema.json",
|
|
@@ -5147,10 +5155,13 @@ def _about_payload() -> dict[str, Any]:
|
|
|
5147
5155
|
"abilities": [
|
|
5148
5156
|
{
|
|
5149
5157
|
"id": "kernel",
|
|
5150
|
-
"description": "Reasoning-kernel artifact scaffolding and
|
|
5158
|
+
"description": "Reasoning-kernel artifact scaffolding, validation, observation, proposal, and migration for promotable repository truth.",
|
|
5151
5159
|
"entrypoints": [
|
|
5152
5160
|
["kernel", "validate"],
|
|
5153
5161
|
["kernel", "scaffold"],
|
|
5162
|
+
["kernel", "stats"],
|
|
5163
|
+
["kernel", "propose"],
|
|
5164
|
+
["kernel", "migrate"],
|
|
5154
5165
|
],
|
|
5155
5166
|
},
|
|
5156
5167
|
{
|
|
@@ -5243,6 +5254,9 @@ def _about_payload() -> dict[str, Any]:
|
|
|
5243
5254
|
{"name": "about", "path": ["about"], "json_output": True},
|
|
5244
5255
|
{"name": "kernel_validate", "path": ["kernel", "validate"], "json_output": True},
|
|
5245
5256
|
{"name": "kernel_scaffold", "path": ["kernel", "scaffold"], "json_output": True},
|
|
5257
|
+
{"name": "kernel_stats", "path": ["kernel", "stats"], "json_output": True},
|
|
5258
|
+
{"name": "kernel_propose", "path": ["kernel", "propose"], "json_output": True},
|
|
5259
|
+
{"name": "kernel_migrate", "path": ["kernel", "migrate"], "json_output": True},
|
|
5246
5260
|
{"name": "auth_login", "path": ["auth", "login"], "json_output": True},
|
|
5247
5261
|
{"name": "auth_verify", "path": ["auth", "verify"], "json_output": True},
|
|
5248
5262
|
{"name": "auth_logout", "path": ["auth", "logout"], "json_output": True},
|
|
@@ -5311,6 +5325,7 @@ def _about_payload() -> dict[str, Any]:
|
|
|
5311
5325
|
"Canonical evidence lives in repo artifact paths outside ORP docs.",
|
|
5312
5326
|
"Default CLI output is human-readable; listed commands with json_output=true also support --json.",
|
|
5313
5327
|
"Reasoning-kernel artifacts shape promotable repository truth for tasks, decisions, hypotheses, experiments, checkpoints, policies, and results.",
|
|
5328
|
+
"Kernel evolution in ORP should stay explicit: observe real usage, propose changes, and migrate artifacts through versioned CLI surfaces rather than silent agent mutation.",
|
|
5314
5329
|
"Discovery profiles in ORP are portable search-intent files managed directly by ORP.",
|
|
5315
5330
|
"Collaboration is a built-in ORP ability exposed through `orp collaborate ...`.",
|
|
5316
5331
|
"Project/session linking is a built-in ORP ability exposed through `orp link ...` and stored machine-locally under `.git/orp/link/`.",
|
|
@@ -5507,13 +5522,20 @@ def _home_payload(repo_root: Path, config_arg: str) -> dict[str, Any]:
|
|
|
5507
5522
|
)
|
|
5508
5523
|
quick_actions.insert(
|
|
5509
5524
|
5,
|
|
5525
|
+
{
|
|
5526
|
+
"label": "Inspect kernel validation pressure across recorded runs",
|
|
5527
|
+
"command": "orp kernel stats --json",
|
|
5528
|
+
},
|
|
5529
|
+
)
|
|
5530
|
+
quick_actions.insert(
|
|
5531
|
+
6,
|
|
5510
5532
|
{
|
|
5511
5533
|
"label": "Mark the repo locally ready after validation",
|
|
5512
5534
|
"command": "orp ready --json",
|
|
5513
5535
|
},
|
|
5514
5536
|
)
|
|
5515
5537
|
quick_actions.insert(
|
|
5516
|
-
|
|
5538
|
+
7,
|
|
5517
5539
|
{
|
|
5518
5540
|
"label": "Inspect local project/session link state",
|
|
5519
5541
|
"command": "orp link status --json",
|
|
@@ -7256,27 +7278,280 @@ def _gate_map(config: dict[str, Any]) -> dict[str, dict[str, Any]]:
|
|
|
7256
7278
|
return out
|
|
7257
7279
|
|
|
7258
7280
|
|
|
7259
|
-
|
|
7260
|
-
|
|
7261
|
-
"decision": ["question", "chosen_path", "rejected_alternatives", "rationale", "consequences"],
|
|
7262
|
-
"hypothesis": ["claim", "boundary", "assumptions", "test_path", "falsifiers"],
|
|
7263
|
-
"experiment": ["objective", "method", "inputs", "outputs", "evidence_expectations", "interpretation_limits"],
|
|
7264
|
-
"checkpoint": ["completed_unit", "current_state", "risks", "next_handoff_target", "artifact_refs"],
|
|
7265
|
-
"policy": ["scope", "rule", "rationale", "invariants", "enforcement_surface"],
|
|
7266
|
-
"result": ["claim", "evidence_paths", "status", "interpretation_limits", "next_follow_up"],
|
|
7267
|
-
}
|
|
7281
|
+
def _kernel_schema_path() -> Path:
|
|
7282
|
+
return Path(__file__).resolve().parent.parent / "spec" / "v1" / "kernel.schema.json"
|
|
7268
7283
|
|
|
7269
7284
|
|
|
7270
|
-
def
|
|
7271
|
-
|
|
7272
|
-
|
|
7273
|
-
|
|
7274
|
-
|
|
7275
|
-
|
|
7276
|
-
|
|
7285
|
+
def _kernel_proposal_schema_path() -> Path:
|
|
7286
|
+
return Path(__file__).resolve().parent.parent / "spec" / "v1" / "kernel-proposal.schema.json"
|
|
7287
|
+
|
|
7288
|
+
|
|
7289
|
+
def _kernel_extension_schema_path() -> Path:
|
|
7290
|
+
return Path(__file__).resolve().parent.parent / "spec" / "v1" / "kernel-extension.schema.json"
|
|
7291
|
+
|
|
7292
|
+
|
|
7293
|
+
def _load_kernel_schema() -> dict[str, Any]:
|
|
7294
|
+
path = _kernel_schema_path()
|
|
7295
|
+
if not path.exists():
|
|
7296
|
+
raise RuntimeError(f"kernel schema is missing: {path}")
|
|
7297
|
+
payload = json.loads(path.read_text(encoding="utf-8"))
|
|
7298
|
+
if not isinstance(payload, dict):
|
|
7299
|
+
raise RuntimeError("kernel schema root must be an object")
|
|
7300
|
+
return payload
|
|
7301
|
+
|
|
7302
|
+
|
|
7303
|
+
def _kernel_schema_metadata() -> tuple[dict[str, list[str]], dict[str, dict[str, Any]], set[str], list[str]]:
|
|
7304
|
+
schema = _load_kernel_schema()
|
|
7305
|
+
properties = schema.get("properties")
|
|
7306
|
+
if not isinstance(properties, dict):
|
|
7307
|
+
raise RuntimeError("kernel schema is missing object properties")
|
|
7308
|
+
ordered_fields = [str(field).strip() for field in properties.keys() if str(field).strip()]
|
|
7309
|
+
|
|
7310
|
+
field_kinds: dict[str, dict[str, Any]] = {}
|
|
7311
|
+
for field, raw in properties.items():
|
|
7312
|
+
if not isinstance(raw, dict):
|
|
7313
|
+
continue
|
|
7314
|
+
if "const" in raw:
|
|
7315
|
+
field_kinds[field] = {"kind": "const", "value": raw.get("const")}
|
|
7316
|
+
continue
|
|
7317
|
+
if "enum" in raw and isinstance(raw.get("enum"), list):
|
|
7318
|
+
field_kinds[field] = {"kind": "enum", "value": list(raw.get("enum", []))}
|
|
7319
|
+
continue
|
|
7320
|
+
ref = raw.get("$ref")
|
|
7321
|
+
if isinstance(ref, str) and ref.startswith("#/$defs/"):
|
|
7322
|
+
field_kinds[field] = {"kind": ref.split("/")[-1]}
|
|
7323
|
+
|
|
7324
|
+
requirements: dict[str, list[str]] = {}
|
|
7325
|
+
raw_all_of = schema.get("allOf")
|
|
7326
|
+
if isinstance(raw_all_of, list):
|
|
7327
|
+
for clause in raw_all_of:
|
|
7328
|
+
if not isinstance(clause, dict):
|
|
7329
|
+
continue
|
|
7330
|
+
raw_if = clause.get("if")
|
|
7331
|
+
raw_then = clause.get("then")
|
|
7332
|
+
if not isinstance(raw_if, dict) or not isinstance(raw_then, dict):
|
|
7333
|
+
continue
|
|
7334
|
+
const = (
|
|
7335
|
+
raw_if.get("properties", {})
|
|
7336
|
+
.get("artifact_class", {})
|
|
7337
|
+
.get("const")
|
|
7338
|
+
)
|
|
7339
|
+
required_fields = raw_then.get("required")
|
|
7340
|
+
if isinstance(const, str) and isinstance(required_fields, list):
|
|
7341
|
+
requirements[const] = [
|
|
7342
|
+
str(field).strip()
|
|
7343
|
+
for field in required_fields
|
|
7344
|
+
if isinstance(field, str) and str(field).strip()
|
|
7345
|
+
]
|
|
7346
|
+
return requirements, field_kinds, set(field_kinds.keys()), ordered_fields
|
|
7347
|
+
|
|
7348
|
+
|
|
7349
|
+
(
|
|
7350
|
+
KERNEL_ARTIFACT_CLASS_REQUIREMENTS,
|
|
7351
|
+
KERNEL_FIELD_KINDS,
|
|
7352
|
+
KERNEL_ALLOWED_FIELDS,
|
|
7353
|
+
KERNEL_FIELD_ORDER,
|
|
7354
|
+
) = _kernel_schema_metadata()
|
|
7355
|
+
|
|
7356
|
+
|
|
7357
|
+
def _kernel_ordered_fields_for_class(artifact_class: str, present_fields: Sequence[str] | None = None) -> list[str]:
|
|
7358
|
+
ordered: list[str] = ["schema_version", "artifact_class"]
|
|
7359
|
+
required_fields = KERNEL_ARTIFACT_CLASS_REQUIREMENTS.get(str(artifact_class).strip(), [])
|
|
7360
|
+
for field in required_fields:
|
|
7361
|
+
if field not in ordered:
|
|
7362
|
+
ordered.append(field)
|
|
7363
|
+
for field in KERNEL_FIELD_ORDER:
|
|
7364
|
+
if field not in ordered:
|
|
7365
|
+
ordered.append(field)
|
|
7366
|
+
if present_fields is None:
|
|
7367
|
+
return ordered
|
|
7368
|
+
present_set = {str(field).strip() for field in present_fields if str(field).strip()}
|
|
7369
|
+
return [field for field in ordered if field in present_set]
|
|
7370
|
+
|
|
7371
|
+
|
|
7372
|
+
def _kernel_text_valid(value: Any) -> bool:
|
|
7373
|
+
return isinstance(value, str) and bool(value.strip())
|
|
7374
|
+
|
|
7375
|
+
|
|
7376
|
+
def _kernel_text_list_valid(value: Any) -> bool:
|
|
7377
|
+
return isinstance(value, list) and len(value) > 0 and all(_kernel_text_valid(item) for item in value)
|
|
7378
|
+
|
|
7379
|
+
|
|
7380
|
+
def _kernel_field_present(field: str, value: Any) -> bool:
|
|
7381
|
+
kind = str(KERNEL_FIELD_KINDS.get(field, {}).get("kind", ""))
|
|
7382
|
+
if kind == "non_empty_text":
|
|
7383
|
+
return _kernel_text_valid(value)
|
|
7384
|
+
if kind == "text_list":
|
|
7385
|
+
return _kernel_text_list_valid(value)
|
|
7386
|
+
if kind == "text_or_text_list":
|
|
7387
|
+
return _kernel_text_valid(value) or _kernel_text_list_valid(value)
|
|
7388
|
+
if kind == "const":
|
|
7389
|
+
return value is not None
|
|
7390
|
+
if kind == "enum":
|
|
7391
|
+
return value is not None
|
|
7277
7392
|
return value is not None
|
|
7278
7393
|
|
|
7279
7394
|
|
|
7395
|
+
def _kernel_field_shape_issues(field: str, value: Any) -> list[str]:
|
|
7396
|
+
meta = KERNEL_FIELD_KINDS.get(field, {})
|
|
7397
|
+
kind = str(meta.get("kind", ""))
|
|
7398
|
+
if kind == "const":
|
|
7399
|
+
expected = meta.get("value")
|
|
7400
|
+
return [] if value == expected else [f"must equal `{expected}`."]
|
|
7401
|
+
if kind == "enum":
|
|
7402
|
+
allowed = [str(x) for x in meta.get("value", [])]
|
|
7403
|
+
return [] if value in allowed else [f"must be one of: {', '.join(allowed)}."]
|
|
7404
|
+
if kind == "non_empty_text":
|
|
7405
|
+
return [] if _kernel_text_valid(value) else ["must be a non-empty string."]
|
|
7406
|
+
if kind == "text_list":
|
|
7407
|
+
return [] if _kernel_text_list_valid(value) else ["must be a non-empty list of non-empty strings."]
|
|
7408
|
+
if kind == "text_or_text_list":
|
|
7409
|
+
return [] if (_kernel_text_valid(value) or _kernel_text_list_valid(value)) else [
|
|
7410
|
+
"must be a non-empty string or a non-empty list of non-empty strings."
|
|
7411
|
+
]
|
|
7412
|
+
return []
|
|
7413
|
+
|
|
7414
|
+
|
|
7415
|
+
def _validate_kernel_payload(
|
|
7416
|
+
payload: dict[str, Any],
|
|
7417
|
+
*,
|
|
7418
|
+
expected_class: str = "",
|
|
7419
|
+
extra_required_fields: Sequence[str] = (),
|
|
7420
|
+
) -> dict[str, Any]:
|
|
7421
|
+
artifact_issues: list[str] = []
|
|
7422
|
+
missing_fields: list[str] = []
|
|
7423
|
+
|
|
7424
|
+
for field in sorted(str(key) for key in payload.keys() if str(key) not in KERNEL_ALLOWED_FIELDS):
|
|
7425
|
+
artifact_issues.append(f"unexpected field: `{field}`.")
|
|
7426
|
+
|
|
7427
|
+
schema_version = payload.get("schema_version")
|
|
7428
|
+
artifact_issues.extend(
|
|
7429
|
+
[f"field `schema_version` {issue}" for issue in _kernel_field_shape_issues("schema_version", schema_version)]
|
|
7430
|
+
)
|
|
7431
|
+
|
|
7432
|
+
actual_class = str(payload.get("artifact_class", "")).strip()
|
|
7433
|
+
artifact_issues.extend(
|
|
7434
|
+
[f"field `artifact_class` {issue}" for issue in _kernel_field_shape_issues("artifact_class", payload.get("artifact_class"))]
|
|
7435
|
+
)
|
|
7436
|
+
if actual_class not in KERNEL_ARTIFACT_CLASS_REQUIREMENTS:
|
|
7437
|
+
artifact_issues.append(f"unsupported artifact_class: {actual_class or '(missing)'}.")
|
|
7438
|
+
|
|
7439
|
+
if expected_class and actual_class and expected_class != actual_class:
|
|
7440
|
+
artifact_issues.append(
|
|
7441
|
+
f"artifact_class mismatch: expected `{expected_class}`, found `{actual_class}`."
|
|
7442
|
+
)
|
|
7443
|
+
|
|
7444
|
+
field_class = actual_class or expected_class
|
|
7445
|
+
required_fields = list(KERNEL_ARTIFACT_CLASS_REQUIREMENTS.get(field_class, []))
|
|
7446
|
+
for field in _unique_strings([str(x).strip() for x in extra_required_fields if str(x).strip()]):
|
|
7447
|
+
if field not in required_fields:
|
|
7448
|
+
required_fields.append(field)
|
|
7449
|
+
for field, value in payload.items():
|
|
7450
|
+
if not isinstance(field, str) or field not in KERNEL_ALLOWED_FIELDS:
|
|
7451
|
+
continue
|
|
7452
|
+
for issue in _kernel_field_shape_issues(field, value):
|
|
7453
|
+
artifact_issues.append(f"field `{field}` {issue}")
|
|
7454
|
+
for field in required_fields:
|
|
7455
|
+
if not _kernel_field_present(field, payload.get(field)):
|
|
7456
|
+
missing_fields.append(field)
|
|
7457
|
+
if missing_fields:
|
|
7458
|
+
artifact_issues.append("missing required fields: " + ", ".join(missing_fields))
|
|
7459
|
+
|
|
7460
|
+
return {
|
|
7461
|
+
"artifact_class": actual_class,
|
|
7462
|
+
"expected_artifact_class": expected_class,
|
|
7463
|
+
"valid": not artifact_issues,
|
|
7464
|
+
"missing_fields": missing_fields,
|
|
7465
|
+
"issues": artifact_issues,
|
|
7466
|
+
}
|
|
7467
|
+
|
|
7468
|
+
|
|
7469
|
+
def _kernel_canonical_payload(
|
|
7470
|
+
payload: dict[str, Any],
|
|
7471
|
+
*,
|
|
7472
|
+
drop_unknown_fields: bool,
|
|
7473
|
+
) -> tuple[dict[str, Any], list[str]]:
|
|
7474
|
+
unknown_fields = sorted(str(key) for key in payload.keys() if str(key) not in KERNEL_ALLOWED_FIELDS)
|
|
7475
|
+
if unknown_fields and not drop_unknown_fields:
|
|
7476
|
+
raise RuntimeError(
|
|
7477
|
+
"kernel artifact has unknown fields: " + ", ".join(unknown_fields) + ". Re-run with --drop-unknown-fields to discard them."
|
|
7478
|
+
)
|
|
7479
|
+
|
|
7480
|
+
artifact_class = str(payload.get("artifact_class", "")).strip()
|
|
7481
|
+
if artifact_class not in KERNEL_ARTIFACT_CLASS_REQUIREMENTS:
|
|
7482
|
+
raise RuntimeError(f"unsupported artifact_class: {artifact_class or '(missing)'}")
|
|
7483
|
+
|
|
7484
|
+
known_payload = {
|
|
7485
|
+
str(key): value
|
|
7486
|
+
for key, value in payload.items()
|
|
7487
|
+
if str(key) in KERNEL_ALLOWED_FIELDS
|
|
7488
|
+
}
|
|
7489
|
+
known_payload["schema_version"] = KERNEL_SCHEMA_VERSION
|
|
7490
|
+
known_payload["artifact_class"] = artifact_class
|
|
7491
|
+
|
|
7492
|
+
ordered_fields = _kernel_ordered_fields_for_class(artifact_class, present_fields=list(known_payload.keys()))
|
|
7493
|
+
canonical: dict[str, Any] = {}
|
|
7494
|
+
for field in ordered_fields:
|
|
7495
|
+
if field in known_payload:
|
|
7496
|
+
canonical[field] = known_payload[field]
|
|
7497
|
+
return canonical, unknown_fields
|
|
7498
|
+
|
|
7499
|
+
|
|
7500
|
+
def _kernel_proposal_template(
|
|
7501
|
+
*,
|
|
7502
|
+
proposal_kind: str,
|
|
7503
|
+
title: str,
|
|
7504
|
+
target_artifact_classes: Sequence[str],
|
|
7505
|
+
target_fields: Sequence[str],
|
|
7506
|
+
) -> dict[str, Any]:
|
|
7507
|
+
clean_classes = _unique_strings([str(x).strip() for x in target_artifact_classes if str(x).strip()])
|
|
7508
|
+
clean_fields = _unique_strings([str(x).strip() for x in target_fields if str(x).strip()])
|
|
7509
|
+
return {
|
|
7510
|
+
"schema_version": KERNEL_SCHEMA_VERSION,
|
|
7511
|
+
"proposal_kind": proposal_kind,
|
|
7512
|
+
"title": title,
|
|
7513
|
+
"status": "draft",
|
|
7514
|
+
"summary": "describe the kernel evolution being proposed",
|
|
7515
|
+
"target_scope": {
|
|
7516
|
+
"artifact_classes": clean_classes,
|
|
7517
|
+
"fields": clean_fields,
|
|
7518
|
+
},
|
|
7519
|
+
"proposed_change": [
|
|
7520
|
+
"describe the exact structural change",
|
|
7521
|
+
],
|
|
7522
|
+
"rationale": [
|
|
7523
|
+
"describe why the current kernel is insufficient",
|
|
7524
|
+
],
|
|
7525
|
+
"evidence_refs": [
|
|
7526
|
+
"docs/ORP_REASONING_KERNEL_EVIDENCE_MATRIX.md",
|
|
7527
|
+
],
|
|
7528
|
+
"compatibility_notes": [
|
|
7529
|
+
"describe backward-compatibility expectations",
|
|
7530
|
+
],
|
|
7531
|
+
"migration_plan": [
|
|
7532
|
+
"describe how existing artifacts will be preserved or migrated",
|
|
7533
|
+
],
|
|
7534
|
+
"evaluation_plan": [
|
|
7535
|
+
"describe what new evidence should justify promotion into the core kernel",
|
|
7536
|
+
],
|
|
7537
|
+
}
|
|
7538
|
+
|
|
7539
|
+
|
|
7540
|
+
def _kernel_observation_stats_from_run(run: dict[str, Any]) -> dict[str, Any]:
|
|
7541
|
+
results = run.get("results", [])
|
|
7542
|
+
if not isinstance(results, list):
|
|
7543
|
+
results = []
|
|
7544
|
+
kernel_rows = [
|
|
7545
|
+
row.get("kernel_validation")
|
|
7546
|
+
for row in results
|
|
7547
|
+
if isinstance(row, dict) and isinstance(row.get("kernel_validation"), dict)
|
|
7548
|
+
]
|
|
7549
|
+
return {
|
|
7550
|
+
"run_id": str(run.get("run_id", "")).strip(),
|
|
7551
|
+
"kernel_validations": kernel_rows,
|
|
7552
|
+
}
|
|
7553
|
+
|
|
7554
|
+
|
|
7280
7555
|
def _kernel_validation_mode(gate: dict[str, Any]) -> str:
|
|
7281
7556
|
kernel_cfg = gate.get("kernel") if isinstance(gate.get("kernel"), dict) else {}
|
|
7282
7557
|
default_mode = "hard" if str(gate.get("phase", "")).strip() == "structure_kernel" else "soft"
|
|
@@ -7371,31 +7646,14 @@ def _validate_kernel_gate(
|
|
|
7371
7646
|
|
|
7372
7647
|
actual_class = ""
|
|
7373
7648
|
if payload:
|
|
7374
|
-
|
|
7375
|
-
|
|
7376
|
-
|
|
7377
|
-
|
|
7378
|
-
|
|
7379
|
-
|
|
7380
|
-
|
|
7381
|
-
|
|
7382
|
-
)
|
|
7383
|
-
|
|
7384
|
-
if expected_class and actual_class and expected_class != actual_class:
|
|
7385
|
-
artifact_issues.append(
|
|
7386
|
-
f"artifact_class mismatch: expected `{expected_class}`, found `{actual_class}`."
|
|
7387
|
-
)
|
|
7388
|
-
|
|
7389
|
-
field_class = actual_class or expected_class
|
|
7390
|
-
required_fields = list(KERNEL_ARTIFACT_CLASS_REQUIREMENTS.get(field_class, []))
|
|
7391
|
-
for field in extra_required_fields:
|
|
7392
|
-
if field not in required_fields:
|
|
7393
|
-
required_fields.append(field)
|
|
7394
|
-
for field in required_fields:
|
|
7395
|
-
if not _kernel_field_present(payload.get(field)):
|
|
7396
|
-
missing_fields.append(field)
|
|
7397
|
-
if missing_fields:
|
|
7398
|
-
artifact_issues.append("missing required fields: " + ", ".join(missing_fields))
|
|
7649
|
+
validation = _validate_kernel_payload(
|
|
7650
|
+
payload,
|
|
7651
|
+
expected_class=expected_class,
|
|
7652
|
+
extra_required_fields=extra_required_fields,
|
|
7653
|
+
)
|
|
7654
|
+
actual_class = str(validation.get("artifact_class", "")).strip()
|
|
7655
|
+
missing_fields = list(validation.get("missing_fields", []))
|
|
7656
|
+
artifact_issues.extend([str(issue) for issue in validation.get("issues", []) if isinstance(issue, str)])
|
|
7399
7657
|
|
|
7400
7658
|
valid = optional_skipped or (exists and not artifact_issues)
|
|
7401
7659
|
path_state = _path_for_state(path, repo_root)
|
|
@@ -7576,6 +7834,273 @@ def cmd_kernel_scaffold(args: argparse.Namespace) -> int:
|
|
|
7576
7834
|
return 0
|
|
7577
7835
|
|
|
7578
7836
|
|
|
7837
|
+
def _resolve_kernel_run_json_paths(
|
|
7838
|
+
*,
|
|
7839
|
+
repo_root: Path,
|
|
7840
|
+
run_ids: Sequence[str],
|
|
7841
|
+
run_jsons: Sequence[str],
|
|
7842
|
+
) -> list[Path]:
|
|
7843
|
+
resolved: list[Path] = []
|
|
7844
|
+
if run_jsons:
|
|
7845
|
+
for raw in run_jsons:
|
|
7846
|
+
if not str(raw).strip():
|
|
7847
|
+
continue
|
|
7848
|
+
_, path = _resolve_run_json_path(repo_root=repo_root, run_id_arg="", run_json_arg=str(raw))
|
|
7849
|
+
resolved.append(path)
|
|
7850
|
+
return resolved
|
|
7851
|
+
if run_ids:
|
|
7852
|
+
for raw in run_ids:
|
|
7853
|
+
if not str(raw).strip():
|
|
7854
|
+
continue
|
|
7855
|
+
_, path = _resolve_run_json_path(repo_root=repo_root, run_id_arg=str(raw), run_json_arg="")
|
|
7856
|
+
resolved.append(path)
|
|
7857
|
+
return resolved
|
|
7858
|
+
|
|
7859
|
+
seen: set[Path] = set()
|
|
7860
|
+
state_path = repo_root / "orp" / "state.json"
|
|
7861
|
+
if state_path.exists():
|
|
7862
|
+
try:
|
|
7863
|
+
state = _read_json(state_path)
|
|
7864
|
+
except Exception:
|
|
7865
|
+
state = {}
|
|
7866
|
+
runs = state.get("runs")
|
|
7867
|
+
if isinstance(runs, dict):
|
|
7868
|
+
for value in runs.values():
|
|
7869
|
+
if not isinstance(value, str) or not value.strip():
|
|
7870
|
+
continue
|
|
7871
|
+
candidate = (repo_root / value).resolve()
|
|
7872
|
+
if candidate.exists() and candidate not in seen:
|
|
7873
|
+
seen.add(candidate)
|
|
7874
|
+
resolved.append(candidate)
|
|
7875
|
+
artifacts_root = repo_root / "orp" / "artifacts"
|
|
7876
|
+
if artifacts_root.exists():
|
|
7877
|
+
for candidate in sorted(artifacts_root.glob("*/RUN.json")):
|
|
7878
|
+
candidate = candidate.resolve()
|
|
7879
|
+
if candidate not in seen:
|
|
7880
|
+
seen.add(candidate)
|
|
7881
|
+
resolved.append(candidate)
|
|
7882
|
+
return resolved
|
|
7883
|
+
|
|
7884
|
+
|
|
7885
|
+
def _kernel_stats_payload(
|
|
7886
|
+
repo_root: Path,
|
|
7887
|
+
run_json_paths: Sequence[Path],
|
|
7888
|
+
) -> dict[str, Any]:
|
|
7889
|
+
runs_scanned = 0
|
|
7890
|
+
runs_with_kernel_validation = 0
|
|
7891
|
+
gate_rows_total = 0
|
|
7892
|
+
artifacts_total = 0
|
|
7893
|
+
artifacts_valid = 0
|
|
7894
|
+
artifacts_invalid = 0
|
|
7895
|
+
mode_counts: dict[str, int] = {}
|
|
7896
|
+
artifact_class_counts: dict[str, int] = {}
|
|
7897
|
+
missing_field_counts: dict[str, int] = {}
|
|
7898
|
+
issue_counts: dict[str, int] = {}
|
|
7899
|
+
path_counts: dict[str, int] = {}
|
|
7900
|
+
per_run: list[dict[str, Any]] = []
|
|
7901
|
+
|
|
7902
|
+
for run_json in run_json_paths:
|
|
7903
|
+
run = _read_json(run_json)
|
|
7904
|
+
stats = _kernel_observation_stats_from_run(run)
|
|
7905
|
+
kernel_rows = stats["kernel_validations"]
|
|
7906
|
+
runs_scanned += 1
|
|
7907
|
+
if kernel_rows:
|
|
7908
|
+
runs_with_kernel_validation += 1
|
|
7909
|
+
per_run.append(
|
|
7910
|
+
{
|
|
7911
|
+
"run_id": stats["run_id"] or run_json.parent.name,
|
|
7912
|
+
"run_json": _path_for_state(run_json, repo_root),
|
|
7913
|
+
"kernel_validations": len(kernel_rows),
|
|
7914
|
+
}
|
|
7915
|
+
)
|
|
7916
|
+
for row in kernel_rows:
|
|
7917
|
+
if not isinstance(row, dict):
|
|
7918
|
+
continue
|
|
7919
|
+
gate_rows_total += 1
|
|
7920
|
+
mode = str(row.get("mode", "")).strip() or "unknown"
|
|
7921
|
+
mode_counts[mode] = mode_counts.get(mode, 0) + 1
|
|
7922
|
+
for artifact in row.get("artifacts", []) if isinstance(row.get("artifacts"), list) else []:
|
|
7923
|
+
if not isinstance(artifact, dict):
|
|
7924
|
+
continue
|
|
7925
|
+
artifacts_total += 1
|
|
7926
|
+
if artifact.get("valid"):
|
|
7927
|
+
artifacts_valid += 1
|
|
7928
|
+
else:
|
|
7929
|
+
artifacts_invalid += 1
|
|
7930
|
+
artifact_class = str(
|
|
7931
|
+
artifact.get("artifact_class") or artifact.get("expected_artifact_class") or "unknown"
|
|
7932
|
+
).strip() or "unknown"
|
|
7933
|
+
artifact_class_counts[artifact_class] = artifact_class_counts.get(artifact_class, 0) + 1
|
|
7934
|
+
artifact_path = str(artifact.get("path", "")).strip()
|
|
7935
|
+
if artifact_path:
|
|
7936
|
+
path_counts[artifact_path] = path_counts.get(artifact_path, 0) + 1
|
|
7937
|
+
for field in artifact.get("missing_fields", []) if isinstance(artifact.get("missing_fields"), list) else []:
|
|
7938
|
+
key = str(field).strip()
|
|
7939
|
+
if key:
|
|
7940
|
+
missing_field_counts[key] = missing_field_counts.get(key, 0) + 1
|
|
7941
|
+
for issue in artifact.get("issues", []) if isinstance(artifact.get("issues"), list) else []:
|
|
7942
|
+
key = str(issue).strip()
|
|
7943
|
+
if key:
|
|
7944
|
+
issue_counts[key] = issue_counts.get(key, 0) + 1
|
|
7945
|
+
|
|
7946
|
+
top_missing_fields = [
|
|
7947
|
+
{"field": key, "count": count}
|
|
7948
|
+
for key, count in sorted(missing_field_counts.items(), key=lambda item: (-item[1], item[0]))[:10]
|
|
7949
|
+
]
|
|
7950
|
+
top_issue_signals = [
|
|
7951
|
+
{"issue": key, "count": count}
|
|
7952
|
+
for key, count in sorted(issue_counts.items(), key=lambda item: (-item[1], item[0]))[:10]
|
|
7953
|
+
]
|
|
7954
|
+
top_paths = [
|
|
7955
|
+
{"path": key, "count": count}
|
|
7956
|
+
for key, count in sorted(path_counts.items(), key=lambda item: (-item[1], item[0]))[:10]
|
|
7957
|
+
]
|
|
7958
|
+
observations: list[str] = []
|
|
7959
|
+
if runs_scanned == 0:
|
|
7960
|
+
observations.append("No RUN.json artifacts were found. Run `orp gate run` with a structure_kernel gate to collect kernel observations.")
|
|
7961
|
+
elif runs_with_kernel_validation == 0:
|
|
7962
|
+
observations.append("RUN.json artifacts exist, but none recorded kernel_validation. Add a structure_kernel gate with a kernel.artifacts block.")
|
|
7963
|
+
else:
|
|
7964
|
+
if top_missing_fields:
|
|
7965
|
+
focus = ", ".join(f"{row['field']} ({row['count']})" for row in top_missing_fields[:5])
|
|
7966
|
+
observations.append(f"Most repeated missing fields: {focus}.")
|
|
7967
|
+
if artifacts_invalid == 0:
|
|
7968
|
+
observations.append("All observed kernel artifacts validated successfully across scanned runs.")
|
|
7969
|
+
else:
|
|
7970
|
+
observations.append(
|
|
7971
|
+
f"{artifacts_invalid} of {artifacts_total} observed kernel artifacts failed validation."
|
|
7972
|
+
)
|
|
7973
|
+
return {
|
|
7974
|
+
"ok": True,
|
|
7975
|
+
"repo_root": str(repo_root),
|
|
7976
|
+
"runs_scanned": runs_scanned,
|
|
7977
|
+
"runs_with_kernel_validation": runs_with_kernel_validation,
|
|
7978
|
+
"kernel_validation_rows": gate_rows_total,
|
|
7979
|
+
"artifacts_total": artifacts_total,
|
|
7980
|
+
"artifacts_valid": artifacts_valid,
|
|
7981
|
+
"artifacts_invalid": artifacts_invalid,
|
|
7982
|
+
"artifact_validation_rate": round((artifacts_valid / artifacts_total), 3) if artifacts_total else None,
|
|
7983
|
+
"mode_counts": mode_counts,
|
|
7984
|
+
"artifact_class_counts": artifact_class_counts,
|
|
7985
|
+
"top_missing_fields": top_missing_fields,
|
|
7986
|
+
"top_issue_signals": top_issue_signals,
|
|
7987
|
+
"top_paths": top_paths,
|
|
7988
|
+
"observations": observations,
|
|
7989
|
+
"runs": per_run,
|
|
7990
|
+
}
|
|
7991
|
+
|
|
7992
|
+
|
|
7993
|
+
def cmd_kernel_stats(args: argparse.Namespace) -> int:
|
|
7994
|
+
repo_root = Path(args.repo_root).resolve()
|
|
7995
|
+
run_json_paths = _resolve_kernel_run_json_paths(
|
|
7996
|
+
repo_root=repo_root,
|
|
7997
|
+
run_ids=list(getattr(args, "run_id", []) or []),
|
|
7998
|
+
run_jsons=list(getattr(args, "run_json", []) or []),
|
|
7999
|
+
)
|
|
8000
|
+
payload = _kernel_stats_payload(repo_root, run_json_paths)
|
|
8001
|
+
if args.json_output:
|
|
8002
|
+
_print_json(payload)
|
|
8003
|
+
else:
|
|
8004
|
+
print(f"runs_scanned={payload['runs_scanned']}")
|
|
8005
|
+
print(f"runs_with_kernel_validation={payload['runs_with_kernel_validation']}")
|
|
8006
|
+
print(f"artifacts_total={payload['artifacts_total']}")
|
|
8007
|
+
print(f"artifacts_valid={payload['artifacts_valid']}")
|
|
8008
|
+
print(f"artifacts_invalid={payload['artifacts_invalid']}")
|
|
8009
|
+
for row in payload.get("top_missing_fields", []):
|
|
8010
|
+
print(f"missing_field={row['field']} count={row['count']}")
|
|
8011
|
+
for note in payload.get("observations", []):
|
|
8012
|
+
print(f"note={note}")
|
|
8013
|
+
return 0
|
|
8014
|
+
|
|
8015
|
+
|
|
8016
|
+
def cmd_kernel_propose(args: argparse.Namespace) -> int:
    """Scaffold a governed kernel-evolution proposal artifact on disk.

    Derives an output slug from --slug (or the title), writes a proposal
    template to --out (default: analysis/kernel-proposals/<slug>.yml), and
    reports the result as JSON or key=value lines.

    Raises:
        RuntimeError: if the title is blank, or the output path already
            exists and --force was not given.
    """
    root = Path(args.repo_root).resolve()
    title = str(args.title or "").strip()
    if not title:
        raise RuntimeError("proposal title is required.")
    slug = _slug_token(getattr(args, "slug", "") or title, fallback="kernel-proposal")
    explicit_out = str(getattr(args, "out", "") or "").strip()
    destination = (
        _resolve_cli_path(explicit_out, root)
        if explicit_out
        else root / "analysis" / "kernel-proposals" / f"{slug}.yml"
    )
    # Refuse to clobber an existing proposal unless explicitly forced.
    if destination.exists() and not args.force:
        raise RuntimeError(
            f"kernel proposal already exists: {_path_for_state(destination, root)}. Use --force to overwrite."
        )
    proposal = _kernel_proposal_template(
        proposal_kind=str(args.kind).strip(),
        title=title,
        target_artifact_classes=list(getattr(args, "artifact_class", []) or []),
        target_fields=list(getattr(args, "field", []) or []),
    )
    written_format = _write_structured_payload(destination, proposal, format_hint=args.format)
    summary = {
        "ok": True,
        "path": _path_for_state(destination, root),
        "format": written_format,
        "proposal_kind": proposal["proposal_kind"],
        "title": proposal["title"],
    }
    if args.json_output:
        _print_json(summary)
        return 0
    for key in ("path", "proposal_kind", "title", "format"):
        print(f"{key}={summary[key]}")
    return 0
+
def cmd_kernel_migrate(args: argparse.Namespace) -> int:
    """Rewrite a kernel artifact into the current canonical field order and schema version.

    Loads the artifact, canonicalizes it (optionally dropping unknown
    fields), writes the result to --out (default: in place), then validates
    the canonical payload and reports the outcome as JSON or key=value
    lines. Note the file is written before validation runs, so an invalid
    artifact is still migrated; the verdict is surfaced in the output.

    Raises:
        RuntimeError: if the artifact is missing, its root is not a
            mapping, or --out points at an existing different file without
            --force.
    """
    root = Path(args.repo_root).resolve()
    source = _resolve_cli_path(args.artifact, root)
    if not source.exists():
        raise RuntimeError(f"kernel artifact not found: {_path_for_state(source, root)}")
    document = _load_config(source)
    if not isinstance(document, dict):
        raise RuntimeError("kernel artifact root must be an object.")
    explicit_out = str(getattr(args, "out", "") or "").strip()
    target = _resolve_cli_path(explicit_out, root) if explicit_out else source
    # In-place rewrites are always allowed; a distinct target needs --force.
    if target.exists() and target != source and not args.force:
        raise RuntimeError(
            f"output path already exists: {_path_for_state(target, root)}. Use --force to overwrite."
        )

    version_before = str(document.get("schema_version", "") or "").strip()
    canonical, dropped = _kernel_canonical_payload(
        document,
        drop_unknown_fields=bool(getattr(args, "drop_unknown_fields", False)),
    )
    written_format = _write_structured_payload(target, canonical, format_hint=args.format)
    version_after = str(canonical.get("schema_version", ""))
    verdict = _validate_kernel_payload(
        canonical,
        expected_class=str(canonical.get("artifact_class", "")).strip(),
    )
    report = {
        "ok": True,
        "artifact": _path_for_state(source, root),
        "path": _path_for_state(target, root),
        "format": written_format,
        "schema_version_before": version_before or "(missing)",
        "schema_version_after": version_after,
        "schema_version_updated": version_before != version_after,
        "artifact_class": str(canonical.get("artifact_class", "")),
        "dropped_unknown_fields": dropped,
        "validation": verdict,
    }
    if args.json_output:
        _print_json(report)
        return 0
    for key in ("path", "artifact_class", "schema_version_before", "schema_version_after"):
        print(f"{key}={report[key]}")
    if dropped:
        print("dropped_unknown_fields=" + ",".join(dropped))
    print(f"valid={'true' if verdict.get('valid') else 'false'}")
    for issue in verdict.get("issues", []):
        print(f"issue={issue}")
    return 0
|
|
8103
|
+
|
|
7579
8104
|
def cmd_gate_run(args: argparse.Namespace) -> int:
|
|
7580
8105
|
repo_root = Path(args.repo_root).resolve()
|
|
7581
8106
|
_ensure_dirs(repo_root)
|
|
@@ -12658,6 +13183,106 @@ def build_parser() -> argparse.ArgumentParser:
|
|
|
12658
13183
|
add_json_flag(s_kernel_scaffold)
|
|
12659
13184
|
s_kernel_scaffold.set_defaults(func=cmd_kernel_scaffold, json_output=False)
|
|
12660
13185
|
|
|
13186
|
+
s_kernel_stats = kernel_sub.add_parser(
|
|
13187
|
+
"stats",
|
|
13188
|
+
help="Summarize observed kernel validation pressure from RUN.json artifacts",
|
|
13189
|
+
)
|
|
13190
|
+
s_kernel_stats.add_argument(
|
|
13191
|
+
"--run-id",
|
|
13192
|
+
action="append",
|
|
13193
|
+
default=[],
|
|
13194
|
+
help="Specific run id to include (repeatable). Defaults to all discovered runs.",
|
|
13195
|
+
)
|
|
13196
|
+
s_kernel_stats.add_argument(
|
|
13197
|
+
"--run-json",
|
|
13198
|
+
action="append",
|
|
13199
|
+
default=[],
|
|
13200
|
+
help="Explicit RUN.json path to include (repeatable). Defaults to all discovered runs.",
|
|
13201
|
+
)
|
|
13202
|
+
add_json_flag(s_kernel_stats)
|
|
13203
|
+
s_kernel_stats.set_defaults(func=cmd_kernel_stats, json_output=False)
|
|
13204
|
+
|
|
13205
|
+
s_kernel_propose = kernel_sub.add_parser(
|
|
13206
|
+
"propose",
|
|
13207
|
+
help="Scaffold a governed kernel-evolution proposal artifact",
|
|
13208
|
+
)
|
|
13209
|
+
s_kernel_propose.add_argument(
|
|
13210
|
+
"--kind",
|
|
13211
|
+
required=True,
|
|
13212
|
+
choices=["add_field", "new_class", "requirement_change", "deprecate_field"],
|
|
13213
|
+
help="Type of kernel evolution proposal",
|
|
13214
|
+
)
|
|
13215
|
+
s_kernel_propose.add_argument(
|
|
13216
|
+
"--title",
|
|
13217
|
+
required=True,
|
|
13218
|
+
help="Proposal title",
|
|
13219
|
+
)
|
|
13220
|
+
s_kernel_propose.add_argument(
|
|
13221
|
+
"--artifact-class",
|
|
13222
|
+
action="append",
|
|
13223
|
+
default=[],
|
|
13224
|
+
choices=sorted(KERNEL_ARTIFACT_CLASS_REQUIREMENTS.keys()),
|
|
13225
|
+
help="Affected kernel artifact class (repeatable)",
|
|
13226
|
+
)
|
|
13227
|
+
s_kernel_propose.add_argument(
|
|
13228
|
+
"--field",
|
|
13229
|
+
action="append",
|
|
13230
|
+
default=[],
|
|
13231
|
+
help="Affected kernel field name (repeatable)",
|
|
13232
|
+
)
|
|
13233
|
+
s_kernel_propose.add_argument(
|
|
13234
|
+
"--slug",
|
|
13235
|
+
default="",
|
|
13236
|
+
help="Optional output slug override",
|
|
13237
|
+
)
|
|
13238
|
+
s_kernel_propose.add_argument(
|
|
13239
|
+
"--out",
|
|
13240
|
+
default="",
|
|
13241
|
+
help="Optional output path (default: analysis/kernel-proposals/<slug>.yml)",
|
|
13242
|
+
)
|
|
13243
|
+
s_kernel_propose.add_argument(
|
|
13244
|
+
"--format",
|
|
13245
|
+
default="",
|
|
13246
|
+
choices=["", "yaml", "json"],
|
|
13247
|
+
help="Optional explicit output format",
|
|
13248
|
+
)
|
|
13249
|
+
s_kernel_propose.add_argument(
|
|
13250
|
+
"--force",
|
|
13251
|
+
action="store_true",
|
|
13252
|
+
help="Overwrite an existing proposal at the output path",
|
|
13253
|
+
)
|
|
13254
|
+
add_json_flag(s_kernel_propose)
|
|
13255
|
+
s_kernel_propose.set_defaults(func=cmd_kernel_propose, json_output=False)
|
|
13256
|
+
|
|
13257
|
+
s_kernel_migrate = kernel_sub.add_parser(
|
|
13258
|
+
"migrate",
|
|
13259
|
+
help="Rewrite a kernel artifact into the current canonical field order and schema version",
|
|
13260
|
+
)
|
|
13261
|
+
s_kernel_migrate.add_argument("artifact", help="Kernel artifact path (.yml, .yaml, or .json)")
|
|
13262
|
+
s_kernel_migrate.add_argument(
|
|
13263
|
+
"--out",
|
|
13264
|
+
default="",
|
|
13265
|
+
help="Optional output path (default: rewrite in place)",
|
|
13266
|
+
)
|
|
13267
|
+
s_kernel_migrate.add_argument(
|
|
13268
|
+
"--format",
|
|
13269
|
+
default="",
|
|
13270
|
+
choices=["", "yaml", "json"],
|
|
13271
|
+
help="Optional explicit output format",
|
|
13272
|
+
)
|
|
13273
|
+
s_kernel_migrate.add_argument(
|
|
13274
|
+
"--drop-unknown-fields",
|
|
13275
|
+
action="store_true",
|
|
13276
|
+
help="Drop unknown fields instead of failing migration",
|
|
13277
|
+
)
|
|
13278
|
+
s_kernel_migrate.add_argument(
|
|
13279
|
+
"--force",
|
|
13280
|
+
action="store_true",
|
|
13281
|
+
help="Allow overwriting an existing --out path",
|
|
13282
|
+
)
|
|
13283
|
+
add_json_flag(s_kernel_migrate)
|
|
13284
|
+
s_kernel_migrate.set_defaults(func=cmd_kernel_migrate, json_output=False)
|
|
13285
|
+
|
|
12661
13286
|
s_gate = sub.add_parser("gate", help="Gate operations")
|
|
12662
13287
|
gate_sub = s_gate.add_subparsers(dest="gate_cmd", required=True)
|
|
12663
13288
|
s_run = gate_sub.add_parser("run", help="Run configured gates for a profile")
|