open-research-protocol 0.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/AGENT_INTEGRATION.md +94 -0
- package/INSTALL.md +159 -0
- package/LICENSE +22 -0
- package/PROTOCOL.md +140 -0
- package/README.md +312 -0
- package/bin/orp.js +38 -0
- package/cli/orp.py +3595 -0
- package/cone/CONTEXT_LOG.md +33 -0
- package/docs/AGENT_LOOP.md +63 -0
- package/docs/CHOOSING_OR_IGNORING_INSTRUMENTS.md +128 -0
- package/docs/CODA_ORP_CONTRACT.md +222 -0
- package/docs/CORE_ABILITY_REFOCUS_CHECKLIST.md +62 -0
- package/docs/DISCOVER.md +69 -0
- package/docs/EXTERNAL_CONTRIBUTION_GOVERNANCE.md +275 -0
- package/docs/MATHLIB_COLLABORATION_FLOW_PROMPT.md +112 -0
- package/docs/NPM_RELEASE_CHECKLIST.md +55 -0
- package/docs/ORP_V1_ATOMIC_DISCOVERY_EVOLUTION.md +186 -0
- package/docs/OSS_CONTRIBUTION_AGENT_LOOP.md +69 -0
- package/docs/PRESENTATION_BOW.md +100 -0
- package/docs/PROFILE_PACKS.md +227 -0
- package/docs/SUNFLOWER_CODA_PR_GOVERNANCE_MAPPING.md +77 -0
- package/docs/WHY_INSTRUMENTS.md +118 -0
- package/examples/README.md +21 -0
- package/examples/example_claim.md +33 -0
- package/examples/example_failed.md +24 -0
- package/examples/example_verification.md +36 -0
- package/examples/orp.erdos-problems.catalog.yml +88 -0
- package/examples/orp.external-pr-governance.yml +223 -0
- package/examples/orp.sunflower-coda.atomic.yml +144 -0
- package/examples/orp.sunflower-coda.live-compare.yml +181 -0
- package/examples/orp.sunflower-coda.pr-governance.yml +253 -0
- package/examples/packet.problem_scope.example.json +123 -0
- package/examples/reports/README.md +16 -0
- package/examples/reports/sunflower_live_compare_20.RUN_SUMMARY.md +37 -0
- package/examples/reports/sunflower_live_compare_367.RUN_SUMMARY.md +37 -0
- package/examples/reports/sunflower_live_compare_857.RUN_SUMMARY.md +37 -0
- package/llms.txt +58 -0
- package/modules/instruments/ADVERSARIAL/README.md +109 -0
- package/modules/instruments/ADVERSARIAL/TEMPLATE.md +27 -0
- package/modules/instruments/COMPRESSION/README.md +112 -0
- package/modules/instruments/COMPRESSION/TEMPLATE.md +27 -0
- package/modules/instruments/INSTRUMENT_TEMPLATE.md +30 -0
- package/modules/instruments/ORBIT/README.md +124 -0
- package/modules/instruments/ORBIT/TEMPLATE.md +28 -0
- package/modules/instruments/README.md +179 -0
- package/package.json +54 -0
- package/packs/README.md +16 -0
- package/packs/erdos-open-problems/README.md +287 -0
- package/packs/erdos-open-problems/data/README.md +43 -0
- package/packs/erdos-open-problems/data/erdos_open_problems.md +697 -0
- package/packs/erdos-open-problems/data/erdos_problems.active.json +15561 -0
- package/packs/erdos-open-problems/data/erdos_problems.all.json +26289 -0
- package/packs/erdos-open-problems/data/erdos_problems.closed.json +10760 -0
- package/packs/erdos-open-problems/data/erdos_problems.open.json +15561 -0
- package/packs/erdos-open-problems/docs/SUNFLOWER_ADAPTER_DEPENDENCIES.md +63 -0
- package/packs/erdos-open-problems/pack.yml +131 -0
- package/packs/erdos-open-problems/profiles/erdos-problems-catalog-sync.yml.tmpl +99 -0
- package/packs/erdos-open-problems/profiles/sunflower-live-compare.yml.tmpl +188 -0
- package/packs/erdos-open-problems/profiles/sunflower-mathlib-pr-governance.yml.tmpl +253 -0
- package/packs/erdos-open-problems/profiles/sunflower-problem857-discovery-public-repo.yml.tmpl +152 -0
- package/packs/erdos-open-problems/profiles/sunflower-problem857-discovery.yml.tmpl +154 -0
- package/packs/external-pr-governance/README.md +116 -0
- package/packs/external-pr-governance/adapters/formal-conjectures/README.md +35 -0
- package/packs/external-pr-governance/adapters/mathlib/README.md +37 -0
- package/packs/external-pr-governance/pack.yml +146 -0
- package/packs/external-pr-governance/profiles/oss-feedback-hardening.yml.tmpl +92 -0
- package/packs/external-pr-governance/profiles/oss-pr-governance.yml.tmpl +233 -0
- package/packs/issue-smashers/README.md +92 -0
- package/packs/issue-smashers/adapters/formal-conjectures/README.md +17 -0
- package/packs/issue-smashers/adapters/generic-github/README.md +16 -0
- package/packs/issue-smashers/adapters/mathlib/README.md +32 -0
- package/packs/issue-smashers/bootstrap/README.md +19 -0
- package/packs/issue-smashers/bootstrap/setup-issue-smashers.sh +18 -0
- package/packs/issue-smashers/examples/issue-smashers.workspace.yml +24 -0
- package/packs/issue-smashers/pack.yml +178 -0
- package/packs/issue-smashers/profiles/issue-smashers-feedback-hardening.yml.tmpl +102 -0
- package/packs/issue-smashers/profiles/issue-smashers.yml.tmpl +258 -0
- package/scripts/npm-postinstall-check.js +31 -0
- package/scripts/orp +11 -0
- package/scripts/orp-agent-integrate.sh +197 -0
- package/scripts/orp-checkpoint.sh +184 -0
- package/scripts/orp-erdos-problems-sync.py +580 -0
- package/scripts/orp-init.sh +50 -0
- package/scripts/orp-pack-fetch.py +155 -0
- package/scripts/orp-pack-install.py +2273 -0
- package/scripts/orp-pack-render.py +188 -0
- package/spec/v1/LIFECYCLE_MAPPING.md +40 -0
- package/spec/v1/orp.config.schema.json +385 -0
- package/spec/v1/packet.schema.json +552 -0
- package/spec/v1/profile-pack.schema.json +95 -0
- package/templates/CLAIM.md +33 -0
- package/templates/FAILED_TOPIC.md +19 -0
- package/templates/ISSUE_TEMPLATE.md +22 -0
- package/templates/VERIFICATION_RECORD.md +34 -0
|
@@ -0,0 +1,2273 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""Install ORP profile-pack templates into a target repository.
|
|
3
|
+
|
|
4
|
+
This script keeps ORP core generic while making pack adoption easy:
|
|
5
|
+
- render selected pack templates to concrete config files,
|
|
6
|
+
- optionally scaffold starter adapters for install-and-go usage,
|
|
7
|
+
- audit expected adapter dependencies in the target repo,
|
|
8
|
+
- write an install report with next steps.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
from __future__ import annotations
|
|
12
|
+
|
|
13
|
+
import argparse
|
|
14
|
+
import datetime as dt
|
|
15
|
+
import json
|
|
16
|
+
from pathlib import Path
|
|
17
|
+
import re
|
|
18
|
+
import shutil
|
|
19
|
+
import subprocess
|
|
20
|
+
import sys
|
|
21
|
+
import tempfile
|
|
22
|
+
from typing import Any
|
|
23
|
+
|
|
24
|
+
import yaml
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
# Registry of installable profile packs.
#
# Each top-level key is a pack id; its value describes:
#   - default_includes: component keys rendered when the user selects none,
#   - report_name:      filename of the install report written into the target repo,
#   - components:       per-component render spec:
#       * template_id:    id of the pack template to render,
#       * output_name:    concrete config filename written to the target repo,
#       * description:    human-readable summary (used in reports/UI),
#       * required_paths: repo-relative paths the component expects to exist
#         (presumably audited by the dependency-audit step of this script —
#         TODO confirm against the audit code further down the file).
PACK_SPECS: dict[str, dict[str, Any]] = {
    "erdos-open-problems": {
        "default_includes": ["catalog", "live_compare", "problem857"],
        "report_name": "orp.erdos.pack-install-report.md",
        "components": {
            "catalog": {
                "template_id": "erdos_problems_catalog_sync",
                "output_name": "orp.erdos-catalog-sync.yml",
                "description": "Erdos catalog sync (all/open/closed/active snapshots).",
                "required_paths": [],
            },
            "live_compare": {
                "template_id": "sunflower_live_compare_suite",
                "output_name": "orp.erdos-live-compare.yml",
                "description": "Side-by-side atomic-board compare for Problems 857/20/367.",
                "required_paths": [
                    "analysis/problem857_counting_gateboard.json",
                    "analysis/problem20_k3_gateboard.json",
                    "analysis/problem367_sharp_gateboard.json",
                    "scripts/problem857_ops_board.py",
                    "scripts/problem20_ops_board.py",
                    "scripts/problem367_ops_board.py",
                    "scripts/frontier_status.py",
                ],
            },
            "problem857": {
                "template_id": "sunflower_problem857_discovery",
                "output_name": "orp.erdos-problem857.yml",
                "description": "Problem 857 discovery profile (board refresh/ready/spec/lean/frontier).",
                "required_paths": [
                    "analysis/problem857_counting_gateboard.json",
                    "docs/PROBLEM857_COUNTING_OPS_BOARD.md",
                    "orchestrator/v2/scopes/problem_857.yaml",
                    "orchestrator/problem857_public_spec_check.py",
                    "scripts/problem857_ops_board.py",
                    "scripts/frontier_status.py",
                    "sunflower_lean",
                ],
            },
            # Not in default_includes: only rendered when explicitly selected.
            "governance": {
                "template_id": "sunflower_mathlib_pr_governance",
                "output_name": "orp.erdos-mathlib-pr-governance.yml",
                "description": "Mathlib PR governance profile set (pre-open, draft-readiness, full flow).",
                "required_paths": [
                    "docs/MATHLIB_SUBMISSION_CHECKLIST.md",
                    "docs/MATHLIB_DRAFT_PR_TEMPLATE.md",
                    "docs/MATHLIB_ISSUE_VIABILITY_GATE.md",
                    "docs/UPSTREAM_PR_LANE.md",
                    "analysis/UPSTREAM_PR_PLAN.yaml",
                    "scripts/upstream-pr-plan.py",
                    "scripts/upstream-pr-lane.sh",
                    "scripts/mathlib-issue-viability-gate.py",
                    "scripts/mathlib-naturality-snippet.sh",
                    "scripts/mathlib-issue-local-gate.sh",
                    "scripts/mathlib-tighten-fine-tooth-gate.sh",
                    "scripts/mathlib-ready-to-draft-gate.sh",
                    "scripts/mathlib-pr-body-preflight.py",
                ],
            },
        },
    },
    "external-pr-governance": {
        "default_includes": ["governance", "feedback_hardening"],
        "report_name": "orp.external-pr.pack-install-report.md",
        "components": {
            "governance": {
                "template_id": "oss_pr_governance",
                "output_name": "orp.external-pr-governance.yml",
                "description": "Generic external contribution governance profiles (watch/select through draft lifecycle).",
                "required_paths": ["analysis/PR_DRAFT_BODY.md"],
            },
            "feedback_hardening": {
                "template_id": "oss_feedback_hardening",
                "output_name": "orp.external-pr-feedback-hardening.yml",
                "description": "Maintainer-feedback hardening profile.",
                "required_paths": [],
            },
        },
    },
    "issue-smashers": {
        "default_includes": ["workspace", "feedback_hardening"],
        "report_name": "orp.issue-smashers.pack-install-report.md",
        "components": {
            "workspace": {
                "template_id": "issue_smashers_workspace",
                "output_name": "orp.issue-smashers.yml",
                "description": "Opinionated issue-smashers workspace and external contribution governance profiles.",
                "required_paths": [
                    "issue-smashers/README.md",
                    "issue-smashers/WORKSPACE_RULES.md",
                    "issue-smashers/setup-issue-smashers.sh",
                    "issue-smashers/analysis/ISSUE_SMASHERS_WATCHLIST.json",
                    "issue-smashers/analysis/ISSUE_SMASHERS_STATUS.md",
                    "issue-smashers/analysis/PR_DRAFT_BODY.md",
                ],
            },
            "feedback_hardening": {
                "template_id": "issue_smashers_feedback_hardening",
                "output_name": "orp.issue-smashers-feedback-hardening.yml",
                "description": "Issue-smashers feedback hardening profile.",
                "required_paths": [
                    "issue-smashers/WORKSPACE_RULES.md",
                    "issue-smashers/analysis/ISSUE_SMASHERS_STATUS.md",
                ],
            },
        },
    },
}
|
|
135
|
+
|
|
136
|
+
# Repo-relative locations of the per-problem atomic gate-board JSON files,
# keyed by Erdos problem number. Mirrors the copy embedded in STARTER_RUNTIME
# so the installer and the scaffolded runtime agree on paths.
BOARD_PATHS = {
    857: "analysis/problem857_counting_gateboard.json",
    20: "analysis/problem20_k3_gateboard.json",
    367: "analysis/problem367_sharp_gateboard.json",
}
|
|
141
|
+
|
|
142
|
+
# Repo-relative locations of the rendered Markdown ops-board documents,
# keyed by Erdos problem number (companion outputs to BOARD_PATHS JSON).
BOARD_MD_PATHS = {
    857: "docs/PROBLEM857_COUNTING_OPS_BOARD.md",
    20: "docs/PROBLEM20_K3_OPS_BOARD.md",
    367: "docs/PROBLEM367_SHARP_OPS_BOARD.md",
}
|
|
147
|
+
|
|
148
|
+
# Seed payloads used to scaffold a starter gate-board JSON when the target
# repo does not already have one. "updated_utc" is left empty here and is
# stamped at write time. "starter_scaffold"/"starter_note" mark the file as
# generated so downstream tooling can tell it apart from real boards.
# NOTE(review): route/ticket counts appear to be snapshot fixtures copied
# from a reference repo — confirm before treating them as meaningful.
BOARD_SEEDS: dict[int, dict[str, Any]] = {
    857: {
        "board_id": "problem857_counting_gateboard",
        "problem_id": 857,
        "updated_utc": "",
        "starter_scaffold": True,
        "starter_note": "starter board generated by ORP pack install",
        "route_status": [
            {"route": "counting_uniform", "loose_done": 7, "loose_total": 7, "strict_done": 7, "strict_total": 7},
            {"route": "container_v2", "loose_done": 5, "loose_total": 5, "strict_done": 5, "strict_total": 5},
        ],
        "tickets": [
            {"ticket": "T1", "leaf": "CountingUniformCore", "leaf_strict": "done", "gates_done": 5, "gates_total": 5, "atoms_done": 14, "atoms_total": 14},
            {"ticket": "T6", "leaf": "ContainerLift", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 32, "atoms_total": 32},
        ],
        "ready_atoms": 0,
        "no_go_active": [],
    },
    20: {
        "board_id": "problem20_k3_gateboard",
        "problem_id": 20,
        "updated_utc": "",
        "starter_scaffold": True,
        "starter_note": "starter board generated by ORP pack install",
        "route_status": [
            {"route": "uniform_prize", "loose_done": 7, "loose_total": 7, "strict_done": 7, "strict_total": 7},
            {"route": "uniform_prize_final_k3", "loose_done": 5, "loose_total": 5, "strict_done": 5, "strict_total": 5},
            {"route": "uniform_prize_full_all_k", "loose_done": 0, "loose_total": 1, "strict_done": 0, "strict_total": 1},
        ],
        "tickets": [
            {"ticket": "T1", "leaf": "UniformBoundF3Global", "leaf_strict": "done", "gates_done": 5, "gates_total": 5, "atoms_done": 16, "atoms_total": 16},
            {"ticket": "T6", "leaf": "UniformK3From7BaseRangeHyp", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 77, "atoms_total": 77},
        ],
        "ready_atoms": 0,
        "no_go_active": [],
    },
    367: {
        "board_id": "problem367_sharp_gateboard",
        "problem_id": 367,
        "updated_utc": "",
        "starter_scaffold": True,
        "starter_note": "starter board generated by ORP pack install",
        "route_status": [
            {"route": "sieve_weighted_tail", "loose_done": 5, "loose_total": 5, "strict_done": 5, "strict_total": 5},
            {"route": "full", "loose_done": 9, "loose_total": 9, "strict_done": 8, "strict_total": 9},
            {"route": "sieve", "loose_done": 5, "loose_total": 5, "strict_done": 4, "strict_total": 5},
        ],
        "tickets": [
            {"ticket": "T1", "leaf": "LargeTwoFullPartRarity", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 15, "atoms_total": 15},
            {"ticket": "T5", "leaf": "LargeTwoFullPartRarity", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 15, "atoms_total": 15},
        ],
        "ready_atoms": 0,
        # Active no-go markers for problem 367; the starter runtime prints
        # these from the "ready" subcommand.
        "no_go_active": [
            "not_FilteredDyadicLargeRadCardBound10_6",
            "not_FilteredDyadicSmallRadCardBound10_4",
            "not_WeightedTailSeriesBoundOnLeFilteredDyadic10SplitBudgetAtoms",
        ],
    },
}
|
|
207
|
+
|
|
208
|
+
STARTER_RUNTIME = """#!/usr/bin/env python3
|
|
209
|
+
from __future__ import annotations
|
|
210
|
+
|
|
211
|
+
import argparse
|
|
212
|
+
import datetime as dt
|
|
213
|
+
import json
|
|
214
|
+
from pathlib import Path
|
|
215
|
+
import re
|
|
216
|
+
from typing import Any
|
|
217
|
+
|
|
218
|
+
BOARD_PATHS = {
|
|
219
|
+
857: "analysis/problem857_counting_gateboard.json",
|
|
220
|
+
20: "analysis/problem20_k3_gateboard.json",
|
|
221
|
+
367: "analysis/problem367_sharp_gateboard.json",
|
|
222
|
+
}
|
|
223
|
+
|
|
224
|
+
BOARD_MD_PATHS = {
|
|
225
|
+
857: "docs/PROBLEM857_COUNTING_OPS_BOARD.md",
|
|
226
|
+
20: "docs/PROBLEM20_K3_OPS_BOARD.md",
|
|
227
|
+
367: "docs/PROBLEM367_SHARP_OPS_BOARD.md",
|
|
228
|
+
}
|
|
229
|
+
|
|
230
|
+
BOARD_SEEDS = {
|
|
231
|
+
857: {
|
|
232
|
+
"board_id": "problem857_counting_gateboard",
|
|
233
|
+
"problem_id": 857,
|
|
234
|
+
"updated_utc": "",
|
|
235
|
+
"starter_scaffold": True,
|
|
236
|
+
"starter_note": "starter board generated by ORP pack install",
|
|
237
|
+
"route_status": [
|
|
238
|
+
{"route": "counting_uniform", "loose_done": 7, "loose_total": 7, "strict_done": 7, "strict_total": 7},
|
|
239
|
+
{"route": "container_v2", "loose_done": 5, "loose_total": 5, "strict_done": 5, "strict_total": 5},
|
|
240
|
+
],
|
|
241
|
+
"tickets": [
|
|
242
|
+
{"ticket": "T1", "leaf": "CountingUniformCore", "leaf_strict": "done", "gates_done": 5, "gates_total": 5, "atoms_done": 14, "atoms_total": 14},
|
|
243
|
+
{"ticket": "T6", "leaf": "ContainerLift", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 32, "atoms_total": 32},
|
|
244
|
+
],
|
|
245
|
+
"ready_atoms": 0,
|
|
246
|
+
"no_go_active": [],
|
|
247
|
+
},
|
|
248
|
+
20: {
|
|
249
|
+
"board_id": "problem20_k3_gateboard",
|
|
250
|
+
"problem_id": 20,
|
|
251
|
+
"updated_utc": "",
|
|
252
|
+
"starter_scaffold": True,
|
|
253
|
+
"starter_note": "starter board generated by ORP pack install",
|
|
254
|
+
"route_status": [
|
|
255
|
+
{"route": "uniform_prize", "loose_done": 7, "loose_total": 7, "strict_done": 7, "strict_total": 7},
|
|
256
|
+
{"route": "uniform_prize_final_k3", "loose_done": 5, "loose_total": 5, "strict_done": 5, "strict_total": 5},
|
|
257
|
+
{"route": "uniform_prize_full_all_k", "loose_done": 0, "loose_total": 1, "strict_done": 0, "strict_total": 1},
|
|
258
|
+
],
|
|
259
|
+
"tickets": [
|
|
260
|
+
{"ticket": "T1", "leaf": "UniformBoundF3Global", "leaf_strict": "done", "gates_done": 5, "gates_total": 5, "atoms_done": 16, "atoms_total": 16},
|
|
261
|
+
{"ticket": "T6", "leaf": "UniformK3From7BaseRangeHyp", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 77, "atoms_total": 77},
|
|
262
|
+
],
|
|
263
|
+
"ready_atoms": 0,
|
|
264
|
+
"no_go_active": [],
|
|
265
|
+
},
|
|
266
|
+
367: {
|
|
267
|
+
"board_id": "problem367_sharp_gateboard",
|
|
268
|
+
"problem_id": 367,
|
|
269
|
+
"updated_utc": "",
|
|
270
|
+
"starter_scaffold": True,
|
|
271
|
+
"starter_note": "starter board generated by ORP pack install",
|
|
272
|
+
"route_status": [
|
|
273
|
+
{"route": "sieve_weighted_tail", "loose_done": 5, "loose_total": 5, "strict_done": 5, "strict_total": 5},
|
|
274
|
+
{"route": "full", "loose_done": 9, "loose_total": 9, "strict_done": 8, "strict_total": 9},
|
|
275
|
+
{"route": "sieve", "loose_done": 5, "loose_total": 5, "strict_done": 4, "strict_total": 5},
|
|
276
|
+
],
|
|
277
|
+
"tickets": [
|
|
278
|
+
{"ticket": "T1", "leaf": "LargeTwoFullPartRarity", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 15, "atoms_total": 15},
|
|
279
|
+
{"ticket": "T5", "leaf": "LargeTwoFullPartRarity", "leaf_strict": "open", "gates_done": 5, "gates_total": 5, "atoms_done": 15, "atoms_total": 15},
|
|
280
|
+
],
|
|
281
|
+
"ready_atoms": 0,
|
|
282
|
+
"no_go_active": [
|
|
283
|
+
"not_FilteredDyadicLargeRadCardBound10_6",
|
|
284
|
+
"not_FilteredDyadicSmallRadCardBound10_4",
|
|
285
|
+
"not_WeightedTailSeriesBoundOnLeFilteredDyadic10SplitBudgetAtoms",
|
|
286
|
+
],
|
|
287
|
+
},
|
|
288
|
+
}
|
|
289
|
+
|
|
290
|
+
|
|
291
|
+
def _now_utc() -> str:
|
|
292
|
+
return dt.datetime.now(dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
def _board_path(root: Path, problem: int) -> Path:
|
|
296
|
+
return root / BOARD_PATHS[problem]
|
|
297
|
+
|
|
298
|
+
|
|
299
|
+
def _board_md_path(root: Path, problem: int) -> Path:
|
|
300
|
+
return root / BOARD_MD_PATHS[problem]
|
|
301
|
+
|
|
302
|
+
|
|
303
|
+
def _seed(problem: int) -> dict[str, Any]:
|
|
304
|
+
payload = json.loads(json.dumps(BOARD_SEEDS[problem]))
|
|
305
|
+
payload["updated_utc"] = _now_utc()
|
|
306
|
+
return payload
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
def _load_board(root: Path, problem: int) -> dict[str, Any]:
|
|
310
|
+
path = _board_path(root, problem)
|
|
311
|
+
if not path.exists():
|
|
312
|
+
board = _seed(problem)
|
|
313
|
+
_save_board(root, problem, board)
|
|
314
|
+
return board
|
|
315
|
+
return json.loads(path.read_text(encoding="utf-8"))
|
|
316
|
+
|
|
317
|
+
|
|
318
|
+
def _save_board(root: Path, problem: int, board: dict[str, Any]) -> None:
|
|
319
|
+
path = _board_path(root, problem)
|
|
320
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
321
|
+
path.write_text(json.dumps(board, indent=2) + "\\n", encoding="utf-8")
|
|
322
|
+
|
|
323
|
+
|
|
324
|
+
def _atoms(board: dict[str, Any]) -> dict[str, Any]:
|
|
325
|
+
atoms = board.get("atoms", {})
|
|
326
|
+
if isinstance(atoms, dict):
|
|
327
|
+
return atoms
|
|
328
|
+
return {}
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
def _ready_atom_ids(board: dict[str, Any]) -> list[str]:
|
|
332
|
+
ready_ids: list[str] = []
|
|
333
|
+
for atom_id, payload in _atoms(board).items():
|
|
334
|
+
if not isinstance(payload, dict):
|
|
335
|
+
continue
|
|
336
|
+
status = str(payload.get("status", "")).strip().lower()
|
|
337
|
+
if status == "ready":
|
|
338
|
+
ready_ids.append(str(atom_id))
|
|
339
|
+
return sorted(ready_ids)
|
|
340
|
+
|
|
341
|
+
|
|
342
|
+
def _sync_ready_atoms(board: dict[str, Any]) -> None:
|
|
343
|
+
board["ready_atoms"] = len(_ready_atom_ids(board))
|
|
344
|
+
|
|
345
|
+
|
|
346
|
+
def _write_board_md(root: Path, problem: int, board: dict[str, Any]) -> None:
|
|
347
|
+
_sync_ready_atoms(board)
|
|
348
|
+
lines = []
|
|
349
|
+
lines.append(f"# Problem {problem} Ops Board")
|
|
350
|
+
lines.append("")
|
|
351
|
+
lines.append(f"- updated_utc: `{board.get('updated_utc', '')}`")
|
|
352
|
+
lines.append(f"- ready_atoms: `{int(board.get('ready_atoms', 0))}`")
|
|
353
|
+
public_repo = board.get("public_repo", {})
|
|
354
|
+
if isinstance(public_repo, dict) and public_repo:
|
|
355
|
+
lines.append(f"- public_repo_url: `{public_repo.get('url', '')}`")
|
|
356
|
+
lines.append(f"- public_repo_ref: `{public_repo.get('ref', '')}`")
|
|
357
|
+
lines.append(f"- public_repo_sync_root: `{public_repo.get('sync_root', '')}`")
|
|
358
|
+
lines.append("")
|
|
359
|
+
lines.append("## Routes")
|
|
360
|
+
lines.append("")
|
|
361
|
+
for row in board.get("route_status", []):
|
|
362
|
+
if not isinstance(row, dict):
|
|
363
|
+
continue
|
|
364
|
+
lines.append(
|
|
365
|
+
"- route={route} loose={ld}/{lt} strict={sd}/{st}".format(
|
|
366
|
+
route=row.get("route", ""),
|
|
367
|
+
ld=row.get("loose_done", 0),
|
|
368
|
+
lt=row.get("loose_total", 0),
|
|
369
|
+
sd=row.get("strict_done", 0),
|
|
370
|
+
st=row.get("strict_total", 0),
|
|
371
|
+
)
|
|
372
|
+
)
|
|
373
|
+
lines.append("")
|
|
374
|
+
lines.append("## Tickets")
|
|
375
|
+
lines.append("")
|
|
376
|
+
for row in board.get("tickets", []):
|
|
377
|
+
if not isinstance(row, dict):
|
|
378
|
+
continue
|
|
379
|
+
lines.append(
|
|
380
|
+
"- ticket={ticket} leaf={leaf} leaf_strict={leaf_strict} gates={gd}/{gt} atoms={ad}/{at}".format(
|
|
381
|
+
ticket=row.get("ticket", ""),
|
|
382
|
+
leaf=row.get("leaf", ""),
|
|
383
|
+
leaf_strict=row.get("leaf_strict", ""),
|
|
384
|
+
gd=row.get("gates_done", 0),
|
|
385
|
+
gt=row.get("gates_total", 0),
|
|
386
|
+
ad=row.get("atoms_done", 0),
|
|
387
|
+
at=row.get("atoms_total", 0),
|
|
388
|
+
)
|
|
389
|
+
)
|
|
390
|
+
|
|
391
|
+
atoms = _atoms(board)
|
|
392
|
+
if atoms:
|
|
393
|
+
lines.append("")
|
|
394
|
+
lines.append("## Atom States")
|
|
395
|
+
lines.append("")
|
|
396
|
+
for atom_id in sorted(atoms):
|
|
397
|
+
payload = atoms.get(atom_id, {})
|
|
398
|
+
status = payload.get("status", "") if isinstance(payload, dict) else ""
|
|
399
|
+
lines.append(f"- atom={atom_id} status={status}")
|
|
400
|
+
|
|
401
|
+
out_path = _board_md_path(root, problem)
|
|
402
|
+
out_path.parent.mkdir(parents=True, exist_ok=True)
|
|
403
|
+
out_path.write_text("\\n".join(lines) + "\\n", encoding="utf-8")
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
def _print_show(board: dict[str, Any]) -> None:
|
|
407
|
+
_sync_ready_atoms(board)
|
|
408
|
+
print(f"updated_utc={board.get('updated_utc', '')}")
|
|
409
|
+
for row in board.get("route_status", []):
|
|
410
|
+
if not isinstance(row, dict):
|
|
411
|
+
continue
|
|
412
|
+
print(
|
|
413
|
+
"route={route} loose={ld}/{lt} strict={sd}/{st}".format(
|
|
414
|
+
route=row.get("route", ""),
|
|
415
|
+
ld=row.get("loose_done", 0),
|
|
416
|
+
lt=row.get("loose_total", 0),
|
|
417
|
+
sd=row.get("strict_done", 0),
|
|
418
|
+
st=row.get("strict_total", 0),
|
|
419
|
+
)
|
|
420
|
+
)
|
|
421
|
+
for row in board.get("tickets", []):
|
|
422
|
+
if not isinstance(row, dict):
|
|
423
|
+
continue
|
|
424
|
+
print(
|
|
425
|
+
"ticket={ticket} leaf={leaf} leaf_strict={leaf_strict} gates={gd}/{gt} atoms={ad}/{at}".format(
|
|
426
|
+
ticket=row.get("ticket", ""),
|
|
427
|
+
leaf=row.get("leaf", ""),
|
|
428
|
+
leaf_strict=row.get("leaf_strict", ""),
|
|
429
|
+
gd=row.get("gates_done", 0),
|
|
430
|
+
gt=row.get("gates_total", 0),
|
|
431
|
+
ad=row.get("atoms_done", 0),
|
|
432
|
+
at=row.get("atoms_total", 0),
|
|
433
|
+
)
|
|
434
|
+
)
|
|
435
|
+
atoms = _atoms(board)
|
|
436
|
+
for atom_id in sorted(atoms):
|
|
437
|
+
payload = atoms.get(atom_id, {})
|
|
438
|
+
status = payload.get("status", "") if isinstance(payload, dict) else ""
|
|
439
|
+
print(f"atom={atom_id} status={status}")
|
|
440
|
+
print(f"ready_atoms={int(board.get('ready_atoms', 0))}")
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
def main() -> int:
|
|
444
|
+
parser = argparse.ArgumentParser(description="Starter atomic board ops runtime")
|
|
445
|
+
parser.add_argument("--problem", required=True, type=int, choices=[857, 20, 367])
|
|
446
|
+
sub = parser.add_subparsers(dest="cmd", required=True)
|
|
447
|
+
|
|
448
|
+
sub.add_parser("show")
|
|
449
|
+
|
|
450
|
+
ready = sub.add_parser("ready")
|
|
451
|
+
ready.add_argument("--allow-no-go", action="store_true")
|
|
452
|
+
|
|
453
|
+
refresh = sub.add_parser("refresh")
|
|
454
|
+
refresh.add_argument("--write-md", action="store_true")
|
|
455
|
+
refresh.add_argument("--sync-json", action="store_true")
|
|
456
|
+
|
|
457
|
+
set_atom = sub.add_parser("set-atom")
|
|
458
|
+
set_atom.add_argument("atom_id")
|
|
459
|
+
set_atom.add_argument("--status", required=True)
|
|
460
|
+
|
|
461
|
+
args = parser.parse_args()
|
|
462
|
+
|
|
463
|
+
root = Path(".").resolve()
|
|
464
|
+
board = _load_board(root, args.problem)
|
|
465
|
+
|
|
466
|
+
if args.cmd == "show":
|
|
467
|
+
_print_show(board)
|
|
468
|
+
return 0
|
|
469
|
+
|
|
470
|
+
if args.cmd == "ready":
|
|
471
|
+
_sync_ready_atoms(board)
|
|
472
|
+
atoms = _atoms(board)
|
|
473
|
+
if args.problem == 367:
|
|
474
|
+
no_go = board.get("no_go_active", [])
|
|
475
|
+
if isinstance(no_go, list):
|
|
476
|
+
print("no_go_active=" + ",".join(str(x) for x in no_go))
|
|
477
|
+
else:
|
|
478
|
+
print("no_go_active=")
|
|
479
|
+
for atom_id in _ready_atom_ids(board):
|
|
480
|
+
payload = atoms.get(atom_id, {})
|
|
481
|
+
ticket = "starter"
|
|
482
|
+
gate = "ready"
|
|
483
|
+
deps = "root"
|
|
484
|
+
if isinstance(payload, dict):
|
|
485
|
+
raw_ticket = str(payload.get("ticket_id", "")).strip()
|
|
486
|
+
raw_gate = str(payload.get("gate_id", "")).strip()
|
|
487
|
+
raw_deps = payload.get("deps", "root")
|
|
488
|
+
if raw_ticket:
|
|
489
|
+
ticket = raw_ticket
|
|
490
|
+
if raw_gate:
|
|
491
|
+
gate = raw_gate
|
|
492
|
+
if isinstance(raw_deps, list):
|
|
493
|
+
deps_items = [str(x).strip() for x in raw_deps if str(x).strip()]
|
|
494
|
+
deps = ",".join(deps_items) if deps_items else "root"
|
|
495
|
+
else:
|
|
496
|
+
deps_text = str(raw_deps).strip()
|
|
497
|
+
deps = deps_text or "root"
|
|
498
|
+
print(f"ready={atom_id} ticket={ticket} gate={gate} deps={deps}")
|
|
499
|
+
print(f"ready_atoms={int(board.get('ready_atoms', 0))}")
|
|
500
|
+
return 0
|
|
501
|
+
|
|
502
|
+
if args.cmd == "refresh":
|
|
503
|
+
board["updated_utc"] = _now_utc()
|
|
504
|
+
_sync_ready_atoms(board)
|
|
505
|
+
_save_board(root, args.problem, board)
|
|
506
|
+
if args.write_md:
|
|
507
|
+
_write_board_md(root, args.problem, board)
|
|
508
|
+
print(f"refreshed_board={_board_path(root, args.problem)}")
|
|
509
|
+
if args.write_md:
|
|
510
|
+
print(f"refreshed_board_md={_board_md_path(root, args.problem)}")
|
|
511
|
+
return 0
|
|
512
|
+
|
|
513
|
+
if args.cmd == "set-atom":
|
|
514
|
+
atoms = board.setdefault("atoms", {})
|
|
515
|
+
if not isinstance(atoms, dict):
|
|
516
|
+
atoms = {}
|
|
517
|
+
board["atoms"] = atoms
|
|
518
|
+
atoms[args.atom_id] = {"status": args.status}
|
|
519
|
+
board["updated_utc"] = _now_utc()
|
|
520
|
+
_sync_ready_atoms(board)
|
|
521
|
+
_save_board(root, args.problem, board)
|
|
522
|
+
print(f"atom_id={args.atom_id}")
|
|
523
|
+
print(f"atom_status={args.status}")
|
|
524
|
+
return 0
|
|
525
|
+
|
|
526
|
+
return 2
|
|
527
|
+
|
|
528
|
+
|
|
529
|
+
if __name__ == "__main__":
|
|
530
|
+
raise SystemExit(main())
|
|
531
|
+
"""
|
|
532
|
+
|
|
533
|
+
STARTER_FRONTIER = """#!/usr/bin/env python3
|
|
534
|
+
from __future__ import annotations
|
|
535
|
+
|
|
536
|
+
import argparse
|
|
537
|
+
import json
|
|
538
|
+
from pathlib import Path
|
|
539
|
+
import re
|
|
540
|
+
from typing import Any
|
|
541
|
+
|
|
542
|
+
BOARD_PATHS = {
|
|
543
|
+
857: "analysis/problem857_counting_gateboard.json",
|
|
544
|
+
20: "analysis/problem20_k3_gateboard.json",
|
|
545
|
+
367: "analysis/problem367_sharp_gateboard.json",
|
|
546
|
+
}
|
|
547
|
+
|
|
548
|
+
|
|
549
|
+
def _load_board(root: Path, problem: int) -> dict[str, Any]:
|
|
550
|
+
path = root / BOARD_PATHS[problem]
|
|
551
|
+
if not path.exists():
|
|
552
|
+
return {"route_status": []}
|
|
553
|
+
return json.loads(path.read_text(encoding="utf-8"))
|
|
554
|
+
|
|
555
|
+
|
|
556
|
+
def _pct(done: int, total: int) -> int:
|
|
557
|
+
if total <= 0:
|
|
558
|
+
return 0
|
|
559
|
+
return int(round((100.0 * done) / total))
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
def main() -> int:
|
|
563
|
+
parser = argparse.ArgumentParser(description="Starter frontier status")
|
|
564
|
+
parser.add_argument("--problem", required=True, type=int, choices=[857, 20, 367])
|
|
565
|
+
args = parser.parse_args()
|
|
566
|
+
|
|
567
|
+
board = _load_board(Path(".").resolve(), args.problem)
|
|
568
|
+
routes = board.get("route_status", [])
|
|
569
|
+
if not isinstance(routes, list):
|
|
570
|
+
routes = []
|
|
571
|
+
|
|
572
|
+
print(f"== Loose Routes ({args.problem}) ==")
|
|
573
|
+
best_name = ""
|
|
574
|
+
best_pct = -1
|
|
575
|
+
for row in routes:
|
|
576
|
+
if not isinstance(row, dict):
|
|
577
|
+
continue
|
|
578
|
+
name = str(row.get("route", "")).strip()
|
|
579
|
+
ld = int(row.get("loose_done", 0) or 0)
|
|
580
|
+
lt = int(row.get("loose_total", 0) or 0)
|
|
581
|
+
print(f"{name}: {ld}/{lt}")
|
|
582
|
+
pct = _pct(ld, lt)
|
|
583
|
+
if pct > best_pct:
|
|
584
|
+
best_pct = pct
|
|
585
|
+
best_name = name
|
|
586
|
+
|
|
587
|
+
print("")
|
|
588
|
+
print(f"== Strict Routes ({args.problem}) ==")
|
|
589
|
+
for row in routes:
|
|
590
|
+
if not isinstance(row, dict):
|
|
591
|
+
continue
|
|
592
|
+
name = str(row.get("route", "")).strip()
|
|
593
|
+
sd = int(row.get("strict_done", 0) or 0)
|
|
594
|
+
st = int(row.get("strict_total", 0) or 0)
|
|
595
|
+
print(f"{name}: {sd}/{st}")
|
|
596
|
+
|
|
597
|
+
print("")
|
|
598
|
+
print("== Next Focus ==")
|
|
599
|
+
if best_name:
|
|
600
|
+
print(f"Best loose route: {best_name} ({best_pct}%)")
|
|
601
|
+
else:
|
|
602
|
+
print("Best loose route: (none) (0%)")
|
|
603
|
+
return 0
|
|
604
|
+
|
|
605
|
+
|
|
606
|
+
if __name__ == "__main__":
|
|
607
|
+
raise SystemExit(main())
|
|
608
|
+
"""
|
|
609
|
+
|
|
610
|
+
STARTER_WRAPPER = """#!/usr/bin/env python3
|
|
611
|
+
from __future__ import annotations
|
|
612
|
+
|
|
613
|
+
from pathlib import Path
|
|
614
|
+
import subprocess
|
|
615
|
+
import sys
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
def main() -> int:
|
|
619
|
+
root = Path(__file__).resolve().parent
|
|
620
|
+
cmd = [sys.executable, str(root / "orp_atomic_board_runtime.py"), "--problem", "{PROBLEM}", *sys.argv[1:]]
|
|
621
|
+
return int(subprocess.call(cmd))
|
|
622
|
+
|
|
623
|
+
|
|
624
|
+
if __name__ == "__main__":
|
|
625
|
+
raise SystemExit(main())
|
|
626
|
+
"""
|
|
627
|
+
|
|
628
|
+
STARTER_SPEC_CHECK = """#!/usr/bin/env python3
|
|
629
|
+
from __future__ import annotations
|
|
630
|
+
|
|
631
|
+
import argparse
|
|
632
|
+
import datetime as dt
|
|
633
|
+
import json
|
|
634
|
+
from pathlib import Path
|
|
635
|
+
import re
|
|
636
|
+
from typing import Any
|
|
637
|
+
|
|
638
|
+
|
|
639
|
+
PROBLEM_ID = 857
|
|
640
|
+
DEFAULT_SELECTED_PROBLEM = "analysis/erdos_problems/selected/erdos_problem.857.json"
|
|
641
|
+
LEGACY_SELECTED_PROBLEM = "analysis/selected/erdos_problem.857.json"
|
|
642
|
+
DEFAULT_SCOPE = "orchestrator/v2/scopes/problem_857.yaml"
|
|
643
|
+
DEFAULT_BOARD = "analysis/problem857_counting_gateboard.json"
|
|
644
|
+
|
|
645
|
+
|
|
646
|
+
def _now_utc() -> str:
|
|
647
|
+
return dt.datetime.now(dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z")
|
|
648
|
+
|
|
649
|
+
|
|
650
|
+
def _rel(path: Path) -> str:
|
|
651
|
+
try:
|
|
652
|
+
return str(path.relative_to(Path(".").resolve()))
|
|
653
|
+
except Exception:
|
|
654
|
+
return str(path)
|
|
655
|
+
|
|
656
|
+
|
|
657
|
+
def _coerce_scalar(text: str) -> Any:
|
|
658
|
+
raw = text.strip()
|
|
659
|
+
if not raw:
|
|
660
|
+
return ""
|
|
661
|
+
if raw.startswith(("'", '"')) and raw.endswith(("'", '"')) and len(raw) >= 2:
|
|
662
|
+
raw = raw[1:-1]
|
|
663
|
+
lowered = raw.lower()
|
|
664
|
+
if lowered == "true":
|
|
665
|
+
return True
|
|
666
|
+
if lowered == "false":
|
|
667
|
+
return False
|
|
668
|
+
if raw.isdigit():
|
|
669
|
+
return int(raw)
|
|
670
|
+
return raw
|
|
671
|
+
|
|
672
|
+
|
|
673
|
+
def _load_scope(path: Path) -> dict[str, Any]:
|
|
674
|
+
text = path.read_text(encoding="utf-8")
|
|
675
|
+
try:
|
|
676
|
+
import yaml # type: ignore
|
|
677
|
+
except Exception:
|
|
678
|
+
yaml = None # type: ignore[assignment]
|
|
679
|
+
|
|
680
|
+
if yaml is not None:
|
|
681
|
+
loaded = yaml.safe_load(text)
|
|
682
|
+
if isinstance(loaded, dict):
|
|
683
|
+
return loaded
|
|
684
|
+
|
|
685
|
+
payload: dict[str, Any] = {}
|
|
686
|
+
active_list: str | None = None
|
|
687
|
+
for raw_line in text.splitlines():
|
|
688
|
+
if not raw_line.strip() or raw_line.lstrip().startswith("#"):
|
|
689
|
+
continue
|
|
690
|
+
if active_list and raw_line.startswith(" - "):
|
|
691
|
+
values = payload.setdefault(active_list, [])
|
|
692
|
+
if isinstance(values, list):
|
|
693
|
+
values.append(_coerce_scalar(raw_line.split("- ", 1)[1]))
|
|
694
|
+
continue
|
|
695
|
+
active_list = None
|
|
696
|
+
if ":" not in raw_line:
|
|
697
|
+
continue
|
|
698
|
+
key, value = raw_line.split(":", 1)
|
|
699
|
+
key = key.strip()
|
|
700
|
+
value = value.strip()
|
|
701
|
+
if not value:
|
|
702
|
+
payload[key] = []
|
|
703
|
+
active_list = key
|
|
704
|
+
continue
|
|
705
|
+
payload[key] = _coerce_scalar(value)
|
|
706
|
+
return payload
|
|
707
|
+
|
|
708
|
+
|
|
709
|
+
def _add_check(
|
|
710
|
+
checks: list[dict[str, Any]],
|
|
711
|
+
*,
|
|
712
|
+
check_id: str,
|
|
713
|
+
ok: bool,
|
|
714
|
+
detail: str,
|
|
715
|
+
path: Path | None = None,
|
|
716
|
+
expected: Any = None,
|
|
717
|
+
actual: Any = None,
|
|
718
|
+
) -> bool:
|
|
719
|
+
row: dict[str, Any] = {
|
|
720
|
+
"id": check_id,
|
|
721
|
+
"status": "PASS" if ok else "FAIL",
|
|
722
|
+
"detail": detail,
|
|
723
|
+
}
|
|
724
|
+
if path is not None:
|
|
725
|
+
row["path"] = _rel(path)
|
|
726
|
+
if expected is not None:
|
|
727
|
+
row["expected"] = expected
|
|
728
|
+
if actual is not None:
|
|
729
|
+
row["actual"] = actual
|
|
730
|
+
checks.append(row)
|
|
731
|
+
return ok
|
|
732
|
+
|
|
733
|
+
|
|
734
|
+
def _load_json(path: Path) -> dict[str, Any]:
|
|
735
|
+
payload = json.loads(path.read_text(encoding="utf-8"))
|
|
736
|
+
if not isinstance(payload, dict):
|
|
737
|
+
raise RuntimeError(f"json root must be object: {path}")
|
|
738
|
+
return payload
|
|
739
|
+
|
|
740
|
+
|
|
741
|
+
def _candidate_paths(raw: list[str]) -> list[Path]:
|
|
742
|
+
out: list[Path] = []
|
|
743
|
+
seen: set[str] = set()
|
|
744
|
+
for entry in raw:
|
|
745
|
+
value = entry.strip()
|
|
746
|
+
if not value or value in seen:
|
|
747
|
+
continue
|
|
748
|
+
seen.add(value)
|
|
749
|
+
out.append(Path(value))
|
|
750
|
+
return out
|
|
751
|
+
|
|
752
|
+
|
|
753
|
+
def _first_existing(paths: list[Path]) -> Path | None:
|
|
754
|
+
root = Path(".").resolve()
|
|
755
|
+
for candidate in paths:
|
|
756
|
+
full = candidate if candidate.is_absolute() else root / candidate
|
|
757
|
+
if full.exists():
|
|
758
|
+
return full
|
|
759
|
+
return None
|
|
760
|
+
|
|
761
|
+
|
|
762
|
+
def _problem_id_from_value(value: Any) -> int:
|
|
763
|
+
if isinstance(value, int):
|
|
764
|
+
return value
|
|
765
|
+
text = str(value).strip()
|
|
766
|
+
if not text:
|
|
767
|
+
return 0
|
|
768
|
+
try:
|
|
769
|
+
return int(text)
|
|
770
|
+
except Exception:
|
|
771
|
+
match = re.search(r"(\\d+)", text)
|
|
772
|
+
if match:
|
|
773
|
+
return int(match.group(1))
|
|
774
|
+
return 0
|
|
775
|
+
|
|
776
|
+
|
|
777
|
+
def main() -> int:
|
|
778
|
+
parser = argparse.ArgumentParser(
|
|
779
|
+
description="Validate public Problem 857 scope consistency against synced Erdos data"
|
|
780
|
+
)
|
|
781
|
+
parser.add_argument("--run-id", required=True)
|
|
782
|
+
parser.add_argument("--problem-id", type=int, default=PROBLEM_ID)
|
|
783
|
+
parser.add_argument(
|
|
784
|
+
"--scope-mode",
|
|
785
|
+
default="starter",
|
|
786
|
+
help="Scope schema mode to validate (starter|public_repo).",
|
|
787
|
+
)
|
|
788
|
+
parser.add_argument(
|
|
789
|
+
"--expect-starter-scaffold",
|
|
790
|
+
default="true",
|
|
791
|
+
help="Whether the board is expected to be marked starter_scaffold (true|false).",
|
|
792
|
+
)
|
|
793
|
+
parser.add_argument(
|
|
794
|
+
"--selected-problem",
|
|
795
|
+
action="append",
|
|
796
|
+
default=[],
|
|
797
|
+
help="Selected problem JSON path (repeatable; first existing path wins).",
|
|
798
|
+
)
|
|
799
|
+
parser.add_argument("--scope", default=DEFAULT_SCOPE, help="Scope YAML path.")
|
|
800
|
+
parser.add_argument("--board", default=DEFAULT_BOARD, help="Board JSON path.")
|
|
801
|
+
args = parser.parse_args()
|
|
802
|
+
scope_mode = str(args.scope_mode).strip().lower()
|
|
803
|
+
if scope_mode not in {"starter", "public_repo"}:
|
|
804
|
+
raise RuntimeError(f"unsupported --scope-mode: {scope_mode}")
|
|
805
|
+
expected_starter_scaffold = str(args.expect_starter_scaffold).strip().lower() in {
|
|
806
|
+
"1",
|
|
807
|
+
"true",
|
|
808
|
+
"yes",
|
|
809
|
+
"on",
|
|
810
|
+
}
|
|
811
|
+
|
|
812
|
+
checks: list[dict[str, Any]] = []
|
|
813
|
+
selected_candidates = _candidate_paths(
|
|
814
|
+
list(args.selected_problem) or [DEFAULT_SELECTED_PROBLEM, LEGACY_SELECTED_PROBLEM]
|
|
815
|
+
)
|
|
816
|
+
selected_path = _first_existing(selected_candidates)
|
|
817
|
+
scope_path = Path(args.scope).resolve()
|
|
818
|
+
board_path = Path(args.board).resolve()
|
|
819
|
+
|
|
820
|
+
selected_payload: dict[str, Any] = {}
|
|
821
|
+
if selected_path is None:
|
|
822
|
+
checked = ", ".join(str(path) for path in selected_candidates)
|
|
823
|
+
_add_check(
|
|
824
|
+
checks,
|
|
825
|
+
check_id="selected_problem_exists",
|
|
826
|
+
ok=False,
|
|
827
|
+
detail=f"selected problem JSON not found; checked: {checked}",
|
|
828
|
+
)
|
|
829
|
+
else:
|
|
830
|
+
try:
|
|
831
|
+
selected_payload = _load_json(selected_path)
|
|
832
|
+
except Exception as exc:
|
|
833
|
+
_add_check(
|
|
834
|
+
checks,
|
|
835
|
+
check_id="selected_problem_json_valid",
|
|
836
|
+
ok=False,
|
|
837
|
+
detail=f"failed to parse selected problem JSON: {exc}",
|
|
838
|
+
path=selected_path,
|
|
839
|
+
)
|
|
840
|
+
else:
|
|
841
|
+
_add_check(
|
|
842
|
+
checks,
|
|
843
|
+
check_id="selected_problem_exists",
|
|
844
|
+
ok=True,
|
|
845
|
+
detail="selected problem JSON is present",
|
|
846
|
+
path=selected_path,
|
|
847
|
+
)
|
|
848
|
+
|
|
849
|
+
problem_payload = (
|
|
850
|
+
selected_payload.get("problem", {})
|
|
851
|
+
if isinstance(selected_payload.get("problem"), dict)
|
|
852
|
+
else {}
|
|
853
|
+
)
|
|
854
|
+
source_payload = (
|
|
855
|
+
selected_payload.get("source", {})
|
|
856
|
+
if isinstance(selected_payload.get("source"), dict)
|
|
857
|
+
else {}
|
|
858
|
+
)
|
|
859
|
+
|
|
860
|
+
public_problem_id = _problem_id_from_value(problem_payload.get("problem_id", 0))
|
|
861
|
+
_add_check(
|
|
862
|
+
checks,
|
|
863
|
+
check_id="selected_problem_id_matches",
|
|
864
|
+
ok=public_problem_id == int(args.problem_id),
|
|
865
|
+
detail="public selected problem id matches workflow problem id",
|
|
866
|
+
path=selected_path,
|
|
867
|
+
expected=int(args.problem_id),
|
|
868
|
+
actual=public_problem_id,
|
|
869
|
+
)
|
|
870
|
+
public_status = str(problem_payload.get("status_bucket", "")).strip()
|
|
871
|
+
_add_check(
|
|
872
|
+
checks,
|
|
873
|
+
check_id="selected_problem_status_present",
|
|
874
|
+
ok=bool(public_status),
|
|
875
|
+
detail="public selected problem includes status_bucket",
|
|
876
|
+
path=selected_path,
|
|
877
|
+
actual=public_status,
|
|
878
|
+
)
|
|
879
|
+
source_ok = (
|
|
880
|
+
str(source_payload.get("site", "")).strip() == "erdosproblems.com"
|
|
881
|
+
and bool(str(source_payload.get("url", "")).strip())
|
|
882
|
+
and bool(str(source_payload.get("source_sha256", "")).strip())
|
|
883
|
+
)
|
|
884
|
+
_add_check(
|
|
885
|
+
checks,
|
|
886
|
+
check_id="selected_problem_source_metadata_present",
|
|
887
|
+
ok=source_ok,
|
|
888
|
+
detail="public selected problem records Erdos source metadata",
|
|
889
|
+
path=selected_path,
|
|
890
|
+
expected="site=erdosproblems.com with url and source_sha256",
|
|
891
|
+
actual={
|
|
892
|
+
"site": str(source_payload.get("site", "")).strip(),
|
|
893
|
+
"url": str(source_payload.get("url", "")).strip(),
|
|
894
|
+
"source_sha256": str(source_payload.get("source_sha256", "")).strip(),
|
|
895
|
+
},
|
|
896
|
+
)
|
|
897
|
+
statement = str(problem_payload.get("statement", "")).strip()
|
|
898
|
+
_add_check(
|
|
899
|
+
checks,
|
|
900
|
+
check_id="selected_problem_statement_present",
|
|
901
|
+
ok=bool(statement),
|
|
902
|
+
detail="public selected problem includes a non-empty statement",
|
|
903
|
+
path=selected_path,
|
|
904
|
+
)
|
|
905
|
+
|
|
906
|
+
scope_payload: dict[str, Any] = {}
|
|
907
|
+
if not scope_path.exists():
|
|
908
|
+
_add_check(
|
|
909
|
+
checks,
|
|
910
|
+
check_id="scope_exists",
|
|
911
|
+
ok=False,
|
|
912
|
+
detail="installed Problem 857 scope YAML is missing",
|
|
913
|
+
path=scope_path,
|
|
914
|
+
)
|
|
915
|
+
else:
|
|
916
|
+
try:
|
|
917
|
+
scope_payload = _load_scope(scope_path)
|
|
918
|
+
except Exception as exc:
|
|
919
|
+
_add_check(
|
|
920
|
+
checks,
|
|
921
|
+
check_id="scope_yaml_valid",
|
|
922
|
+
ok=False,
|
|
923
|
+
detail=f"failed to parse scope YAML: {exc}",
|
|
924
|
+
path=scope_path,
|
|
925
|
+
)
|
|
926
|
+
else:
|
|
927
|
+
_add_check(
|
|
928
|
+
checks,
|
|
929
|
+
check_id="scope_exists",
|
|
930
|
+
ok=True,
|
|
931
|
+
detail="installed Problem 857 scope YAML is present",
|
|
932
|
+
path=scope_path,
|
|
933
|
+
)
|
|
934
|
+
|
|
935
|
+
scope_problem_id = _problem_id_from_value(scope_payload.get("problem_id", 0))
|
|
936
|
+
_add_check(
|
|
937
|
+
checks,
|
|
938
|
+
check_id="scope_problem_id_matches",
|
|
939
|
+
ok=scope_problem_id == int(args.problem_id),
|
|
940
|
+
detail="scope problem id matches workflow problem id",
|
|
941
|
+
path=scope_path,
|
|
942
|
+
expected=int(args.problem_id),
|
|
943
|
+
actual=scope_problem_id,
|
|
944
|
+
)
|
|
945
|
+
if scope_mode == "starter":
|
|
946
|
+
scope_status = str(scope_payload.get("status", "")).strip()
|
|
947
|
+
_add_check(
|
|
948
|
+
checks,
|
|
949
|
+
check_id="scope_status_matches_public_status",
|
|
950
|
+
ok=bool(scope_status) and bool(public_status) and scope_status == public_status,
|
|
951
|
+
detail="scope status matches public selected problem status",
|
|
952
|
+
path=scope_path,
|
|
953
|
+
expected=public_status or "(non-empty public status)",
|
|
954
|
+
actual=scope_status,
|
|
955
|
+
)
|
|
956
|
+
scope_name = str(scope_payload.get("name", "")).strip()
|
|
957
|
+
_add_check(
|
|
958
|
+
checks,
|
|
959
|
+
check_id="scope_name_mentions_problem",
|
|
960
|
+
ok=str(args.problem_id) in scope_name,
|
|
961
|
+
detail="scope name mentions the workflow problem id",
|
|
962
|
+
path=scope_path,
|
|
963
|
+
expected=f"contains {args.problem_id}",
|
|
964
|
+
actual=scope_name,
|
|
965
|
+
)
|
|
966
|
+
else:
|
|
967
|
+
scope_display_name = str(scope_payload.get("display_name", scope_payload.get("name", ""))).strip()
|
|
968
|
+
_add_check(
|
|
969
|
+
checks,
|
|
970
|
+
check_id="scope_display_name_mentions_problem",
|
|
971
|
+
ok=str(args.problem_id) in scope_display_name,
|
|
972
|
+
detail="public scope display name mentions the workflow problem id",
|
|
973
|
+
path=scope_path,
|
|
974
|
+
expected=f"contains {args.problem_id}",
|
|
975
|
+
actual=scope_display_name,
|
|
976
|
+
)
|
|
977
|
+
lean_files = scope_payload.get("lean_files", [])
|
|
978
|
+
_add_check(
|
|
979
|
+
checks,
|
|
980
|
+
check_id="scope_lean_files_present",
|
|
981
|
+
ok=isinstance(lean_files, list) and len(lean_files) > 0,
|
|
982
|
+
detail="public scope lists Lean files for the problem workspace",
|
|
983
|
+
path=scope_path,
|
|
984
|
+
expected="non-empty list",
|
|
985
|
+
actual=f"{len(lean_files) if isinstance(lean_files, list) else 0} files",
|
|
986
|
+
)
|
|
987
|
+
north_star = str(scope_payload.get("north_star_lane", "")).strip()
|
|
988
|
+
reduction_route = str(scope_payload.get("reduction_route", "")).strip()
|
|
989
|
+
_add_check(
|
|
990
|
+
checks,
|
|
991
|
+
check_id="scope_routes_present",
|
|
992
|
+
ok=bool(north_star or reduction_route),
|
|
993
|
+
detail="public scope records an active route or north-star lane",
|
|
994
|
+
path=scope_path,
|
|
995
|
+
expected="north_star_lane or reduction_route",
|
|
996
|
+
actual={"north_star_lane": north_star, "reduction_route": reduction_route},
|
|
997
|
+
)
|
|
998
|
+
|
|
999
|
+
board_payload: dict[str, Any] = {}
|
|
1000
|
+
if not board_path.exists():
|
|
1001
|
+
_add_check(
|
|
1002
|
+
checks,
|
|
1003
|
+
check_id="board_exists",
|
|
1004
|
+
ok=False,
|
|
1005
|
+
detail="starter board JSON is missing",
|
|
1006
|
+
path=board_path,
|
|
1007
|
+
)
|
|
1008
|
+
else:
|
|
1009
|
+
try:
|
|
1010
|
+
board_payload = _load_json(board_path)
|
|
1011
|
+
except Exception as exc:
|
|
1012
|
+
_add_check(
|
|
1013
|
+
checks,
|
|
1014
|
+
check_id="board_json_valid",
|
|
1015
|
+
ok=False,
|
|
1016
|
+
detail=f"failed to parse board JSON: {exc}",
|
|
1017
|
+
path=board_path,
|
|
1018
|
+
)
|
|
1019
|
+
else:
|
|
1020
|
+
_add_check(
|
|
1021
|
+
checks,
|
|
1022
|
+
check_id="board_exists",
|
|
1023
|
+
ok=True,
|
|
1024
|
+
detail="starter board JSON is present",
|
|
1025
|
+
path=board_path,
|
|
1026
|
+
)
|
|
1027
|
+
|
|
1028
|
+
board_problem_id = _problem_id_from_value(board_payload.get("problem_id", 0))
|
|
1029
|
+
_add_check(
|
|
1030
|
+
checks,
|
|
1031
|
+
check_id="board_problem_id_matches",
|
|
1032
|
+
ok=board_problem_id == int(args.problem_id),
|
|
1033
|
+
detail="board problem id matches workflow problem id",
|
|
1034
|
+
path=board_path,
|
|
1035
|
+
expected=int(args.problem_id),
|
|
1036
|
+
actual=board_problem_id,
|
|
1037
|
+
)
|
|
1038
|
+
_add_check(
|
|
1039
|
+
checks,
|
|
1040
|
+
check_id="board_starter_scaffold_matches_mode",
|
|
1041
|
+
ok=bool(board_payload.get("starter_scaffold", False)) == expected_starter_scaffold,
|
|
1042
|
+
detail="board starter_scaffold flag matches the expected workflow mode",
|
|
1043
|
+
path=board_path,
|
|
1044
|
+
expected=expected_starter_scaffold,
|
|
1045
|
+
actual=bool(board_payload.get("starter_scaffold", False)),
|
|
1046
|
+
)
|
|
1047
|
+
live_payload = board_payload.get("live_snapshot", board_payload.get("live", {}))
|
|
1048
|
+
if not isinstance(live_payload, dict):
|
|
1049
|
+
live_payload = {}
|
|
1050
|
+
route_status = board_payload.get("route_status", live_payload.get("routes", []))
|
|
1051
|
+
_add_check(
|
|
1052
|
+
checks,
|
|
1053
|
+
check_id="board_route_status_present",
|
|
1054
|
+
ok=isinstance(route_status, list) and len(route_status) > 0,
|
|
1055
|
+
detail="board includes route status rows",
|
|
1056
|
+
path=board_path,
|
|
1057
|
+
expected="non-empty list",
|
|
1058
|
+
actual=f"{len(route_status) if isinstance(route_status, list) else 0} rows",
|
|
1059
|
+
)
|
|
1060
|
+
|
|
1061
|
+
failures = [row for row in checks if row.get("status") == "FAIL"]
|
|
1062
|
+
status = "PASS" if not failures else "FAIL"
|
|
1063
|
+
out_path = Path("orchestrator") / "logs" / args.run_id / "SPEC_CHECK.json"
|
|
1064
|
+
out_path.parent.mkdir(parents=True, exist_ok=True)
|
|
1065
|
+
payload = {
|
|
1066
|
+
"status": status,
|
|
1067
|
+
"checker": "problem857_public_spec_check",
|
|
1068
|
+
"run_id": args.run_id,
|
|
1069
|
+
"problem_id": int(args.problem_id),
|
|
1070
|
+
"scope_mode": scope_mode,
|
|
1071
|
+
"checked_at_utc": _now_utc(),
|
|
1072
|
+
"note": (
|
|
1073
|
+
"Validates that the synced public Problem 857 payload, installed scope, "
|
|
1074
|
+
"and board agree on the target problem."
|
|
1075
|
+
),
|
|
1076
|
+
"starter_scaffold": bool(board_payload.get("starter_scaffold", False)),
|
|
1077
|
+
"selected_problem_path": _rel(selected_path) if selected_path is not None else "",
|
|
1078
|
+
"scope_path": _rel(scope_path),
|
|
1079
|
+
"board_path": _rel(board_path),
|
|
1080
|
+
"summary": {
|
|
1081
|
+
"passed": len(checks) - len(failures),
|
|
1082
|
+
"failed": len(failures),
|
|
1083
|
+
"total": len(checks),
|
|
1084
|
+
},
|
|
1085
|
+
"public_problem": {
|
|
1086
|
+
"problem_id": public_problem_id,
|
|
1087
|
+
"status_bucket": public_status,
|
|
1088
|
+
"status_label": str(problem_payload.get("status_label", "")).strip(),
|
|
1089
|
+
"problem_url": str(problem_payload.get("problem_url", "")).strip(),
|
|
1090
|
+
},
|
|
1091
|
+
"checks": checks,
|
|
1092
|
+
}
|
|
1093
|
+
out_path.write_text(json.dumps(payload, indent=2) + "\\n", encoding="utf-8")
|
|
1094
|
+
|
|
1095
|
+
print(f"spec_check={status}")
|
|
1096
|
+
if selected_path is not None:
|
|
1097
|
+
print(f"selected_problem_json={_rel(selected_path)}")
|
|
1098
|
+
print(f"scope_yaml={_rel(scope_path)}")
|
|
1099
|
+
print(f"board_json={_rel(board_path)}")
|
|
1100
|
+
print(f"spec_check_json={out_path}")
|
|
1101
|
+
return 0 if status == "PASS" else 1
|
|
1102
|
+
|
|
1103
|
+
|
|
1104
|
+
if __name__ == "__main__":
|
|
1105
|
+
raise SystemExit(main())
|
|
1106
|
+
"""
|
|
1107
|
+
|
|
1108
|
+
STARTER_LEAN_STUB = """#!/usr/bin/env python3
|
|
1109
|
+
from __future__ import annotations
|
|
1110
|
+
|
|
1111
|
+
import sys
|
|
1112
|
+
|
|
1113
|
+
|
|
1114
|
+
def main() -> int:
|
|
1115
|
+
target = ""
|
|
1116
|
+
if len(sys.argv) > 1:
|
|
1117
|
+
target = sys.argv[1]
|
|
1118
|
+
print(f"lean_build_stub=PASS target={target}")
|
|
1119
|
+
return 0
|
|
1120
|
+
|
|
1121
|
+
|
|
1122
|
+
if __name__ == "__main__":
|
|
1123
|
+
raise SystemExit(main())
|
|
1124
|
+
"""
|
|
1125
|
+
|
|
1126
|
+
STARTER_PROBLEM857_SCOPE = """problem_id: 857
|
|
1127
|
+
name: Erdos Problem 857 Starter Scope
|
|
1128
|
+
status: open
|
|
1129
|
+
notes:
|
|
1130
|
+
- starter scope generated by ORP pack install
|
|
1131
|
+
"""
|
|
1132
|
+
|
|
1133
|
+
STARTER_LAKEFILE = """import Lake
|
|
1134
|
+
open Lake DSL
|
|
1135
|
+
|
|
1136
|
+
package SunflowerLean where
|
|
1137
|
+
|
|
1138
|
+
@[default_target]
|
|
1139
|
+
lean_lib SunflowerLean where
|
|
1140
|
+
"""
|
|
1141
|
+
|
|
1142
|
+
STARTER_EXTERNAL_PR_BODY = """# Draft PR Body
|
|
1143
|
+
|
|
1144
|
+
## Summary
|
|
1145
|
+
|
|
1146
|
+
- TODO: summarize the proposed upstream contribution.
|
|
1147
|
+
|
|
1148
|
+
## Local Verification
|
|
1149
|
+
|
|
1150
|
+
- TODO: record the local gate outputs and any follow-up notes.
|
|
1151
|
+
|
|
1152
|
+
## Coordination
|
|
1153
|
+
|
|
1154
|
+
- TODO: note any overlap checks, issue references, or reviewer context.
|
|
1155
|
+
"""
|
|
1156
|
+
|
|
1157
|
+
STARTER_ISSUE_SMASHERS_SETUP = """#!/usr/bin/env bash
|
|
1158
|
+
set -eu
|
|
1159
|
+
|
|
1160
|
+
ROOT="$(CDPATH= cd -- "$(dirname -- "$0")" && pwd)"
|
|
1161
|
+
|
|
1162
|
+
mkdir -p \
|
|
1163
|
+
"$ROOT/analysis" \
|
|
1164
|
+
"$ROOT/repos" \
|
|
1165
|
+
"$ROOT/worktrees" \
|
|
1166
|
+
"$ROOT/scratch" \
|
|
1167
|
+
"$ROOT/archive"
|
|
1168
|
+
|
|
1169
|
+
printf 'workspace_root=%s\\n' "$ROOT"
|
|
1170
|
+
printf 'ensured=analysis\\n'
|
|
1171
|
+
printf 'ensured=repos\\n'
|
|
1172
|
+
printf 'ensured=worktrees\\n'
|
|
1173
|
+
printf 'ensured=scratch\\n'
|
|
1174
|
+
printf 'ensured=archive\\n'
|
|
1175
|
+
"""
|
|
1176
|
+
|
|
1177
|
+
# Repo-relative paths (files or whole directories) synced from the source
# repo into the public Problem 857 workspace during a public_repo install.
PROBLEM857_PUBLIC_WORKSPACE_PATHS = [
    "analysis/problem857_counting_gateboard.json",
    "docs/PROBLEM857_COUNTING_OPS_BOARD.md",
    "scripts/problem857_ops_board.py",
    "scripts/frontier_status.py",
    "orchestrator/reduction_graph.yaml",
    "orchestrator/v2",
    "sunflower_lean",
]
|
|
1186
|
+
|
|
1187
|
+
# Path components skipped while copying trees in the public Problem 857 sync:
# VCS metadata, Lean/pytest caches, Python bytecode, and build outputs.
PROBLEM857_PUBLIC_SYNC_IGNORES = {
    ".git",
    ".lake",
    "__pycache__",
    ".pytest_cache",
    "build",
}
|
|
1194
|
+
|
|
1195
|
+
# Marker files whose presence identifies a directory as the Sunflower Lean
# repository root when locating the Lean tree to copy.
PROBLEM857_PUBLIC_LEAN_REPO_MARKERS = [
    "SunflowerLean.lean",
    "SunflowerLean/Balance.lean",
    "lakefile.toml",
    "lean-toolchain",
]
|
|
1201
|
+
|
|
1202
|
+
# Destination prefix (inside the install target) under which the Lean repo
# tree is mirrored during a public Problem 857 sync.
PROBLEM857_PUBLIC_LEAN_COPY_PREFIX = Path("sunflower_lean")
|
|
1203
|
+
|
|
1204
|
+
# Lean source files considered in-scope for the public Problem 857 workspace
# (the modules the route groups below are built from).
PROBLEM857_PUBLIC_LEAN_SCOPE_FILES = [
    "SunflowerLean/Balance.lean",
    "SunflowerLean/BalanceCore.lean",
    "SunflowerLean/BalanceCasesA.lean",
    "SunflowerLean/BalanceCasesB.lean",
    "SunflowerLean/BalanceCandidatesA.lean",
    "SunflowerLean/BalanceCandidatesB.lean",
    "SunflowerLean/Container.lean",
    "SunflowerLean/LocalTuran.lean",
    "SunflowerLean/Obstruction.lean",
    "SunflowerLean/SATBridge.lean",
    "SunflowerLean/UnionBounds.lean",
]
|
|
1217
|
+
|
|
1218
|
+
# Route groups for the public Problem 857 board: each entry ties a route name
# to its ops ticket id, the Lean leaf module it proves, and the Lean files
# that make up the route. The "files" lists partition the scope files above.
PROBLEM857_PUBLIC_ROUTE_GROUPS = [
    {
        "route": "balance_core",
        "ticket": "P857_BALANCE_CORE",
        "leaf": "SunflowerLean.Balance",
        "files": [
            "SunflowerLean/Balance.lean",
            "SunflowerLean/BalanceCore.lean",
            "SunflowerLean/LocalTuran.lean",
            "SunflowerLean/UnionBounds.lean",
        ],
    },
    {
        "route": "balance_cases",
        "ticket": "P857_BALANCE_CASES",
        "leaf": "SunflowerLean.BalanceCases",
        "files": [
            "SunflowerLean/BalanceCasesA.lean",
            "SunflowerLean/BalanceCasesB.lean",
            "SunflowerLean/BalanceCandidatesA.lean",
            "SunflowerLean/BalanceCandidatesB.lean",
        ],
    },
    {
        "route": "support_modules",
        "ticket": "P857_SUPPORT",
        "leaf": "SunflowerLean.Support",
        "files": [
            "SunflowerLean/Container.lean",
            "SunflowerLean/Obstruction.lean",
            "SunflowerLean/SATBridge.lean",
        ],
    },
]
|
|
1252
|
+
|
|
1253
|
+
|
|
1254
|
+
def _now_utc() -> str:
|
|
1255
|
+
return (
|
|
1256
|
+
dt.datetime.now(dt.timezone.utc)
|
|
1257
|
+
.replace(microsecond=0)
|
|
1258
|
+
.isoformat()
|
|
1259
|
+
.replace("+00:00", "Z")
|
|
1260
|
+
)
|
|
1261
|
+
|
|
1262
|
+
|
|
1263
|
+
def _load_yaml(path: Path) -> dict[str, Any]:
    """Parse *path* as YAML, requiring the document root to be a mapping.

    Raises RuntimeError when the parsed root is anything other than a dict.
    """
    loaded = yaml.safe_load(path.read_text(encoding="utf-8"))
    if isinstance(loaded, dict):
        return loaded
    raise RuntimeError(f"yaml root must be object: {path}")
|
|
1268
|
+
|
|
1269
|
+
|
|
1270
|
+
def _pack_spec(pack_id: str) -> dict[str, Any]:
    """Look up the install spec for *pack_id* in PACK_SPECS.

    Raises RuntimeError when the pack is unknown or its spec is not a dict.
    """
    spec = PACK_SPECS.get(pack_id)
    if isinstance(spec, dict):
        return spec
    raise RuntimeError(f"unsupported pack for install flow: {pack_id}")
|
|
1275
|
+
|
|
1276
|
+
|
|
1277
|
+
def _pack_components(pack_id: str) -> dict[str, dict[str, Any]]:
    """Return the component table of *pack_id*'s install spec.

    Raises RuntimeError when the spec has no (non-empty dict) components.
    """
    components = _pack_spec(pack_id).get("components", {})
    if isinstance(components, dict) and components:
        return components
    raise RuntimeError(f"pack has no installable components: {pack_id}")
|
|
1282
|
+
|
|
1283
|
+
|
|
1284
|
+
def _validate_var(raw: str) -> str:
|
|
1285
|
+
if "=" not in raw:
|
|
1286
|
+
raise RuntimeError(f"invalid --var, expected KEY=VALUE: {raw}")
|
|
1287
|
+
key, _value = raw.split("=", 1)
|
|
1288
|
+
if not key or not all(c.isupper() or c.isdigit() or c == "_" for c in key):
|
|
1289
|
+
raise RuntimeError(f"invalid variable key: {key}")
|
|
1290
|
+
return raw
|
|
1291
|
+
|
|
1292
|
+
|
|
1293
|
+
def _write_text(path: Path, text: str, *, overwrite: bool) -> bool:
|
|
1294
|
+
if path.exists() and not overwrite:
|
|
1295
|
+
return False
|
|
1296
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
1297
|
+
path.write_text(text, encoding="utf-8")
|
|
1298
|
+
return True
|
|
1299
|
+
|
|
1300
|
+
|
|
1301
|
+
def _write_json(path: Path, payload: dict[str, Any], *, overwrite: bool) -> bool:
|
|
1302
|
+
if path.exists() and not overwrite:
|
|
1303
|
+
return False
|
|
1304
|
+
path.parent.mkdir(parents=True, exist_ok=True)
|
|
1305
|
+
path.write_text(json.dumps(payload, indent=2) + "\n", encoding="utf-8")
|
|
1306
|
+
return True
|
|
1307
|
+
|
|
1308
|
+
|
|
1309
|
+
def _vars_defaults(pack_meta: dict[str, Any]) -> dict[str, str]:
|
|
1310
|
+
raw = pack_meta.get("variables", {})
|
|
1311
|
+
if not isinstance(raw, dict):
|
|
1312
|
+
return {}
|
|
1313
|
+
out: dict[str, str] = {}
|
|
1314
|
+
for key, meta in raw.items():
|
|
1315
|
+
if not isinstance(key, str) or not isinstance(meta, dict):
|
|
1316
|
+
continue
|
|
1317
|
+
default = meta.get("default")
|
|
1318
|
+
if isinstance(default, str):
|
|
1319
|
+
out[key] = default
|
|
1320
|
+
return out
|
|
1321
|
+
|
|
1322
|
+
|
|
1323
|
+
def _vars_map(pack_meta: dict[str, Any], extra_vars: list[str]) -> dict[str, str]:
    """Merge pack variable defaults with validated KEY=VALUE overrides.

    Overrides from *extra_vars* are validated via _validate_var and applied
    in order, so later entries win over defaults and earlier overrides.
    """
    merged = _vars_defaults(pack_meta)
    for spec in extra_vars:
        key, value = _validate_var(spec).split("=", 1)
        merged[key] = value
    return merged
|
|
1330
|
+
|
|
1331
|
+
|
|
1332
|
+
def _problem857_source_mode(vars_map: dict[str, str]) -> str:
|
|
1333
|
+
mode = vars_map.get("PROBLEM857_SOURCE_MODE", "starter").strip().lower()
|
|
1334
|
+
if mode not in {"starter", "public_repo"}:
|
|
1335
|
+
raise RuntimeError(f"unsupported PROBLEM857_SOURCE_MODE: {mode}")
|
|
1336
|
+
return mode
|
|
1337
|
+
|
|
1338
|
+
|
|
1339
|
+
def _issue_smashers_workspace_readme(
    *,
    workspace_root_rel: str,
    repos_rel: str,
    worktrees_rel: str,
    scratch_rel: str,
    archive_rel: str,
    watchlist_rel: str,
    status_rel: str,
    pr_body_rel: str,
) -> str:
    """Render the README.md content for a freshly installed Issue Smashers workspace.

    All *_rel arguments are workspace-relative display paths interpolated
    into the Layout section. The rendered text is written to disk verbatim.
    NOTE(review): workspace_root_rel is accepted but never referenced in the
    rendered body — confirm whether it should appear or be dropped.
    """
    return (
        "# Issue Smashers Workspace\n\n"
        "This directory is the operator-facing workspace scaffold installed by the ORP "
        "`issue-smashers` pack.\n\n"
        "## Layout\n\n"
        f"- `{repos_rel}` - base clones for target projects\n"
        f"- `{worktrees_rel}` - one active worktree per issue lane\n"
        f"- `{scratch_rel}` - disposable notes and experiments\n"
        f"- `{archive_rel}` - optional non-canonical archive space\n"
        f"- `{watchlist_rel}` - machine-readable watchlist\n"
        f"- `{status_rel}` - human-readable lane/status board\n"
        f"- `{pr_body_rel}` - default public PR draft body\n\n"
        "## Usage\n\n"
        "1. Keep ORP outside this workspace as the protocol/runtime.\n"
        "2. Put base clones under `repos/`.\n"
        "3. Put one active lane per issue under `worktrees/`.\n"
        "4. Use the rendered ORP configs at the install target root to run governance.\n"
        "5. Treat this workspace as process-only; it coordinates contribution work but is not evidence.\n\n"
        "## First step\n\n"
        "Run `bash setup-issue-smashers.sh` inside this directory if you want to re-ensure the "
        "workspace folders exist.\n"
    )
|
|
1372
|
+
|
|
1373
|
+
|
|
1374
|
+
def _issue_smashers_workspace_rules(
    *,
    workspace_root_rel: str,
    repos_rel: str,
    worktrees_rel: str,
    scratch_rel: str,
    archive_rel: str,
) -> str:
    """Render the workspace-rules markdown for an Issue Smashers workspace.

    The header bullets interpolate the supplied relative paths; the numbered
    Rules section intentionally (it appears) hard-codes the canonical names
    `repos/`, `worktrees/`, `scratch/`, `archive/` regardless of the params.
    NOTE(review): if custom folder names are ever allowed, rules 2-6 would
    go stale — confirm the hard-coding is deliberate.
    """
    return (
        "# Issue Smashers Workspace Rules\n\n"
        f"- workspace root: `{workspace_root_rel}`\n"
        f"- base clones live under `{repos_rel}`\n"
        f"- active issue work lives under `{worktrees_rel}`\n"
        f"- scratch space lives under `{scratch_rel}` and is disposable\n"
        f"- archive space lives under `{archive_rel}` and is optional\n\n"
        "## Rules\n\n"
        "1. `issue-smashers/` is a plain directory, not the ORP source repo.\n"
        "2. Base clones live under `repos/`.\n"
        "3. Active issue work lives under `worktrees/`.\n"
        "4. One worktree per issue lane.\n"
        "5. `scratch/` is disposable.\n"
        "6. `archive/` is non-canonical and optional.\n"
        "7. `origin` should point at the operator fork when host repo policy allows it.\n"
        "8. `upstream` should point at the canonical target repo.\n"
        "9. ORP stays outside the workspace as the protocol/runtime.\n"
    )
|
|
1400
|
+
|
|
1401
|
+
|
|
1402
|
+
def _issue_smashers_status_markdown(
|
|
1403
|
+
*,
|
|
1404
|
+
watchlist_rel: str,
|
|
1405
|
+
pr_body_rel: str,
|
|
1406
|
+
) -> str:
|
|
1407
|
+
return (
|
|
1408
|
+
"# Issue Smashers Status\n\n"
|
|
1409
|
+
"- active_lanes: `0`\n"
|
|
1410
|
+
f"- watchlist_json: `{watchlist_rel}`\n"
|
|
1411
|
+
f"- default_pr_body: `{pr_body_rel}`\n\n"
|
|
1412
|
+
"## Queue\n\n"
|
|
1413
|
+
"- none yet\n"
|
|
1414
|
+
)
|
|
1415
|
+
|
|
1416
|
+
|
|
1417
|
+
def _issue_smashers_watchlist_payload(*, workspace_root_rel: str) -> dict[str, Any]:
    """Build the initial machine-readable watchlist document (no lanes yet).

    Stamps the current UTC time and records the workspace root for tooling.
    """
    payload: dict[str, Any] = {
        "schema_version": "1.0.0",
        "generated_at_utc": _now_utc(),
        "workspace_root": workspace_root_rel,
        "lanes": [],
    }
    payload["notes"] = [
        "Issue Smashers watchlist is process-only.",
        "Keep one active worktree per issue lane.",
    ]
    return payload
|
|
1428
|
+
|
|
1429
|
+
|
|
1430
|
+
def _run_checked(cmd: list[str], *, cwd: Path | None = None) -> None:
|
|
1431
|
+
proc = subprocess.run(
|
|
1432
|
+
cmd,
|
|
1433
|
+
cwd=str(cwd) if cwd is not None else None,
|
|
1434
|
+
capture_output=True,
|
|
1435
|
+
text=True,
|
|
1436
|
+
)
|
|
1437
|
+
if proc.returncode != 0:
|
|
1438
|
+
msg = (proc.stderr or proc.stdout or "command failed").strip()
|
|
1439
|
+
raise RuntimeError(f"{' '.join(cmd)}: {msg}")
|
|
1440
|
+
|
|
1441
|
+
|
|
1442
|
+
def _copy_repo_path(src_root: Path, dst_root: Path, rel: str, *, overwrite: bool) -> list[str]:
    """Copy one path (file or whole directory tree) from *src_root* into *dst_root*.

    Args:
        src_root: Root of the cloned public source repo.
        dst_root: Root of the target repo receiving the sync.
        rel: Path relative to both roots; must exist under ``src_root``.
        overwrite: When False, existing destination files are left untouched.

    Returns:
        Relative paths (POSIX separators) of files actually written.

    Raises:
        RuntimeError: If ``src_root / rel`` does not exist.
    """
    src = src_root / rel
    if not src.exists():
        raise RuntimeError(f"missing public Problem 857 sync path in source repo: {rel}")

    created: list[str] = []

    if src.is_dir():
        for child in sorted(src.rglob("*")):
            # Ignore names are matched against every path component, so files
            # nested anywhere under an ignored directory are skipped too.
            parts = set(child.relative_to(src).parts)
            if parts & PROBLEM857_PUBLIC_SYNC_IGNORES:
                continue
            if child.is_dir():
                continue
            rel_child = child.relative_to(src_root)
            target = dst_root / rel_child
            if target.exists() and not overwrite:
                continue
            target.parent.mkdir(parents=True, exist_ok=True)
            shutil.copy2(child, target)
            # Report with POSIX separators for consistency with
            # _copy_repo_tree, which uses as_posix() for created entries.
            created.append(rel_child.as_posix())
        return created

    dst = dst_root / rel
    if dst.exists() and not overwrite:
        return created
    dst.parent.mkdir(parents=True, exist_ok=True)
    shutil.copy2(src, dst)
    created.append(rel)
    return created
|
|
1472
|
+
|
|
1473
|
+
|
|
1474
|
+
def _copy_repo_tree(
    src_root: Path,
    target_repo_root: Path,
    dst_prefix: Path,
    *,
    overwrite: bool,
) -> list[str]:
    """Mirror all non-ignored files under *src_root* into target_repo_root/dst_prefix.

    Returns the POSIX-style relative paths of files that were written.
    """
    copied: list[str] = []
    for entry in sorted(src_root.rglob("*")):
        if entry.is_dir():
            continue
        rel_entry = entry.relative_to(src_root)
        # Skip anything whose path contains an ignored component (e.g. VCS dirs).
        if set(rel_entry.parts) & PROBLEM857_PUBLIC_SYNC_IGNORES:
            continue
        out_rel = dst_prefix / rel_entry
        destination = target_repo_root / out_rel
        if destination.exists() and not overwrite:
            continue
        destination.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(entry, destination)
        copied.append(out_rel.as_posix())
    return copied
|
|
1497
|
+
|
|
1498
|
+
|
|
1499
|
+
def _has_problem857_public_workspace_shape(repo_path: Path) -> bool:
|
|
1500
|
+
return (repo_path / "analysis/problem857_counting_gateboard.json").exists()
|
|
1501
|
+
|
|
1502
|
+
|
|
1503
|
+
def _has_problem857_public_lean_shape(repo_path: Path) -> bool:
    """True when every public lean-repo marker path exists under *repo_path*."""
    for rel in PROBLEM857_PUBLIC_LEAN_REPO_MARKERS:
        if not (repo_path / rel).exists():
            return False
    return True
|
|
1505
|
+
|
|
1506
|
+
|
|
1507
|
+
def _problem857_public_sync_note(source: str, ref: str) -> str:
|
|
1508
|
+
clean_ref = ref.strip() or "HEAD"
|
|
1509
|
+
return (
|
|
1510
|
+
"ORP-generated public bridge over synced sunflower-lean repository "
|
|
1511
|
+
f"({source}@{clean_ref}); board routes summarize public module inventory, not proof completion."
|
|
1512
|
+
)
|
|
1513
|
+
|
|
1514
|
+
|
|
1515
|
+
def _problem857_public_scope_text(source_repo_url: str, ref: str, repo_path: Path) -> str:
    """Render the ORP scope YAML for Problem 857 in public_repo source mode."""
    present: list[str] = []
    for rel in PROBLEM857_PUBLIC_LEAN_SCOPE_FILES:
        if (repo_path / rel).exists():
            present.append(f"sunflower_lean/{rel}")
    if not present:
        # Keep the scope non-empty by always listing at least the entrypoint.
        present = ["sunflower_lean/SunflowerLean/Balance.lean"]
    header = [
        "problem_id: 857",
        'display_name: "Sunflower Problem 857 Public Lean Scope"',
        "source_mode: public_repo",
        f"public_repo_url: {json.dumps(source_repo_url)}",
        f"public_repo_ref: {json.dumps(ref.strip() or 'main')}",
        "lean_root: sunflower_lean",
        "lean_entrypoint: sunflower_lean/SunflowerLean/Balance.lean",
        "north_star_lane: balance_core",
        "reduction_route: balance_core",
        "lean_files:",
    ]
    body = [f"  - {rel}" for rel in present]
    footer = [
        "notes:",
        '  - "ORP-generated scope over the synced public sunflower-lean repo."',
        '  - "These files are public evidence inputs; ORP board/frontier files are derived process views."',
    ]
    return "\n".join(header + body + footer) + "\n"
|
|
1545
|
+
|
|
1546
|
+
|
|
1547
|
+
def _problem857_public_reduction_graph_text(source_repo_url: str, ref: str, repo_path: Path) -> str:
    """Render the reduction-graph YAML over the public Problem 857 route groups."""
    out = [
        "problem_id: 857",
        "source_mode: public_repo",
        f"public_repo_url: {json.dumps(source_repo_url)}",
        f"public_repo_ref: {json.dumps(ref.strip() or 'main')}",
        "routes:",
    ]
    for group in PROBLEM857_PUBLIC_ROUTE_GROUPS:
        out.append(f"  {group['route']}:")
        out.append(f"    ticket: {group['ticket']}")
        out.append(f"    leaf: {group['leaf']}")
        out.append("    lean_files:")
        # Only list files that actually exist in the synced clone.
        for rel in group["files"]:
            if (repo_path / rel).exists():
                out.append(f"      - sunflower_lean/{rel}")
    return "\n".join(out) + "\n"
|
|
1564
|
+
|
|
1565
|
+
|
|
1566
|
+
def _problem857_public_board_payload(source_repo_url: str, ref: str, repo_path: Path) -> dict[str, Any]:
    """Build the Problem 857 board JSON payload from the synced public clone.

    Route/ticket progress counts how many of each group's files exist in the
    clone; "done" therefore reflects public module inventory, not proof state
    (see the starter_note text below).
    """
    route_status: list[dict[str, Any]] = []
    tickets: list[dict[str, Any]] = []
    for group in PROBLEM857_PUBLIC_ROUTE_GROUPS:
        # Inventory check: which of this route's expected files were synced.
        existing = [rel for rel in group["files"] if (repo_path / rel).exists()]
        total = len(group["files"])
        done = len(existing)
        # Loose and strict counters are intentionally identical here: file
        # presence is the only signal available from a public sync.
        route_status.append(
            {
                "route": group["route"],
                "loose_done": done,
                "loose_total": total,
                "strict_done": done,
                "strict_total": total,
            }
        )
        tickets.append(
            {
                "ticket": group["ticket"],
                "leaf": group["leaf"],
                "leaf_strict": "synced" if done == total else "partial",
                "gates_done": done,
                "gates_total": total,
                "atoms_done": done,
                "atoms_total": total,
            }
        )
    # NOTE: key order is preserved by json.dump, so it shapes the emitted file.
    payload = {
        "board_id": "problem857_public_repo_board",
        "problem_id": 857,
        "updated_utc": _now_utc(),
        "starter_scaffold": False,
        "starter_note": _problem857_public_sync_note(source_repo_url, ref),
        "public_repo": {
            "url": source_repo_url,
            "ref": ref.strip() or "main",
            "sync_root": "sunflower_lean",
            "entrypoint": "sunflower_lean/SunflowerLean/Balance.lean",
        },
        "route_status": route_status,
        "tickets": tickets,
        "atoms": {
            "A_public_balance_build": {
                "status": "ready",
                "ticket_id": "P857_BALANCE_CORE",
                "gate_id": "lean_build_balance",
                "deps": ["spec_faithfulness"],
            }
        },
        "ready_atoms": 1,
        "no_go_active": [],
    }
    return payload
|
|
1619
|
+
|
|
1620
|
+
|
|
1621
|
+
def _problem857_public_board_markdown(board: dict[str, Any]) -> str:
|
|
1622
|
+
lines = [
|
|
1623
|
+
"# Problem 857 Ops Board",
|
|
1624
|
+
"",
|
|
1625
|
+
f"- updated_utc: `{board.get('updated_utc', '')}`",
|
|
1626
|
+
f"- ready_atoms: `{int(board.get('ready_atoms', 0) or 0)}`",
|
|
1627
|
+
]
|
|
1628
|
+
public_repo = board.get("public_repo", {})
|
|
1629
|
+
if isinstance(public_repo, dict):
|
|
1630
|
+
lines.append(f"- public_repo_url: `{public_repo.get('url', '')}`")
|
|
1631
|
+
lines.append(f"- public_repo_ref: `{public_repo.get('ref', '')}`")
|
|
1632
|
+
lines.extend(["", "## Routes", ""])
|
|
1633
|
+
for row in board.get("route_status", []):
|
|
1634
|
+
if not isinstance(row, dict):
|
|
1635
|
+
continue
|
|
1636
|
+
lines.append(
|
|
1637
|
+
"- route={route} loose={ld}/{lt} strict={sd}/{st}".format(
|
|
1638
|
+
route=row.get("route", ""),
|
|
1639
|
+
ld=row.get("loose_done", 0),
|
|
1640
|
+
lt=row.get("loose_total", 0),
|
|
1641
|
+
sd=row.get("strict_done", 0),
|
|
1642
|
+
st=row.get("strict_total", 0),
|
|
1643
|
+
)
|
|
1644
|
+
)
|
|
1645
|
+
lines.extend(
|
|
1646
|
+
[
|
|
1647
|
+
"",
|
|
1648
|
+
"## Note",
|
|
1649
|
+
"",
|
|
1650
|
+
"- This board is ORP-derived from the synced public sunflower-lean repo.",
|
|
1651
|
+
"- It tracks public module inventory for the Problem 857 workflow, not proof completion.",
|
|
1652
|
+
"",
|
|
1653
|
+
]
|
|
1654
|
+
)
|
|
1655
|
+
return "\n".join(lines)
|
|
1656
|
+
|
|
1657
|
+
|
|
1658
|
+
def _sync_problem857_public_lean_repo(
    *,
    target_repo_root: Path,
    repo_path: Path,
    source: str,
    ref: str,
    overwrite: bool,
) -> list[str]:
    """Sync a public sunflower-lean clone and derive the ORP process views.

    Copies the lean tree into the target repo, then writes the derived board
    JSON/markdown, reduction graph, and scope files. Returns the sorted,
    de-duplicated list of relative paths that were written. ``overwrite``
    is threaded to every writer, so existing files win when it is False.
    """
    # Mirror the lean source tree under the configured copy prefix.
    created = _copy_repo_tree(
        repo_path,
        target_repo_root,
        PROBLEM857_PUBLIC_LEAN_COPY_PREFIX,
        overwrite=overwrite,
    )
    # Derived board payload is computed once and reused for JSON + markdown.
    board_payload = _problem857_public_board_payload(source, ref, repo_path)
    board_path = target_repo_root / "analysis/problem857_counting_gateboard.json"
    if _write_json(board_path, board_payload, overwrite=overwrite):
        created.append(str(board_path.relative_to(target_repo_root)))
    board_md_path = target_repo_root / "docs/PROBLEM857_COUNTING_OPS_BOARD.md"
    if _write_text(board_md_path, _problem857_public_board_markdown(board_payload), overwrite=overwrite):
        created.append(str(board_md_path.relative_to(target_repo_root)))
    reduction_graph_path = target_repo_root / "orchestrator/reduction_graph.yaml"
    if _write_text(
        reduction_graph_path,
        _problem857_public_reduction_graph_text(source, ref, repo_path),
        overwrite=overwrite,
    ):
        created.append(str(reduction_graph_path.relative_to(target_repo_root)))
    scope_path = target_repo_root / "orchestrator/v2/scopes/problem_857.yaml"
    if _write_text(scope_path, _problem857_public_scope_text(source, ref, repo_path), overwrite=overwrite):
        created.append(str(scope_path.relative_to(target_repo_root)))
    return sorted(set(created))
|
|
1690
|
+
|
|
1691
|
+
|
|
1692
|
+
def _sync_problem857_public_repo(
    *,
    target_repo_root: Path,
    source: str,
    ref: str,
    overwrite: bool,
) -> list[str]:
    """Clone the public Problem 857 repo and sync it into the target repo.

    Clones ``source`` into a temporary directory (optionally checking out
    ``ref``), detects which supported repo shape it has, and dispatches to the
    matching sync path. Returns the sorted, de-duplicated relative paths that
    were written.

    Raises:
        RuntimeError: If the clone matches neither supported shape, or a git
            command fails (via _run_checked).
    """
    created: list[str] = []
    # The clone lives only for the duration of the sync.
    with tempfile.TemporaryDirectory(prefix="orp-problem857-public-") as td:
        repo_path = Path(td) / "repo"
        _run_checked(["git", "clone", source, str(repo_path)])
        if ref.strip():
            _run_checked(["git", "-C", str(repo_path), "checkout", ref.strip()])
        # Shape detection: full workspace repo first, then bare lean repo.
        if _has_problem857_public_workspace_shape(repo_path):
            for rel in PROBLEM857_PUBLIC_WORKSPACE_PATHS:
                created.extend(_copy_repo_path(repo_path, target_repo_root, rel, overwrite=overwrite))
        elif _has_problem857_public_lean_shape(repo_path):
            created.extend(
                _sync_problem857_public_lean_repo(
                    target_repo_root=target_repo_root,
                    repo_path=repo_path,
                    source=source,
                    ref=ref,
                    overwrite=overwrite,
                )
            )
        else:
            raise RuntimeError(
                "public Problem 857 repo did not match a supported shape; expected either "
                "a workspace repo with analysis/docs/scripts/orchestrator paths or a public "
                "sunflower-lean repo with SunflowerLean/Balance.lean and lakefile.toml"
            )
    return sorted(set(created))
|
|
1725
|
+
|
|
1726
|
+
|
|
1727
|
+
def _install_starter_adapters(
    *,
    pack_id: str,
    pack_meta: dict[str, Any],
    target_repo_root: Path,
    includes: list[str],
    extra_vars: list[str],
    overwrite: bool,
) -> list[str]:
    """Scaffold starter adapter files for the selected pack into the target repo.

    Dispatches on ``pack_id`` (external-pr-governance / issue-smashers /
    default erdos flow) and on the requested ``includes`` components. Returns
    the sorted, de-duplicated relative paths of files actually written;
    ``overwrite`` is threaded to every writer.
    """
    created: list[str] = []
    vars_map = _vars_map(pack_meta, extra_vars)
    problem857_source_mode = _problem857_source_mode(vars_map)
    public_repo_requested = "problem857" in includes and problem857_source_mode == "public_repo"

    if pack_id == "external-pr-governance":
        if "governance" not in includes:
            return created
        draft_body = target_repo_root / "analysis/PR_DRAFT_BODY.md"
        if _write_text(draft_body, STARTER_EXTERNAL_PR_BODY, overwrite=overwrite):
            created.append(str(draft_body.relative_to(target_repo_root)))
        return created

    if pack_id == "issue-smashers":
        # Every path is var-overridable; ".strip() or default" guards against
        # blank overrides falling through as empty paths.
        workspace_root_rel = vars_map.get("ISSUE_SMASHERS_ROOT", "issue-smashers").strip() or "issue-smashers"
        repos_rel = vars_map.get("ISSUE_SMASHERS_REPOS_DIR", f"{workspace_root_rel}/repos").strip() or f"{workspace_root_rel}/repos"
        worktrees_rel = vars_map.get(
            "ISSUE_SMASHERS_WORKTREES_DIR", f"{workspace_root_rel}/worktrees"
        ).strip() or f"{workspace_root_rel}/worktrees"
        scratch_rel = vars_map.get("ISSUE_SMASHERS_SCRATCH_DIR", f"{workspace_root_rel}/scratch").strip() or f"{workspace_root_rel}/scratch"
        archive_rel = vars_map.get("ISSUE_SMASHERS_ARCHIVE_DIR", f"{workspace_root_rel}/archive").strip() or f"{workspace_root_rel}/archive"
        watchlist_rel = vars_map.get(
            "WATCHLIST_FILE", f"{workspace_root_rel}/analysis/ISSUE_SMASHERS_WATCHLIST.json"
        ).strip() or f"{workspace_root_rel}/analysis/ISSUE_SMASHERS_WATCHLIST.json"
        status_rel = vars_map.get(
            "STATUS_FILE", f"{workspace_root_rel}/analysis/ISSUE_SMASHERS_STATUS.md"
        ).strip() or f"{workspace_root_rel}/analysis/ISSUE_SMASHERS_STATUS.md"
        rules_rel = vars_map.get("WORKSPACE_RULES_FILE", f"{workspace_root_rel}/WORKSPACE_RULES.md").strip() or f"{workspace_root_rel}/WORKSPACE_RULES.md"
        pr_body_rel = vars_map.get(
            "DEFAULT_PR_BODY_FILE", f"{workspace_root_rel}/analysis/PR_DRAFT_BODY.md"
        ).strip() or f"{workspace_root_rel}/analysis/PR_DRAFT_BODY.md"

        workspace_files: list[tuple[Path, str]] = [
            (
                target_repo_root / workspace_root_rel / "README.md",
                _issue_smashers_workspace_readme(
                    workspace_root_rel=workspace_root_rel,
                    repos_rel=repos_rel,
                    worktrees_rel=worktrees_rel,
                    scratch_rel=scratch_rel,
                    archive_rel=archive_rel,
                    watchlist_rel=watchlist_rel,
                    status_rel=status_rel,
                    pr_body_rel=pr_body_rel,
                ),
            ),
            (
                target_repo_root / rules_rel,
                _issue_smashers_workspace_rules(
                    workspace_root_rel=workspace_root_rel,
                    repos_rel=repos_rel,
                    worktrees_rel=worktrees_rel,
                    scratch_rel=scratch_rel,
                    archive_rel=archive_rel,
                ),
            ),
            (target_repo_root / workspace_root_rel / "setup-issue-smashers.sh", STARTER_ISSUE_SMASHERS_SETUP),
            (target_repo_root / status_rel, _issue_smashers_status_markdown(watchlist_rel=watchlist_rel, pr_body_rel=pr_body_rel)),
            (target_repo_root / pr_body_rel, STARTER_EXTERNAL_PR_BODY),
        ]
        for path, text in workspace_files:
            if _write_text(path, text, overwrite=overwrite):
                created.append(str(path.relative_to(target_repo_root)))

        watchlist_path = target_repo_root / watchlist_rel
        if _write_json(
            watchlist_path,
            _issue_smashers_watchlist_payload(workspace_root_rel=workspace_root_rel),
            overwrite=overwrite,
        ):
            created.append(str(watchlist_path.relative_to(target_repo_root)))

        # .gitkeep placeholders so the empty workspace dirs survive in git.
        for rel in [repos_rel, worktrees_rel, scratch_rel, archive_rel]:
            placeholder = target_repo_root / rel / ".gitkeep"
            if _write_text(placeholder, "", overwrite=overwrite):
                created.append(str(placeholder.relative_to(target_repo_root)))
        return sorted(set(created))

    if public_repo_requested:
        source = vars_map.get("PROBLEM857_PUBLIC_REPO_URL", "").strip()
        ref = vars_map.get("PROBLEM857_PUBLIC_REPO_REF", "").strip()
        if not source:
            raise RuntimeError("PROBLEM857_PUBLIC_REPO_URL cannot be empty in public_repo mode")
        created.extend(
            _sync_problem857_public_repo(
                target_repo_root=target_repo_root,
                source=source,
                ref=ref,
                overwrite=overwrite,
            )
        )

    needs_atomic = "live_compare" in includes or "problem857" in includes
    if not needs_atomic:
        return sorted(set(created))

    # Shared starter runtime and wrappers for 857/20/367.
    files: list[tuple[Path, str]] = [(target_repo_root / "scripts/orp_atomic_board_runtime.py", STARTER_RUNTIME)]
    if "live_compare" in includes or "problem857" in includes:
        files.extend(
            [
                (target_repo_root / "scripts/frontier_status.py", STARTER_FRONTIER),
                (target_repo_root / "scripts/problem857_ops_board.py", STARTER_WRAPPER.replace("{PROBLEM}", "857")),
            ]
        )
    if "live_compare" in includes:
        files.extend(
            [
                (target_repo_root / "scripts/problem20_ops_board.py", STARTER_WRAPPER.replace("{PROBLEM}", "20")),
                (target_repo_root / "scripts/problem367_ops_board.py", STARTER_WRAPPER.replace("{PROBLEM}", "367")),
            ]
        )
    for path, text in files:
        if _write_text(path, text, overwrite=overwrite):
            created.append(str(path.relative_to(target_repo_root)))

    # Seed board JSON + markdown so live_compare is runnable immediately.
    stamped = _now_utc()
    for problem, seed in BOARD_SEEDS.items():
        # The 857 board is derived from the public sync in public_repo mode.
        if problem == 857 and public_repo_requested:
            continue
        if problem in {20, 367} and "live_compare" not in includes:
            continue
        # Deep-copy the seed via a JSON round-trip before stamping it.
        payload = json.loads(json.dumps(seed))
        payload["updated_utc"] = stamped
        board_path = target_repo_root / BOARD_PATHS[problem]
        if _write_json(board_path, payload, overwrite=overwrite):
            created.append(str(board_path.relative_to(target_repo_root)))

        md_path = target_repo_root / BOARD_MD_PATHS[problem]
        # Fix: use real newlines ("\n"), not literal backslash-n text ("\\n"),
        # so the seeded markdown renders as actual lines.
        md = (
            f"# Problem {problem} Ops Board\n\n"
            f"- updated_utc: `{stamped}`\n"
            "- note: starter board generated by ORP pack install\n"
        )
        if _write_text(md_path, md, overwrite=overwrite):
            created.append(str(md_path.relative_to(target_repo_root)))

    if "problem857" in includes and problem857_source_mode != "public_repo":
        extra_files: list[tuple[Path, str]] = [
            (target_repo_root / "orchestrator/problem857_public_spec_check.py", STARTER_SPEC_CHECK),
            (target_repo_root / "scripts/orp-lean-build-stub.py", STARTER_LEAN_STUB),
            (target_repo_root / "orchestrator/v2/scopes/problem_857.yaml", STARTER_PROBLEM857_SCOPE),
            (target_repo_root / "sunflower_lean/lakefile.lean", STARTER_LAKEFILE),
        ]
        for path, text in extra_files:
            if _write_text(path, text, overwrite=overwrite):
                created.append(str(path.relative_to(target_repo_root)))

    if "problem857" in includes and problem857_source_mode == "public_repo":
        # In public_repo mode only the spec checker is scaffolded; everything
        # else comes from the synced public repo.
        checker_path = target_repo_root / "orchestrator/problem857_public_spec_check.py"
        if _write_text(checker_path, STARTER_SPEC_CHECK, overwrite=overwrite):
            created.append(str(checker_path.relative_to(target_repo_root)))

    return sorted(set(created))
|
|
1891
|
+
|
|
1892
|
+
|
|
1893
|
+
def _render_component(
    *,
    orp_repo_root: Path,
    pack_root: Path,
    target_repo_root: Path,
    components: dict[str, dict[str, Any]],
    component_key: str,
    extra_vars: list[str],
    internal_vars: list[str],
    template_id_override: str = "",
) -> Path:
    """Render one pack component config via the external renderer script.

    Invokes ``scripts/orp-pack-render.py`` in a subprocess with the component's
    template and the combined variable set, writing the result to the
    component's ``output_name`` under the target repo.

    Returns:
        The rendered output path.

    Raises:
        RuntimeError: If the renderer script is missing or exits non-zero.
    """
    comp = components[component_key]
    out_path = target_repo_root / str(comp["output_name"])
    render_script = orp_repo_root / "scripts" / "orp-pack-render.py"
    if not render_script.exists():
        raise RuntimeError(f"missing renderer script: {render_script}")

    cmd = [
        sys.executable,
        str(render_script),
        "--pack",
        str(pack_root),
        "--template",
        template_id_override or str(comp["template_id"]),
        "--var",
        f"TARGET_REPO_ROOT={target_repo_root}",
        "--var",
        f"ORP_REPO_ROOT={orp_repo_root}",
        "--out",
        str(out_path),
    ]

    # Internal vars first, user vars last so users can override defaults.
    for raw in internal_vars:
        cmd.extend(["--var", _validate_var(raw)])
    for raw in extra_vars:
        cmd.extend(["--var", _validate_var(raw)])

    proc = subprocess.run(cmd, cwd=str(orp_repo_root), capture_output=True, text=True)
    if proc.returncode != 0:
        # Prefer stderr; fall back to stdout, then a generic marker.
        stderr = (proc.stderr or "").strip()
        stdout = (proc.stdout or "").strip()
        msg = stderr or stdout or "unknown renderer failure"
        raise RuntimeError(f"render failed for {component_key}: {msg}")
    return out_path
|
|
1938
|
+
|
|
1939
|
+
|
|
1940
|
+
def _check_dependencies(
|
|
1941
|
+
target_repo_root: Path,
|
|
1942
|
+
components: dict[str, dict[str, Any]],
|
|
1943
|
+
component_key: str,
|
|
1944
|
+
) -> tuple[list[str], list[str]]:
|
|
1945
|
+
comp = components[component_key]
|
|
1946
|
+
required = [str(x) for x in comp.get("required_paths", [])]
|
|
1947
|
+
present: list[str] = []
|
|
1948
|
+
missing: list[str] = []
|
|
1949
|
+
for rel in required:
|
|
1950
|
+
p = target_repo_root / rel
|
|
1951
|
+
if p.exists():
|
|
1952
|
+
present.append(rel)
|
|
1953
|
+
else:
|
|
1954
|
+
missing.append(rel)
|
|
1955
|
+
return present, missing
|
|
1956
|
+
|
|
1957
|
+
|
|
1958
|
+
def _write_report(
    *,
    report_path: Path,
    generated_at_utc: str,
    pack_id: str,
    pack_version: str,
    target_repo_root: Path,
    components: dict[str, dict[str, Any]],
    rendered: dict[str, Path],
    dep_summary: dict[str, dict[str, Any]],
    bootstrap_enabled: bool,
    bootstrap_created: list[str],
) -> None:
    """Write the markdown install report summarizing a pack installation.

    Sections: header metadata, rendered configs table, starter bootstrap file
    list, dependency audit table, missing paths, and pack-specific next steps.
    Creates parent directories as needed; overwrites any existing report.
    """
    lines: list[str] = []
    lines.append("# ORP Pack Install Report")
    lines.append("")
    lines.append(f"- generated_at_utc: `{generated_at_utc}`")
    lines.append(f"- pack_id: `{pack_id}`")
    lines.append(f"- pack_version: `{pack_version}`")
    lines.append(f"- target_repo_root: `{target_repo_root}`")
    lines.append(f"- bootstrap_enabled: `{bootstrap_enabled}`")
    lines.append("")
    lines.append("## Rendered Configs")
    lines.append("")
    lines.append("| Component | Template | Output |")
    lines.append("|---|---|---|")
    for key, out_path in rendered.items():
        template_id = str(components[key]["template_id"])
        lines.append(f"| `{key}` | `{template_id}` | `{out_path}` |")

    lines.append("")
    lines.append("## Starter Bootstrap")
    lines.append("")
    lines.append(f"- created_files: `{len(bootstrap_created)}`")
    if bootstrap_created:
        for rel in bootstrap_created:
            lines.append(f"- `{rel}`")

    lines.append("")
    lines.append("## Dependency Audit")
    lines.append("")
    lines.append("| Component | Required | Present | Missing |")
    lines.append("|---|---:|---:|---:|")
    for key, row in dep_summary.items():
        required = int(row["required"])
        present = int(row["present"])
        missing = int(row["missing"])
        lines.append(f"| `{key}` | {required} | {present} | {missing} |")

    lines.append("")
    lines.append("## Missing Paths")
    lines.append("")
    any_missing = False
    for key, row in dep_summary.items():
        missing_paths = row.get("missing_paths", [])
        if not missing_paths:
            continue
        any_missing = True
        lines.append(f"- `{key}`")
        for rel in missing_paths:
            lines.append(f"  - {rel}")
    if not any_missing:
        lines.append("- none")

    lines.append("")
    lines.append("## Next Steps")
    lines.append("")
    # Pack-specific guidance first, then the generic ORP run commands.
    if pack_id == "erdos-open-problems" and "problem857" in rendered:
        lines.append(
            "- Sync public Problem 857 data first with `orp erdos sync --problem-id 857 --out-problem-dir analysis/erdos_problems/selected`."
        )
        lines.append(
            "- For a real host workspace instead of starter scaffolding, install with `--var PROBLEM857_SOURCE_MODE=public_repo` (and optionally `--var PROBLEM857_PUBLIC_REPO_URL=<git-url>`)."
        )
    if pack_id == "external-pr-governance":
        lines.append(
            "- Replace the placeholder commands and repo metadata in the rendered configs before treating any governance run as meaningful."
        )
        if "governance" in rendered:
            lines.append(
                "- Run the lifecycle in order: `external_watch_select`, `external_pre_open`, `external_local_readiness`, `external_draft_transition`, then `external_draft_lifecycle`."
            )
        if "feedback_hardening" in rendered:
            lines.append(
                "- Use `external_feedback_hardening` when maintainer feedback reveals a missed check that should become a reusable guard."
            )
    if pack_id == "issue-smashers":
        lines.append(
            "- Treat `issue-smashers/` as a plain workspace scaffold, not as a replacement for ORP core or as a monorepo of cloned projects."
        )
        lines.append(
            "- Replace the placeholder commands in the rendered configs before treating any governance run as meaningful."
        )
        lines.append(
            "- Use `issue_smashers_full_flow` for the main lifecycle and `issue_smashers_feedback_hardening` when maintainer feedback should become a reusable guard."
        )
        lines.append(
            "- Keep base clones in `issue-smashers/repos/` and one active worktree per issue lane in `issue-smashers/worktrees/`."
        )
    lines.append("- Run selected ORP profiles with `orp --config <rendered-config> gate run --profile <profile>`.")
    lines.append("- If developing ORP locally, the equivalent command is `./scripts/orp --config <rendered-config> gate run --profile <profile>`.")
    lines.append("- Emit process packets with `orp --config <rendered-config> packet emit --profile <profile> --run-id <run_id>`.")
    lines.append("- Generate one-page run digest with `orp report summary --run-id <run_id>`.")
    lines.append("- Keep ORP core generic; treat this pack as optional domain wiring.")
    lines.append("")

    report_path.parent.mkdir(parents=True, exist_ok=True)
    report_path.write_text("\n".join(lines), encoding="utf-8")
|
|
2066
|
+
|
|
2067
|
+
|
|
2068
|
+
def _build_parser() -> argparse.ArgumentParser:
|
|
2069
|
+
p = argparse.ArgumentParser(description="Install ORP pack templates into a target repo")
|
|
2070
|
+
p.add_argument(
|
|
2071
|
+
"--orp-repo-root",
|
|
2072
|
+
default="",
|
|
2073
|
+
help="ORP repo root (default: auto-detect from script location)",
|
|
2074
|
+
)
|
|
2075
|
+
p.add_argument(
|
|
2076
|
+
"--pack-id",
|
|
2077
|
+
default="erdos-open-problems",
|
|
2078
|
+
help="Pack id under ORP packs/ (default: erdos-open-problems)",
|
|
2079
|
+
)
|
|
2080
|
+
p.add_argument(
|
|
2081
|
+
"--pack-path",
|
|
2082
|
+
default="",
|
|
2083
|
+
help="Explicit pack root path containing pack.yml (overrides --pack-id lookup)",
|
|
2084
|
+
)
|
|
2085
|
+
p.add_argument(
|
|
2086
|
+
"--target-repo-root",
|
|
2087
|
+
default=".",
|
|
2088
|
+
help="Target repository root where rendered ORP configs are written (default: current directory)",
|
|
2089
|
+
)
|
|
2090
|
+
p.add_argument(
|
|
2091
|
+
"--include",
|
|
2092
|
+
action="append",
|
|
2093
|
+
default=[],
|
|
2094
|
+
help=(
|
|
2095
|
+
"Component to install (repeatable). "
|
|
2096
|
+
"Valid values depend on the selected pack. "
|
|
2097
|
+
"Examples: erdos-open-problems -> catalog/live_compare/problem857/governance; "
|
|
2098
|
+
"external-pr-governance -> governance/feedback_hardening; "
|
|
2099
|
+
"issue-smashers -> workspace/feedback_hardening."
|
|
2100
|
+
),
|
|
2101
|
+
)
|
|
2102
|
+
p.add_argument(
|
|
2103
|
+
"--var",
|
|
2104
|
+
action="append",
|
|
2105
|
+
default=[],
|
|
2106
|
+
help="Extra template variable KEY=VALUE (repeatable)",
|
|
2107
|
+
)
|
|
2108
|
+
p.add_argument(
|
|
2109
|
+
"--report",
|
|
2110
|
+
default="",
|
|
2111
|
+
help="Install report output path (default depends on selected pack)",
|
|
2112
|
+
)
|
|
2113
|
+
p.add_argument(
|
|
2114
|
+
"--strict-deps",
|
|
2115
|
+
action="store_true",
|
|
2116
|
+
help="Exit non-zero if dependency audit finds missing paths.",
|
|
2117
|
+
)
|
|
2118
|
+
p.add_argument(
|
|
2119
|
+
"--no-bootstrap",
|
|
2120
|
+
dest="bootstrap",
|
|
2121
|
+
action="store_false",
|
|
2122
|
+
help="Disable starter adapter scaffolding",
|
|
2123
|
+
)
|
|
2124
|
+
p.add_argument(
|
|
2125
|
+
"--overwrite-bootstrap",
|
|
2126
|
+
action="store_true",
|
|
2127
|
+
help="Allow bootstrap to overwrite existing scaffolded files",
|
|
2128
|
+
)
|
|
2129
|
+
p.set_defaults(bootstrap=True)
|
|
2130
|
+
return p
|
|
2131
|
+
|
|
2132
|
+
|
|
2133
|
+
def _resolve_includes(pack_id: str, components: dict[str, Any], requested: list[str]) -> list[str]:
    """Resolve which components to install.

    Precedence: explicit ``--include`` values, then the pack's
    ``default_includes`` list (non-string entries ignored), then every
    component the pack defines, sorted by key.
    """
    if requested:
        return requested
    default_includes = _pack_spec(pack_id).get("default_includes", [])
    if isinstance(default_includes, list):
        resolved = [str(x) for x in default_includes if isinstance(x, str)]
        if resolved:
            return resolved
    return sorted(components.keys())


def _audit_component_dependencies(
    target_repo_root: Path,
    components: dict[str, Any],
    includes: list[str],
) -> tuple[dict[str, dict[str, Any]], int]:
    """Audit required dependency paths for each included component.

    Returns a per-component summary (required/present/missing counts plus
    the list of missing paths) and the total number of missing paths.
    """
    dep_summary: dict[str, dict[str, Any]] = {}
    total_missing = 0
    for key in includes:
        present_paths, missing_paths = _check_dependencies(target_repo_root, components, key)
        dep_summary[key] = {
            "required": len(present_paths) + len(missing_paths),
            "present": len(present_paths),
            "missing": len(missing_paths),
            "missing_paths": missing_paths,
        }
        total_missing += len(missing_paths)
    return dep_summary, total_missing


def _resolve_report_path(target_repo_root: Path, pack_id: str, report_arg: str) -> Path:
    """Resolve the install-report output path.

    An explicit ``--report`` value wins (relative paths are anchored at
    the target repo root); otherwise the pack's configured
    ``report_name`` (or a pack-id-derived default) is used.
    """
    # Use the stripped value consistently: the truthiness check below is
    # on the stripped string, so the Path must be built from it too.
    report_arg = report_arg.strip()
    if report_arg:
        report_path = Path(report_arg)
        if not report_path.is_absolute():
            return (target_repo_root / report_path).resolve()
        return report_path.resolve()
    report_name = str(_pack_spec(pack_id).get("report_name", f"orp.{pack_id}.pack-install-report.md"))
    return (target_repo_root / report_name).resolve()


def main() -> int:
    """CLI entry point: install a profile pack into a target repository.

    Resolves the ORP and target repo roots, loads the pack metadata from
    ``pack.yml``, optionally scaffolds starter adapters, renders each
    included component, audits component dependencies, writes an install
    report, and prints a ``key=value`` summary to stdout.

    Returns:
        0 on success; 2 for a missing pack or unknown component;
        3 when ``--strict-deps`` is set and the dependency audit
        found missing paths.
    """
    args = _build_parser().parse_args()

    # Default the ORP repo root to the checkout containing this script
    # (parent of the cli/ directory).
    if args.orp_repo_root:
        orp_repo_root = Path(args.orp_repo_root).resolve()
    else:
        orp_repo_root = Path(__file__).resolve().parent.parent
    target_repo_root = Path(args.target_repo_root).resolve()
    target_repo_root.mkdir(parents=True, exist_ok=True)

    # An explicit --pack-path overrides the packs/<pack-id> lookup.
    # Build the Path from the stripped value so the truthiness guard and
    # the path actually used agree on whitespace handling.
    pack_path_arg = args.pack_path.strip()
    if pack_path_arg:
        pack_root = Path(pack_path_arg).resolve()
    else:
        pack_root = orp_repo_root / "packs" / args.pack_id
    pack_yml = pack_root / "pack.yml"
    if not pack_yml.exists():
        print(f"error: pack not found: {pack_root}", file=sys.stderr)
        return 2

    pack_meta = _load_yaml(pack_yml)
    pack_id = str(pack_meta.get("pack_id", args.pack_id))
    pack_version = str(pack_meta.get("version", "unknown"))
    generated_at_utc = _now_utc()
    components = _pack_components(pack_id)
    effective_vars = _vars_map(pack_meta, list(args.var or []))
    problem857_source_mode = _problem857_source_mode(effective_vars)

    includes = _resolve_includes(pack_id, components, list(args.include or []))

    # Reject component keys the selected pack does not define.
    unknown = [key for key in includes if key not in components]
    if unknown:
        valid = ", ".join(sorted(components.keys()))
        print(
            f"error: unknown component(s) for pack {pack_id}: {', '.join(unknown)}; valid: {valid}",
            file=sys.stderr,
        )
        return 2

    bootstrap_created: list[str] = []
    if args.bootstrap:
        bootstrap_created = _install_starter_adapters(
            pack_id=pack_id,
            pack_meta=pack_meta,
            target_repo_root=target_repo_root,
            includes=includes,
            extra_vars=list(args.var or []),
            overwrite=bool(args.overwrite_bootstrap),
        )

    # Render each included component; problem857 gets special-cased
    # template/variable handling depending on its source mode.
    rendered: dict[str, Path] = {}
    for key in includes:
        internal_vars: list[str] = []
        template_id_override = ""
        if key == "problem857":
            if problem857_source_mode == "public_repo":
                template_id_override = "sunflower_problem857_discovery_public_repo"
            elif args.bootstrap:
                # Local mode with bootstrap: point the Lean build at the
                # scaffolded stub so a fresh install runs out of the box.
                internal_vars.append(
                    "PROBLEM857_LEAN_BUILD_COMMAND=python3 ../scripts/orp-lean-build-stub.py SunflowerLean.Balance"
                )
        out_path = _render_component(
            orp_repo_root=orp_repo_root,
            pack_root=pack_root,
            target_repo_root=target_repo_root,
            components=components,
            component_key=key,
            extra_vars=list(args.var or []),
            internal_vars=internal_vars,
            template_id_override=template_id_override,
        )
        rendered[key] = out_path

    dep_summary, total_missing = _audit_component_dependencies(target_repo_root, components, includes)

    report_path = _resolve_report_path(target_repo_root, pack_id, args.report)
    _write_report(
        report_path=report_path,
        generated_at_utc=generated_at_utc,
        pack_id=pack_id,
        pack_version=pack_version,
        target_repo_root=target_repo_root,
        components=components,
        rendered=rendered,
        dep_summary=dep_summary,
        bootstrap_enabled=bool(args.bootstrap),
        bootstrap_created=bootstrap_created,
    )

    # Machine-readable key=value summary on stdout.
    print(f"pack_id={pack_id}")
    print(f"pack_version={pack_version}")
    print(f"pack_root={pack_root}")
    print(f"target_repo_root={target_repo_root}")
    print(f"included_components={','.join(includes)}")
    print(f"bootstrap.enabled={bool(args.bootstrap)}")
    print(f"bootstrap.created={len(bootstrap_created)}")
    if "problem857" in includes:
        print(f"problem857.source_mode={problem857_source_mode}")
        if problem857_source_mode == "public_repo":
            print(f"problem857.public_repo_url={effective_vars.get('PROBLEM857_PUBLIC_REPO_URL', '')}")
            print(f"problem857.public_repo_ref={effective_vars.get('PROBLEM857_PUBLIC_REPO_REF', '')}")
    for key, out_path in rendered.items():
        print(f"rendered.{key}={out_path}")
    print(f"deps.missing_total={total_missing}")
    print(f"report={report_path}")

    if total_missing > 0 and args.strict_deps:
        return 3
    return 0
|
|
2266
|
+
|
|
2267
|
+
|
|
2268
|
+
if __name__ == "__main__":
    try:
        status = main()
    except RuntimeError as exc:
        # Surface expected operational failures as a clean CLI error
        # instead of a traceback, exiting with status 2.
        print(f"error: {exc}", file=sys.stderr)
        raise SystemExit(2)
    raise SystemExit(status)
|