its-magic 0.1.2-37 → 0.1.2-39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/installer.ps1 +20 -0
- package/installer.py +66 -2
- package/installer.sh +22 -0
- package/package.json +2 -1
- package/scripts/check_intake_template_parity.py +1 -0
- package/scripts/intake_evidence_lib.py +413 -10
- package/scripts/intake_evidence_validate.py +2 -2
- package/scripts/materialize_codebase_map.py +184 -0
- package/template/.cursor/agents/po.mdc +19 -0
- package/template/.cursor/commands/architecture.md +12 -0
- package/template/.cursor/commands/ask.md +11 -0
- package/template/.cursor/commands/auto.md +20 -2
- package/template/.cursor/commands/intake.md +64 -9
- package/template/.cursor/commands/map-codebase.md +18 -1
- package/template/.cursor/commands/refresh-context.md +7 -0
- package/template/.cursor/rules/core.mdc +5 -0
- package/template/docs/engineering/artifact-ownership-policy.md +1 -1
- package/template/docs/engineering/context/installer-owned-paths.manifest +17 -0
- package/template/docs/engineering/runbook.md +76 -2
- package/template/scripts/check_intake_template_parity.py +1 -0
- package/template/scripts/enforce-triad-hot-surface.py +626 -0
- package/template/scripts/intake_bug_resume_brief_refresh.py +303 -0
- package/template/scripts/intake_evidence_lib.py +413 -10
- package/template/scripts/intake_evidence_validate.py +2 -2
- package/template/scripts/materialize_codebase_map.py +184 -0
|
@@ -0,0 +1,626 @@
|
|
|
1
|
+
#!/usr/bin/env python3
|
|
2
|
+
"""
|
|
3
|
+
Deterministic hot-surface enforcement for the engineering triad (DEC-0054).
|
|
4
|
+
|
|
5
|
+
Surfaces:
|
|
6
|
+
docs/engineering/state.md
|
|
7
|
+
handoffs/po_to_tl.md
|
|
8
|
+
docs/engineering/architecture.md
|
|
9
|
+
|
|
10
|
+
Thresholds resolve from merged .cursor/scratchpad.md + scratchpad.local.md.
|
|
11
|
+
|
|
12
|
+
Modes:
|
|
13
|
+
--check fail closed if any surface exceeds policy (exit 1)
|
|
14
|
+
--rollover archive oldest material into deterministic packs; idempotent
|
|
15
|
+
--self-test built-in regression (temp fixtures, no repo mutation)
|
|
16
|
+
|
|
17
|
+
User-facing diagnostics avoid planning-shaped tokens; use paths + reason codes.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
import argparse
|
|
23
|
+
import json
|
|
24
|
+
import re
|
|
25
|
+
import sys
|
|
26
|
+
import tempfile
|
|
27
|
+
from datetime import datetime, timezone
|
|
28
|
+
from pathlib import Path
|
|
29
|
+
from typing import Dict, List, Optional, Sequence, Tuple
|
|
30
|
+
|
|
31
|
+
# Fallback thresholds used when no scratchpad layer overrides them.
# Values are strings because scratchpad entries are parsed as KEY=VALUE text;
# _int_policy converts and validates them at the point of use.
DEFAULTS = {
    "STATE_HOT_MAX_LINES": "1200",
    "STATE_HOT_MAX_CHECKPOINTS": "80",
    "PO_TO_TL_HOT_MAX_LINES": "800",
    "PO_TO_TL_HOT_MAX_SECTIONS": "60",
    "ARCH_HOT_MAX_LINES": "3500",
    "ARCH_HOT_MAX_STORY_SECTIONS": "120",
}

# Hot-surface files, relative to the repository root.
STATE_REL = Path("docs/engineering/state.md")
PO_REL = Path("handoffs/po_to_tl.md")
ARCH_REL = Path("docs/engineering/architecture.md")
# Archive-pack destination directories, one per surface.
STATE_ARCH_DIR = Path("docs/engineering/state-archive")
PO_ARCH_DIR = Path("handoffs/archive")
ARCH_ARCH_DIR = Path("docs/engineering/architecture-archive")

# `## ... checkpoint ...` headings (case-insensitive) delimit the archivable
# units of state.md.
CHECKPOINT_HEADING = re.compile(r"^## .*\bcheckpoint\b.*$", re.I)
# `# US-NNNN` story headings delimit the archivable units of architecture.md;
# the separator after the id may be ':', an em dash, or '-'.
STORY_HEADING = re.compile(r"^# US-\d{4}\s*[:\u2014\-].+$")
|
|
49
|
+
|
|
50
|
+
|
|
51
|
+
class PolicyError(Exception):
    """Enforcement failure carrying a stable, machine-readable reason code.

    `code` is one of the script's diagnostic tokens (e.g.
    STATE_ARCHIVE_WRITE_FAILED); `message` is the human-readable detail and is
    also what str(exc) returns.
    """

    def __init__(self, code: str, message: str) -> None:
        self.code = code
        self.message = message
        super().__init__(message)
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def _repo_root(cli: Optional[str]) -> Path:
|
|
59
|
+
if cli:
|
|
60
|
+
return Path(cli).resolve()
|
|
61
|
+
return Path(__file__).resolve().parent.parent
|
|
62
|
+
|
|
63
|
+
|
|
64
|
+
def _parse_scratchpad_text(text: str, into: Dict[str, str]) -> None:
|
|
65
|
+
for raw in text.splitlines():
|
|
66
|
+
line = raw.strip()
|
|
67
|
+
if not line or line.startswith("#"):
|
|
68
|
+
continue
|
|
69
|
+
if line.startswith("- "):
|
|
70
|
+
continue
|
|
71
|
+
if "=" not in line:
|
|
72
|
+
continue
|
|
73
|
+
key, _, val = line.partition("=")
|
|
74
|
+
key, val = key.strip(), val.strip()
|
|
75
|
+
if key and val:
|
|
76
|
+
into[key] = val
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def load_merged_policy(repo: Path) -> Dict[str, str]:
    """Merge scratchpad layers: DEFAULTS < example < baseline < local (DEC-0055)."""
    merged = dict(DEFAULTS)
    cursor = repo / ".cursor"
    # Later layers win: each file re-parses into the same dict.
    layers = (
        cursor / "scratchpad.local.example.md",
        cursor / "scratchpad.md",
        cursor / "scratchpad.local.md",
    )
    for layer in layers:
        if layer.is_file():
            _parse_scratchpad_text(layer.read_text(encoding="utf-8"), merged)
    return merged
|
|
92
|
+
|
|
93
|
+
|
|
94
|
+
def _int_policy(policy: Dict[str, str], key: str) -> int:
|
|
95
|
+
try:
|
|
96
|
+
v = int(policy[key])
|
|
97
|
+
if v < 1:
|
|
98
|
+
raise ValueError
|
|
99
|
+
return v
|
|
100
|
+
except (KeyError, ValueError) as exc:
|
|
101
|
+
raise PolicyError(
|
|
102
|
+
"STATE_ARCHIVE_VERIFICATION_FAILED",
|
|
103
|
+
f"invalid or missing integer policy {key}",
|
|
104
|
+
) from exc
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
def line_count(text: str) -> int:
    """Number of lines in *text*; the empty string counts as zero lines."""
    return len(text.splitlines()) if text else 0
|
|
111
|
+
|
|
112
|
+
|
|
113
|
+
def split_state_checkpoints(text: str) -> Tuple[str, List[str]]:
    """Split state.md text into (preamble, checkpoint blocks).

    A block starts at each `## ... checkpoint ...` heading (case-insensitive)
    and runs to the next such heading or end of text. With no headings the
    whole text is returned as the preamble with no blocks.
    """
    heading = re.compile(r"^## .*\bcheckpoint\b.*$", re.I)
    rows = text.splitlines(keepends=True)
    heads = [n for n, row in enumerate(rows) if heading.match(row.rstrip("\r\n"))]
    if not heads:
        return text, []
    bounds = heads + [len(rows)]
    chunks = ["".join(rows[a:b]) for a, b in zip(bounds, bounds[1:])]
    return "".join(rows[: heads[0]]), chunks
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def split_po_sections(text: str) -> List[str]:
    """Split handoff text into its `## ` sections.

    Each section runs from a `## ` heading line to the next one or end of
    text. Note: lines before the first heading are NOT returned. When there
    are no headings, non-blank text is returned as a single opaque section
    and blank text yields no sections.
    """
    rows = text.splitlines(keepends=True)
    heads = [n for n, row in enumerate(rows) if row.startswith("## ")]
    if not heads:
        return [text] if text.strip() else []
    bounds = heads + [len(rows)]
    return ["".join(rows[a:b]) for a, b in zip(bounds, bounds[1:])]
|
|
136
|
+
|
|
137
|
+
|
|
138
|
+
def split_arch_stories(text: str) -> Tuple[str, List[str]]:
    """Split architecture.md text into (preamble, story blocks).

    A story block starts at a `# US-NNNN` heading (separator ':', em dash, or
    '-') and runs to the next such heading or end of text. With no story
    headings the whole text is the preamble and no blocks are returned.
    """
    heading = re.compile(r"^# US-\d{4}\s*[:\u2014\-].+$")
    rows = text.splitlines(keepends=True)
    heads = [n for n, row in enumerate(rows) if heading.match(row.rstrip("\r\n"))]
    if not heads:
        return text, []
    bounds = heads + [len(rows)]
    chunks = ["".join(rows[a:b]) for a, b in zip(bounds, bounds[1:])]
    return "".join(rows[: heads[0]]), chunks
|
|
149
|
+
|
|
150
|
+
|
|
151
|
+
def next_pack_path(repo: Path, archive_dir: Path, stem: str) -> Path:
    """Return the first unused pack path for today, creating *archive_dir*.

    Candidates are tried deterministically: `{stem}-YYYYMMDD.md`, then
    `-a`..`-z`, then `-aa`..`-zz`. `repo` is accepted for signature parity
    with the callers but is not read here.

    Raises PolicyError (STATE_ARCHIVE_WRITE_FAILED) when every candidate for
    today already exists.
    """
    archive_dir.mkdir(parents=True, exist_ok=True)
    today = datetime.now(timezone.utc).strftime("%Y%m%d")

    def candidates():
        yield f"{stem}-{today}.md"
        letters = "abcdefghijklmnopqrstuvwxyz"
        for single in letters:
            yield f"{stem}-{today}-{single}.md"
        for first in letters:
            for second in letters:
                yield f"{stem}-{today}-{first}{second}.md"

    for name in candidates():
        target = archive_dir / name
        if not target.exists():
            return target
    raise PolicyError(
        "STATE_ARCHIVE_WRITE_FAILED",
        "exhausted deterministic pack disambiguators for today",
    )
|
|
172
|
+
|
|
173
|
+
|
|
174
|
+
def write_pack_header(
    pack_path: Path,
    title: str,
    source_rel: str,
    trigger: str,
    verification: Dict[str, object],
    first_heading: str,
    last_heading: str,
    moved_units: int,
    retained_units: int,
) -> None:
    """Write the metadata header of a new archive pack (overwrites the file).

    The verification tuple is emitted with keys sorted for deterministic
    output; the header ends with a `---` separator before the archived body
    that callers append afterwards.
    """
    rows = [f"# {title}", ""]
    rows.append(f"- Rollover trigger: `{trigger}`")
    rows.append(f"- Source: `{source_rel}`")
    rows.append(f"- Archived units (oldest first, contiguous prefix): {moved_units}")
    rows.append(f"- Retained units in hot file: {retained_units}")
    rows.append(f"- First archived heading: `{first_heading}`")
    rows.append(f"- Last archived heading: `{last_heading}`")
    rows.append("- Verification tuple (mandatory):")
    for name in sorted(verification):
        rows.append(f"  - {name}={verification[name]}")
    rows.extend(["", "---", ""])
    pack_path.write_text("\n".join(rows) + "\n", encoding="utf-8")
|
|
201
|
+
|
|
202
|
+
|
|
203
|
+
def rollover_state(repo: Path, policy: Dict[str, str], dry_run: bool) -> Optional[Dict[str, object]]:
    """Archive the oldest checkpoints of state.md into a deterministic pack.

    Returns a verification tuple (dict) when material was (or, with dry_run,
    would be) moved; returns None when the file is already within both caps.

    Raises PolicyError:
      STATE_ARCHIVE_BOUNDARY_AMBIGUOUS - over the line cap with no checkpoint
        headings to archive along.
      ARTIFACT_HOT_SURFACE_OVERSIZE - even archiving every movable checkpoint
        cannot bring the file within caps (preamble or a single checkpoint is
        itself too large).
    """
    path = repo / STATE_REL
    text = path.read_text(encoding="utf-8")
    max_lines = _int_policy(policy, "STATE_HOT_MAX_LINES")
    max_cp = _int_policy(policy, "STATE_HOT_MAX_CHECKPOINTS")
    preamble, blocks = split_state_checkpoints(text)
    if not blocks:
        # No archivable units: within cap is fine; over cap is unresolvable.
        if line_count(text) <= max_lines:
            return None
        raise PolicyError(
            "STATE_ARCHIVE_BOUNDARY_AMBIGUOUS",
            "state file exceeds line cap but has no checkpoint headings to archive",
        )
    if line_count(text) <= max_lines and len(blocks) <= max_cp:
        return None  # already within both caps; idempotent no-op
    moved = 0
    work_blocks = list(blocks)
    archived_chunks: List[str] = []
    # Move a contiguous oldest-first prefix until both caps are satisfied.
    while work_blocks and (
        line_count(preamble + "".join(work_blocks)) > max_lines or len(work_blocks) > max_cp
    ):
        archived_chunks.append(work_blocks.pop(0))
        moved += 1
    if not archived_chunks:
        # Defensive guard; the early return above should have fired instead.
        return None
    combined = preamble + "".join(work_blocks)
    if line_count(combined) > max_lines or len(work_blocks) > max_cp:
        # Everything movable was moved and the file is still over cap.
        raise PolicyError(
            "ARTIFACT_HOT_SURFACE_OVERSIZE",
            "state preamble or single checkpoint exceeds hot line cap; manual split required",
        )
    new_body = preamble + "".join(work_blocks)
    # First line of the first/last archived chunk is its heading.
    first_h = archived_chunks[0].splitlines()[0].strip() if archived_chunks else ""
    last_h = archived_chunks[-1].splitlines()[0].strip() if archived_chunks else ""
    trigger = (
        f"STATE_HOT_MAX_LINES={max_lines}, STATE_HOT_MAX_CHECKPOINTS={max_cp}"
    )
    pack = next_pack_path(repo, repo / STATE_ARCH_DIR, "state-pack")
    # Verification tuple returned to the caller (and printed in --json mode).
    ver = {
        "boundary": "triad-rollover|state",
        "moved": moved,
        "retained_checkpoints": len(work_blocks),
        "retained_lines": line_count(new_body),
        "pack_ref": str(pack.as_posix()),
    }
    if dry_run:
        return ver  # report what would move without touching the filesystem
    archived_body = "".join(archived_chunks)
    # Header first (overwrites pack), then append the archived material.
    write_pack_header(
        pack,
        f"State archive pack ({day_stamp()})",
        STATE_REL.as_posix(),
        trigger,
        {
            "archived_body_lines": line_count(archived_body),
            "retained_body_lines": line_count(new_body),
            "preamble_lines": line_count(preamble),
        },
        first_h,
        last_h,
        moved,
        len(work_blocks),
    )
    with pack.open("a", encoding="utf-8") as fh:
        fh.write(archived_body)
        if not archived_body.endswith("\n"):
            fh.write("\n")  # keep the pack newline-terminated
    path.write_text(new_body, encoding="utf-8", newline="\n")
    return ver
|
|
272
|
+
|
|
273
|
+
|
|
274
|
+
def day_stamp() -> str:
    """Current UTC calendar date as YYYY-MM-DD (used in pack titles)."""
    return f"{datetime.now(timezone.utc):%Y-%m-%d}"
|
|
276
|
+
|
|
277
|
+
|
|
278
|
+
def rollover_po_to_tl(repo: Path, policy: Dict[str, str], dry_run: bool) -> Optional[Dict[str, object]]:
    """Archive the oldest `## ` sections of handoffs/po_to_tl.md into a pack.

    Returns a verification tuple (dict) when material was (or, with dry_run,
    would be) moved; returns None when the file is already within caps.

    Fix over the previous revision: text before the first `## ` heading is now
    preserved in the hot file. split_po_sections drops that preamble, so the
    old code silently deleted it when rewriting the file; this version splits
    inline and keeps the preamble, mirroring rollover_state and
    rollover_architecture (which both retain their preambles).

    Raises PolicyError:
      STATE_ARCHIVE_BOUNDARY_AMBIGUOUS - over the line cap with no `## `
        section boundaries to archive along.
      ARTIFACT_HOT_SURFACE_OVERSIZE - archiving every section still leaves the
        file over cap (preamble or a single section is itself too large).
    """
    path = repo / PO_REL
    text = path.read_text(encoding="utf-8")
    max_lines = _int_policy(policy, "PO_TO_TL_HOT_MAX_LINES")
    max_sec = _int_policy(policy, "PO_TO_TL_HOT_MAX_SECTIONS")

    # Split into preamble + `## ` sections so the preamble survives rollover.
    lines = text.splitlines(keepends=True)
    starts = [i for i, ln in enumerate(lines) if ln.startswith("## ")]
    if starts:
        preamble = "".join(lines[: starts[0]])
        sections: List[str] = []
        for j, start in enumerate(starts):
            end = starts[j + 1] if j + 1 < len(starts) else len(lines)
            sections.append("".join(lines[start:end]))
    else:
        # Degenerate cases match split_po_sections: a non-blank file without
        # headings is one opaque section; a blank file has none.
        preamble = ""
        sections = [text] if text.strip() else []

    if not sections:
        if line_count(text) <= max_lines:
            return None
        raise PolicyError(
            "STATE_ARCHIVE_BOUNDARY_AMBIGUOUS",
            "handoff file exceeds line cap but has no ## sections to archive",
        )
    moved = 0
    work = list(sections)
    archived: List[str] = []
    # Move a contiguous oldest-first prefix until both caps are satisfied.
    while work and (
        line_count(preamble + "".join(work)) > max_lines or len(work) > max_sec
    ):
        archived.append(work.pop(0))
        moved += 1
    if not archived:
        return None  # already within caps; idempotent no-op
    new_body = preamble + "".join(work)
    if line_count(new_body) > max_lines or len(work) > max_sec:
        raise PolicyError(
            "ARTIFACT_HOT_SURFACE_OVERSIZE",
            "a single handoff section exceeds policy; manual split required",
        )
    # First line of the first/last archived section is its heading.
    first_h = archived[0].splitlines()[0].strip() if archived else ""
    last_h = archived[-1].splitlines()[0].strip() if archived else ""
    trigger = (
        f"PO_TO_TL_HOT_MAX_LINES={max_lines}, "
        f"PO_TO_TL_HOT_MAX_SECTIONS={max_sec}"
    )
    pack = next_pack_path(repo, repo / PO_ARCH_DIR, "po-to-tl-pack")
    ver = {
        "boundary": "triad-rollover|po_to_tl",
        "moved": moved,
        "retained_sections": len(work),
        "retained_lines": line_count(new_body),
        "pack_ref": str(pack.as_posix()),
    }
    if dry_run:
        return ver  # report what would move without touching the filesystem
    archived_body = "".join(archived)
    write_pack_header(
        pack,
        f"PO to TL archive pack ({day_stamp()})",
        PO_REL.as_posix(),
        trigger,
        {
            "archived_body_lines": line_count(archived_body),
            "retained_body_lines": line_count(new_body),
            # New field, consistent with the state/architecture packs.
            "preamble_lines": line_count(preamble),
        },
        first_h,
        last_h,
        moved,
        len(work),
    )
    with pack.open("a", encoding="utf-8") as fh:
        fh.write(archived_body)
        if not archived_body.endswith("\n"):
            fh.write("\n")  # keep the pack newline-terminated
    path.write_text(new_body, encoding="utf-8", newline="\n")
    return ver
|
|
342
|
+
|
|
343
|
+
|
|
344
|
+
def rollover_architecture(repo: Path, policy: Dict[str, str], dry_run: bool) -> Optional[Dict[str, object]]:
    """Archive the oldest `# US-NNNN` story blocks of architecture.md.

    Returns a verification tuple (dict) when material was (or, with dry_run,
    would be) moved; returns None when the file is already within caps.

    Raises PolicyError:
      STATE_ARCHIVE_BOUNDARY_AMBIGUOUS - over the line cap with no story
        headings to archive along.
      ARTIFACT_HOT_SURFACE_OVERSIZE - archiving every story still leaves the
        file over cap (preamble or a single story block is itself too large).
    """
    path = repo / ARCH_REL
    text = path.read_text(encoding="utf-8")
    max_lines = _int_policy(policy, "ARCH_HOT_MAX_LINES")
    max_stories = _int_policy(policy, "ARCH_HOT_MAX_STORY_SECTIONS")
    preamble, stories = split_arch_stories(text)
    if not stories:
        # No archivable units: within cap is fine; over cap is unresolvable.
        if line_count(text) <= max_lines:
            return None
        raise PolicyError(
            "STATE_ARCHIVE_BOUNDARY_AMBIGUOUS",
            "architecture file exceeds line cap but has no US story headings to archive",
        )
    moved = 0
    work = list(stories)
    archived: List[str] = []
    # Move a contiguous oldest-first prefix until both caps are satisfied.
    while work and (
        line_count(preamble + "".join(work)) > max_lines or len(work) > max_stories
    ):
        archived.append(work.pop(0))
        moved += 1
    if not archived:
        return None  # already within both caps; idempotent no-op
    new_body = preamble + "".join(work)
    if line_count(new_body) > max_lines or len(work) > max_stories:
        raise PolicyError(
            "ARTIFACT_HOT_SURFACE_OVERSIZE",
            "architecture preamble or single story block exceeds policy; manual split required",
        )
    # First line of the first/last archived block is its heading.
    first_h = archived[0].splitlines()[0].strip() if archived else ""
    last_h = archived[-1].splitlines()[0].strip() if archived else ""
    trigger = f"ARCH_HOT_MAX_LINES={max_lines}, ARCH_HOT_MAX_STORY_SECTIONS={max_stories}"
    pack = next_pack_path(repo, repo / ARCH_ARCH_DIR, "architecture-pack")
    # Verification tuple returned to the caller (and printed in --json mode).
    ver = {
        "boundary": "triad-rollover|architecture",
        "moved": moved,
        "retained_story_sections": len(work),
        "retained_lines": line_count(new_body),
        "pack_ref": str(pack.as_posix()),
    }
    if dry_run:
        return ver  # report what would move without touching the filesystem
    archived_body = "".join(archived)
    # Header first (overwrites pack), then append the archived material.
    write_pack_header(
        pack,
        f"Architecture archive pack ({day_stamp()})",
        ARCH_REL.as_posix(),
        trigger,
        {
            "archived_body_lines": line_count(archived_body),
            "retained_body_lines": line_count(new_body),
            "preamble_lines": line_count(preamble),
        },
        first_h,
        last_h,
        moved,
        len(work),
    )
    with pack.open("a", encoding="utf-8") as fh:
        fh.write(archived_body)
        if not archived_body.endswith("\n"):
            fh.write("\n")  # keep the pack newline-terminated
    path.write_text(new_body, encoding="utf-8", newline="\n")
    return ver
|
|
408
|
+
|
|
409
|
+
|
|
410
|
+
def check_surface(
    name: str,
    path: Path,
    lines: int,
    units: int,
    max_lines: int,
    max_units: int,
) -> Optional[str]:
    """Return a STATE_ARCHIVE_REQUIRED diagnostic if either cap is exceeded.

    Returns None when both the line count and unit count are within policy.
    """
    over = lines > max_lines or units > max_units
    if not over:
        return None
    return (
        f"STATE_ARCHIVE_REQUIRED surface={name} path={path.as_posix()} "
        f"lines={lines}/{max_lines} units={units}/{max_units} "
        "reason=ARTIFACT_HOT_SURFACE_OVERSIZE"
    )
|
|
425
|
+
|
|
426
|
+
|
|
427
|
+
def run_check(repo: Path, policy: Dict[str, str]) -> List[str]:
    """Inspect all three hot surfaces; return diagnostics (empty == pass).

    Checks, in order: state (plus an extra preamble-size check), the PO->TL
    handoff, and architecture. Reads each file; may raise PolicyError for
    invalid thresholds and OSError for unreadable files.
    """
    problems: List[str] = []

    # state.md: overall caps plus a dedicated check that the non-checkpoint
    # preamble alone does not exceed the line cap.
    state_text = (repo / STATE_REL).read_text(encoding="utf-8")
    state_pre, checkpoints = split_state_checkpoints(state_text)
    state_cap_lines = _int_policy(policy, "STATE_HOT_MAX_LINES")
    state_cap_units = _int_policy(policy, "STATE_HOT_MAX_CHECKPOINTS")
    diag = check_surface(
        "state",
        STATE_REL,
        line_count(state_text),
        len(checkpoints),
        state_cap_lines,
        state_cap_units,
    )
    if diag:
        problems.append(diag)
    if line_count(state_pre) > state_cap_lines:
        problems.append(
            f"STATE_ARCHIVE_REQUIRED surface=state path={STATE_REL.as_posix()} "
            "reason=ARTIFACT_HOT_SURFACE_OVERSIZE preamble exceeds line cap"
        )

    # handoffs/po_to_tl.md
    po_text = (repo / PO_REL).read_text(encoding="utf-8")
    diag = check_surface(
        "po_to_tl",
        PO_REL,
        line_count(po_text),
        len(split_po_sections(po_text)),
        _int_policy(policy, "PO_TO_TL_HOT_MAX_LINES"),
        _int_policy(policy, "PO_TO_TL_HOT_MAX_SECTIONS"),
    )
    if diag:
        problems.append(diag)

    # docs/engineering/architecture.md
    arch_text = (repo / ARCH_REL).read_text(encoding="utf-8")
    diag = check_surface(
        "architecture",
        ARCH_REL,
        line_count(arch_text),
        len(split_arch_stories(arch_text)[1]),
        _int_policy(policy, "ARCH_HOT_MAX_LINES"),
        _int_policy(policy, "ARCH_HOT_MAX_STORY_SECTIONS"),
    )
    if diag:
        problems.append(diag)

    return problems
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
def run_rollover_all(repo: Path, policy: Dict[str, str], dry_run: bool) -> List[Dict[str, object]]:
    """Run every surface rollover in fixed order; collect non-None tuples."""
    handlers = (rollover_state, rollover_po_to_tl, rollover_architecture)
    return [ver for ver in (handler(repo, policy, dry_run) for handler in handlers) if ver]
|
|
474
|
+
|
|
475
|
+
|
|
476
|
+
def cmd_self_test() -> int:
    """Built-in regression suite on temp fixtures; returns 0 pass / 1 fail.

    Exercises the three splitters on inline strings, then each rollover
    against files under a TemporaryDirectory (no repository mutation).
    Failures are collected and printed to stderr at the end.
    """
    errors: List[str] = []

    def fail(m: str) -> None:
        errors.append(m)

    # --- state checkpoint split + rollover ---
    cp = "## Alpha checkpoint\nbody\n\n## Beta checkpoint\nmore\n"
    pre, blocks = split_state_checkpoints(cp)
    if pre.strip() or len(blocks) != 2:
        fail("state split expected two checkpoint blocks")

    # --- po sections ---
    po = "## A\nx\n\n## B\ny\n\n## C\nz\n\n"
    secs = split_po_sections(po)
    if [line_count(s) for s in secs] != [3, 3, 3]:
        fail("po section split line counts unexpected")

    # --- arch stories ---
    arch = "# Preamble line\n\n# US-0001: One\nx\n# US-0002: Two\ny\n"
    apre, stories = split_arch_stories(arch)
    if "Preamble" not in apre or len(stories) != 2:
        fail("architecture story split failed")

    with tempfile.TemporaryDirectory() as tmp:
        # Build a minimal fake repo layout for the filesystem-facing paths.
        root = Path(tmp)
        (root / ".cursor").mkdir(parents=True, exist_ok=True)
        (root / STATE_REL).parent.mkdir(parents=True, exist_ok=True)
        (root / PO_REL).parent.mkdir(parents=True, exist_ok=True)
        (root / PO_REL).write_text("## Stub\n\n", encoding="utf-8")
        (root / ARCH_REL).write_text("# Architecture\n\n## Overview\nstub\n", encoding="utf-8")

        # State: tiny caps so 5 checkpoints forces an archive, then verify the
        # second run is a no-op and the checker passes.
        policy = dict(DEFAULTS)
        policy["STATE_HOT_MAX_LINES"] = "50"
        policy["STATE_HOT_MAX_CHECKPOINTS"] = "2"
        big_state = (
            "## Active context surface\npreamble\n\n"
            + "".join(f"## Checkpoint {i}\nbody {i}\n\n" for i in range(5))
        )
        (root / STATE_REL).write_text(big_state, encoding="utf-8")
        v1 = rollover_state(root, policy, dry_run=False)
        if not v1 or v1.get("moved", 0) < 1:
            fail("state rollover should move at least one checkpoint")
        v2 = rollover_state(root, policy, dry_run=False)
        if v2 is not None:
            fail("state rollover should be idempotent when within caps")
        err = run_check(root, policy)
        if err:
            fail(f"state fixture should pass after rollover: {err}")

        # PO->TL handoff: same move / idempotence / check cycle.
        policy_po = dict(DEFAULTS)
        policy_po["PO_TO_TL_HOT_MAX_LINES"] = "10"
        policy_po["PO_TO_TL_HOT_MAX_SECTIONS"] = "2"
        po_big = "".join(f"## S{i}\nL{i}\n\n" for i in range(5))
        (root / PO_REL).write_text(po_big, encoding="utf-8")
        r1 = rollover_po_to_tl(root, policy_po, dry_run=False)
        if not r1:
            fail("po_to_tl rollover expected")
        r2 = rollover_po_to_tl(root, policy_po, dry_run=False)
        if r2 is not None:
            fail("po_to_tl idempotent")
        if run_check(root, policy_po):
            fail("po_to_tl check should pass")

        # Architecture: same cycle with story-section caps.
        policy_arch = dict(DEFAULTS)
        policy_arch["ARCH_HOT_MAX_LINES"] = "12"
        policy_arch["ARCH_HOT_MAX_STORY_SECTIONS"] = "2"
        arch_big = "# Top\n\n" + "".join(f"# US-100{i}: X\nL\n\n" for i in range(5))
        (root / ARCH_REL).write_text(arch_big, encoding="utf-8")
        a1 = rollover_architecture(root, policy_arch, dry_run=False)
        if not a1:
            fail("architecture rollover expected")
        a2 = rollover_architecture(root, policy_arch, dry_run=False)
        if a2 is not None:
            fail("architecture idempotent")
        merged_arch = dict(DEFAULTS)
        merged_arch.update(policy_arch)
        if run_check(root, merged_arch):
            fail("architecture check should pass")

    if errors:
        for e in errors:
            print(e, file=sys.stderr)
        return 1
    return 0
|
|
560
|
+
|
|
561
|
+
|
|
562
|
+
def main(argv: Optional[Sequence[str]] = None) -> int:
    """CLI entry point.

    Exit codes: 0 success, 1 policy violation (caps exceeded or self-test
    failure), 2 operational error (unreadable scratchpad, PolicyError, or no
    mode selected).
    """
    p = argparse.ArgumentParser(description="Triad hot-surface enforcement (DEC-0054).")
    p.add_argument("--repo", help="repository root (default: parent of scripts/)")
    # Exactly one mode is required.
    mx = p.add_mutually_exclusive_group(required=True)
    mx.add_argument("--check", action="store_true", help="verify caps, no writes")
    mx.add_argument("--rollover", action="store_true", help="archive oldest units when over cap")
    mx.add_argument("--self-test", action="store_true", help="internal regression fixtures")
    p.add_argument(
        "--json",
        action="store_true",
        help="emit verification tuples as JSON lines (stderr for human mode)",
    )
    p.add_argument(
        "--dry-run",
        action="store_true",
        help="with --rollover, compute moves without writing",
    )
    args = p.parse_args(argv)

    # Self-test needs no repo or policy; run it before any filesystem access.
    if args.self_test:
        return cmd_self_test()

    repo = _repo_root(args.repo)
    try:
        policy = load_merged_policy(repo)
    except OSError as exc:
        print(
            f"STATE_ARCHIVE_WRITE_FAILED could_not_read_scratchpad detail={exc}",
            file=sys.stderr,
        )
        return 2

    if args.check:
        try:
            errs = run_check(repo, policy)
        except PolicyError as exc:
            print(f"{exc.code} {exc.message}", file=sys.stderr)
            return 2
        if errs:
            # One diagnostic per line on stderr; fail closed.
            for e in errs:
                print(e, file=sys.stderr)
            return 1
        return 0

    if args.rollover:
        try:
            outs = run_rollover_all(repo, policy, dry_run=args.dry_run)
        except PolicyError as exc:
            print(f"{exc.code} {exc.message}", file=sys.stderr)
            return 2
        if args.json:
            # Machine mode: one compact JSON verification tuple per line.
            for row in outs:
                print(json.dumps(row, sort_keys=True, separators=(",", ":")))
        elif outs:
            # Human mode: terse summary on stderr only when something moved.
            print(
                "rollover_complete units=" + ",".join(str(x.get("moved", 0)) for x in outs),
                file=sys.stderr,
            )
        return 0

    # Unreachable with the required mutually-exclusive group, kept defensive.
    return 2
|
|
623
|
+
|
|
624
|
+
|
|
625
|
+
if __name__ == "__main__":
    # Propagate main()'s exit code to the shell.
    raise SystemExit(main())
|