@jhlee0619/codexloop 0.1.0 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,56 @@
1
// Artifact-mode adapter — for document/paper/report completion loops.
// Prefers operation-based proposals over unified diffs. Runs built-in
// validators keyed by spec.validators plus mode-required implicit
// validators (file_exists on required_files).

export const name = "artifact";

// Template variables exposed to the prompt. Empty when the loop has no
// structured spec (legacy text-goal runs).
export function getCommonVars(state) {
  const spec = state.goal?.spec ?? null;
  if (!spec) {
    return {};
  }
  return {
    REQUIRED_FILES: Array.isArray(spec.required_files) ? spec.required_files : [],
    REQUIRED_SECTIONS: Array.isArray(spec.required_sections) ? spec.required_sections : [],
    PLACEHOLDER_POLICY: spec.placeholder_policy ?? null
  };
}

// Validators the mode always runs, derived from the spec rather than
// explicitly listed in spec.validators.
export function getImplicitValidators(spec) {
  const implicit = [];
  const files = spec?.required_files;
  if (Array.isArray(files) && files.length > 0) {
    implicit.push({ name: "file_exists", args: { files } });
  }
  const sections = spec?.required_sections;
  if (Array.isArray(sections) && sections.length > 0) {
    implicit.push({ name: "headings_present", args: { sections } });
  }
  return implicit;
}

// Scoring weights for artifact iterations; values sum to 1.0.
export function getQualityWeights() {
  return {
    validatorPassRate: 0.35,
    requiredFilePresence: 0.25,
    distanceFromGoal: 0.20,
    winnerConfidence: 0.10,
    issueReduction: 0.10
  };
}

// Guard names the runtime checks before accepting a proposal.
export function getGuards() {
  const guards = ["requiredFileDeletion", "wholeSectionDeletion"];
  return guards;
}

// Mode-specific instructions appended to the Codex prompt.
export function getPromptSuffix() {
  const lines = [
    "Task mode: artifact. Proposals SHOULD prefer `operations[]` over unified diffs for document editing.",
    "Supported operation kinds: replace_section, insert_after_heading, replace_file, append_to_file.",
    "Use `patch` (unified diff) only when the target file is code, not a document."
  ];
  return lines.join("\n");
}
@@ -0,0 +1,33 @@
1
// Code-mode adapter — extracts the existing v0.1.0 hardcoded values as a
// named adapter so iteration.mjs can dispatch by state.taskMode.
//
// This adapter must produce IDENTICAL behavior to v0.1.0 when taskMode === "code".

export const name = "code";

// Code mode injects no extra template variables into the prompt.
export function getCommonVars(_state) {
  const vars = {};
  return vars;
}

// Code mode has no spec-derived implicit validators; tests/lint/type
// commands cover verification instead.
export function getImplicitValidators(_spec) {
  const none = [];
  return none;
}

// Scoring weights for code iterations; values sum to 1.0.
export function getQualityWeights() {
  const weights = {
    testPassRate: 0.30,
    issueReduction: 0.20,
    winnerConfidence: 0.15,
    typeClean: 0.15,
    lintClean: 0.10,
    distanceFromGoal: 0.10
  };
  return weights;
}

// Guard names the runtime checks before accepting a proposal.
export function getGuards() {
  const guards = ["testFileDeleted", "testDisabled", "missingTestJustification"];
  return guards;
}

// Mode-specific instructions appended to the Codex prompt.
export function getPromptSuffix() {
  const suffix = "Task mode: code. Proposals MUST use unified diffs in the `patch` field.";
  return suffix;
}
@@ -0,0 +1,30 @@
1
// Mode adapter registry. iteration.mjs calls getModeAdapter(state.taskMode)
// once per iteration to get the per-mode constants (prompt vars, quality
// weights, validators, guards, prompt suffix).
//
// In v0.2 this is a simple lookup table. In v0.5 it grows into a real
// interface with per-mode prompt file resolution.

import * as code from "./code.mjs";
import * as artifact from "./artifact.mjs";

// Lookup table: task-mode key (lowercase) → adapter module namespace.
// Keys here define the set of valid modes reported by getValidModes().
const ADAPTERS = {
  code,
  artifact,
  paper: artifact // v0.5 will ship a dedicated paper adapter; for now alias to artifact
};
16
+
17
// Resolve the adapter for a task mode. null/undefined fall back to legacy
// code mode; the lookup is case-insensitive. Throws for unknown modes.
export function getModeAdapter(taskMode) {
  const key = String(taskMode ?? "code").toLowerCase();
  const adapter = ADAPTERS[key];
  if (adapter) {
    return adapter;
  }
  throw new Error(
    `Unknown task mode "${taskMode}". Valid modes: ${Object.keys(ADAPTERS).join(", ")}`
  );
}
27
+
28
// Every mode key the registry knows about (including aliases like "paper").
export function getValidModes() {
  const modes = Object.keys(ADAPTERS);
  return modes;
}
@@ -0,0 +1,244 @@
1
+ // Operation-based proposal translator for artifact/paper mode.
2
+ //
3
+ // Codex returns operations[] in the proposal instead of a unified diff.
4
+ // Each operation targets a single file with a specific kind of edit. The
5
+ // runtime applies every operation in order, then stages + commits the
6
+ // result in the same preSha/postSha/rollback framework as the patch path.
7
+ //
8
+ // v0.2 ships four operation kinds:
9
+ // replace_section — find Markdown heading, replace its body
10
+ // insert_after_heading — insert content after a heading line
11
+ // replace_file — overwrite entire file
12
+ // append_to_file — append to end
13
+
14
+ import fs from "node:fs";
15
+ import path from "node:path";
16
+
17
// Structured error for a failed document operation. Carries the offending
// operation plus its file/kind so applyOperations can build a
// "kind:file: message" diagnostic string.
export class OperationError extends Error {
  constructor(message, details = {}) {
    super(message);
    this.name = "OperationError";
    this.op = details.op;
    this.file = details.file;
    this.kind = details.kind;
  }
}
26
+
27
// Dispatch table: operation kind → handler. Handlers are hoisted function
// declarations defined later in this file; each takes (cwd, op) and returns
// { changed: boolean } or throws OperationError.
const OP_HANDLERS = {
  replace_section: handleReplaceSection,
  insert_after_heading: handleInsertAfterHeading,
  replace_file: handleReplaceFile,
  append_to_file: handleAppendToFile
};

// List of operation kinds this runtime can apply. Used for unsupported-kind
// error messages and for the mode prompt suffix.
export function getSupportedOpKinds() {
  return Object.keys(OP_HANDLERS);
}
37
+
38
// Apply a list of proposed operations in order against the working tree.
//
// Returns { applied, empty, filesTouched, opResults, error }. Stops at the
// first failing operation; earlier operations stay on disk and are expected
// to be undone by the caller's preSha/postSha rollback framework.
export function applyOperations({ cwd, operations }) {
  if (!Array.isArray(operations) || operations.length === 0) {
    return { applied: false, empty: true, filesTouched: [], opResults: [], error: null };
  }

  const filesTouched = new Set();
  const opResults = [];
  const root = path.resolve(cwd);

  for (const op of operations) {
    const handler = OP_HANDLERS[op.kind];
    if (!handler) {
      return {
        applied: false,
        empty: false,
        filesTouched: [...filesTouched],
        opResults,
        error: `Unsupported operation kind: ${op.kind}. Supported: ${getSupportedOpKinds().join(", ")}`
      };
    }
    // Security: operations come from model output (untrusted). Refuse any
    // target that resolves outside the working tree (e.g. "../../etc/passwd"
    // or an absolute path) before the handler touches the filesystem.
    if (typeof op.file === "string" && op.file.length > 0) {
      const rel = path.relative(root, path.resolve(root, op.file));
      if (rel.startsWith("..") || path.isAbsolute(rel)) {
        return {
          applied: false,
          empty: false,
          filesTouched: [...filesTouched],
          opResults,
          error: `${op.kind}:${op.file}: target path escapes the working tree`
        };
      }
    }
    try {
      const result = handler(cwd, op);
      opResults.push({ kind: op.kind, file: op.file, ...result });
      if (result.changed) {
        filesTouched.add(op.file);
      }
    } catch (err) {
      // OperationError carries its own kind/file; fall back to the op's.
      return {
        applied: false,
        empty: false,
        filesTouched: [...filesTouched],
        opResults,
        error: err instanceof OperationError
          ? `${err.kind}:${err.file}: ${err.message}`
          : `${op.kind}:${op.file}: ${err.message}`
      };
    }
  }

  return {
    applied: filesTouched.size > 0,
    empty: filesTouched.size === 0,
    filesTouched: [...filesTouched],
    opResults,
    error: null
  };
}
84
+
85
// ── replace_section ────────────────────────────────────────────────
// Find the first exact Markdown heading match (case-sensitive, trimmed).
// Replace from the line AFTER the heading up to (but not including) the
// next heading of same-or-higher level. If heading is not found, throw.
function handleReplaceSection(cwd, op) {
  const { file, heading, content } = op;
  validateRequired(op, ["file", "heading", "content"]);

  const absPath = path.resolve(cwd, file);
  if (!fs.existsSync(absPath)) {
    throw new OperationError(`File not found: ${file}`, { op, file, kind: "replace_section" });
  }

  const original = fs.readFileSync(absPath, "utf8");
  // Fix: preserve the file's dominant line ending. The previous version
  // split on /\r?\n/ but always re-joined with "\n", silently rewriting
  // CRLF files end-to-end and reporting changed:true when only line
  // endings differed. (Mixed-EOL files are normalized to the dominant one.)
  const eol = original.includes("\r\n") ? "\r\n" : "\n";
  const lines = original.split(/\r?\n/);

  const headingTrimmed = heading.trim();
  const headingLevel = countHeadingLevel(headingTrimmed);
  if (headingLevel === 0) {
    throw new OperationError(`"${heading}" is not a valid Markdown heading`, { op, file, kind: "replace_section" });
  }

  const startIdx = lines.findIndex((l) => l.trim() === headingTrimmed);
  if (startIdx === -1) {
    throw new OperationError(`Heading "${heading}" not found in ${file}`, { op, file, kind: "replace_section" });
  }

  // The section ends at the next heading of same-or-higher level, or EOF.
  let endIdx = lines.length;
  for (let i = startIdx + 1; i < lines.length; i += 1) {
    const lvl = countHeadingLevel(lines[i].trim());
    if (lvl > 0 && lvl <= headingLevel) {
      endIdx = i;
      break;
    }
  }

  // Drop one trailing newline so content "body\n" doesn't add a blank line.
  const contentLines = content.endsWith("\n")
    ? content.slice(0, -1).split("\n")
    : content.split("\n");

  const result = [
    ...lines.slice(0, startIdx + 1),
    ...contentLines,
    ...lines.slice(endIdx)
  ];

  const output = result.join(eol);
  if (output === original) {
    return { changed: false };
  }
  fs.writeFileSync(absPath, output, "utf8");
  return { changed: true };
}
138
+
139
// ── insert_after_heading ───────────────────────────────────────────
// Insert content on the line immediately after the first exact (trimmed,
// case-sensitive) heading match. Throws if file or heading is missing.
function handleInsertAfterHeading(cwd, op) {
  const { file, heading, content } = op;
  validateRequired(op, ["file", "heading", "content"]);

  const absPath = path.resolve(cwd, file);
  if (!fs.existsSync(absPath)) {
    throw new OperationError(`File not found: ${file}`, { op, file, kind: "insert_after_heading" });
  }

  const original = fs.readFileSync(absPath, "utf8");
  // Fix: preserve the file's dominant line ending instead of always
  // re-joining with "\n" (which converted CRLF files wholesale).
  const eol = original.includes("\r\n") ? "\r\n" : "\n";
  const lines = original.split(/\r?\n/);

  const headingTrimmed = heading.trim();
  const idx = lines.findIndex((l) => l.trim() === headingTrimmed);
  if (idx === -1) {
    throw new OperationError(`Heading "${heading}" not found in ${file}`, { op, file, kind: "insert_after_heading" });
  }

  // Drop one trailing newline so content "body\n" doesn't add a blank line.
  const contentLines = content.endsWith("\n")
    ? content.slice(0, -1).split("\n")
    : content.split("\n");

  const result = [
    ...lines.slice(0, idx + 1),
    ...contentLines,
    ...lines.slice(idx + 1)
  ];

  const output = result.join(eol);
  if (output === original) {
    return { changed: false };
  }
  fs.writeFileSync(absPath, output, "utf8");
  return { changed: true };
}
175
+
176
// ── replace_file ───────────────────────────────────────────────────
// Overwrite (or create) the target file with `content`, creating parent
// directories as needed. No-op when the content is already identical.
function handleReplaceFile(cwd, op) {
  const { file, content } = op;
  validateRequired(op, ["file", "content"]);

  const target = path.resolve(cwd, file);
  fs.mkdirSync(path.dirname(target), { recursive: true });

  const before = fs.existsSync(target) ? fs.readFileSync(target, "utf8") : "";
  if (before === content) {
    return { changed: false };
  }
  fs.writeFileSync(target, content, "utf8");
  return { changed: true };
}
196
+
197
// ── append_to_file ─────────────────────────────────────────────────
// Append `content` to the target file (created if absent), inserting a
// newline first when the existing content does not end with one.
function handleAppendToFile(cwd, op) {
  const { file, content } = op;
  validateRequired(op, ["file", "content"]);

  if (!content) {
    // Defensive: validateRequired already rejects null/blank string content.
    return { changed: false };
  }

  const target = path.resolve(cwd, file);
  fs.mkdirSync(path.dirname(target), { recursive: true });

  const existing = fs.existsSync(target) ? fs.readFileSync(target, "utf8") : "";
  const needsNewline = existing.length > 0 && !existing.endsWith("\n");
  fs.writeFileSync(target, existing + (needsNewline ? "\n" : "") + content, "utf8");
  return { changed: true };
}
219
+
220
+ // ── helpers ────────────────────────────────────────────────────────
221
+
222
// Return the ATX heading level (1–6) of a trimmed line, or 0 if the line is
// not a heading. Fix: CommonMark allows an empty heading ("##" followed by
// end-of-line); the previous regex required trailing whitespace and so
// returned 0 for it. Seven or more '#' is still not a heading.
function countHeadingLevel(line) {
  const match = /^(#{1,6})(?:\s|$)/.exec(line);
  return match ? match[1].length : 0;
}
226
+
227
// Throw OperationError unless every named field on `op` is present and,
// for strings, non-blank. `== null` intentionally matches both null and
// undefined.
function validateRequired(op, fields) {
  const isBlank = (value) =>
    value == null || (typeof value === "string" && value.trim() === "");
  for (const field of fields) {
    if (!isBlank(op[field])) continue;
    throw new OperationError(
      `Operation ${op.kind} requires non-empty "${field}"`,
      { op, file: op.file, kind: op.kind }
    );
  }
}
237
+
238
// Synthesize a human-readable summary for logging and progress.log.
// Example: 'replace_file a.md; replace_section b.md @ "## X"'.
export function summarizeOperations(operations) {
  if (!Array.isArray(operations) || operations.length === 0) return "(no operations)";
  const parts = [];
  for (const op of operations) {
    const at = op.heading ? ` @ "${op.heading}"` : "";
    parts.push(`${op.kind} ${op.file}${at}`);
  }
  return parts.join("; ");
}
@@ -0,0 +1,132 @@
1
+ // Context bundle collector (= the "retrieve" phase).
2
+ //
3
+ // Runs BEFORE evaluate in every iteration. Collects relevant file contents
4
+ // from the target repo so the Codex prompt has actual document context
5
+ // instead of just a git diff. The bundle is injected into commonVars as
6
+ // {{CONTEXT_BUNDLE}}.
7
+ //
8
+ // Code mode: spec is null → bundle is empty → zero token overhead.
9
+ // Artifact mode: spec.required_files + state.goal.relevantGlobs drive the collection.
10
+
11
+ import fs from "node:fs";
12
+ import path from "node:path";
13
+ import crypto from "node:crypto";
14
+
15
// Default hard cap on total bundle content injected into the prompt (64 KiB).
const DEFAULT_CAP_BYTES = 64 * 1024;

// Well-known goal/spec documents picked up automatically when present at repo root.
const DEFAULT_FILES = ["TASK.md", "GOAL.md", "PRD.md", "cloop.task.json", "AGENTS.md"];
18
+
19
// Collect the context bundle for the current iteration.
//
// Candidates come from spec.required_files, spec.required_sections,
// the DEFAULT_FILES list, and non-glob entries of goal.relevantGlobs.
// Files are read in candidate order until capBytes is reached; files that
// would exceed the cap are listed with a truncation placeholder instead of
// their content (smaller later files may still fit).
//
// Returns { files, totalBytes, truncated, fileCount, capBytes }.
// Fix: the result now records `capBytes` so formatters can report the cap
// actually in effect rather than assuming the default.
export function collectBundle({ state, repoRoot, capBytes = DEFAULT_CAP_BYTES }) {
  const spec = state?.goal?.spec ?? null;
  const result = {
    files: [],
    totalBytes: 0,
    truncated: false,
    fileCount: 0,
    capBytes
  };

  // Code mode (no spec, no globs) → empty bundle, zero token overhead.
  if (!spec && (!state?.goal?.relevantGlobs || state.goal.relevantGlobs.length === 0)) {
    return result;
  }

  const candidates = new Set();

  if (Array.isArray(spec?.required_files)) {
    for (const f of spec.required_files) {
      candidates.add(f);
    }
  }

  if (Array.isArray(spec?.required_sections)) {
    for (const entry of spec.required_sections) {
      // Fix: sections may be canonical objects ({ file, headings }) or raw
      // strings (pre-canonicalization); strings were previously dropped, and
      // a null entry would have thrown on `entry.file`.
      if (typeof entry === "string") {
        candidates.add(entry);
      } else if (entry?.file) {
        candidates.add(entry.file);
      }
    }
  }

  for (const defaultFile of DEFAULT_FILES) {
    if (fs.existsSync(path.resolve(repoRoot, defaultFile))) {
      candidates.add(defaultFile);
    }
  }

  if (Array.isArray(state?.goal?.relevantGlobs)) {
    for (const glob of state.goal.relevantGlobs) {
      // v0.2 does no glob expansion; only literal file paths are honored.
      if (glob.includes("*") || glob.includes("?")) continue;
      const absPath = path.resolve(repoRoot, glob);
      if (fs.existsSync(absPath) && fs.statSync(absPath).isFile()) {
        candidates.add(glob);
      }
    }
  }

  let usedBytes = 0;

  for (const relPath of candidates) {
    const absPath = path.resolve(repoRoot, relPath);
    if (!fs.existsSync(absPath)) continue;

    let stat;
    try {
      stat = fs.statSync(absPath);
    } catch {
      continue; // best-effort: skip files that vanish or error mid-collection
    }
    if (!stat.isFile()) continue;

    // Pre-read guard on the on-disk size; the post-read accounting below
    // uses the UTF-8 byte length of the decoded content.
    if (usedBytes + stat.size > capBytes) {
      result.truncated = true;
      result.files.push({
        path: relPath,
        contentHash: null,
        content: `(truncated — file ${stat.size} bytes, cap remaining ${capBytes - usedBytes} bytes)`,
        truncated: true
      });
      continue;
    }

    let content;
    try {
      content = fs.readFileSync(absPath, "utf8");
    } catch {
      continue;
    }

    // Short hash so the prompt (and logs) can identify the exact revision read.
    const contentHash = crypto.createHash("sha256").update(content).digest("hex").slice(0, 16);

    result.files.push({
      path: relPath,
      contentHash,
      content,
      truncated: false
    });

    usedBytes += Buffer.byteLength(content, "utf8");
  }

  result.totalBytes = usedBytes;
  result.fileCount = result.files.length;
  return result;
}
112
+
113
// Render a collected bundle as prompt text: one "--- path (hash) ---" header
// per file, truncated files without a hash, plus a trailing truncation note.
// Fix: the truncation note previously hardcoded DEFAULT_CAP_BYTES even when
// collectBundle was called with a custom cap; prefer bundle.capBytes when
// the bundle recorded it.
export function formatBundleForPrompt(bundle) {
  if (!bundle || bundle.fileCount === 0) {
    return "(no context bundle — code mode or no required files)";
  }

  const parts = [];
  for (const file of bundle.files) {
    if (file.truncated) {
      parts.push(`--- ${file.path} ---\n${file.content}`);
    } else {
      parts.push(`--- ${file.path} (${file.contentHash}) ---\n${file.content}`);
    }
  }

  if (bundle.truncated) {
    const cap = bundle.capBytes ?? DEFAULT_CAP_BYTES;
    parts.push(`\n(bundle truncated at ${cap} bytes — some files shown as preview only)`);
  }

  return parts.join("\n\n");
}
@@ -22,11 +22,19 @@ import process from "node:process";
22
22
 
23
23
  import { isProcessAlive } from "./process.mjs";
24
24
 
25
- const STATE_VERSION = 1;
25
+ const STATE_VERSION = 2;
26
26
  const STATE_FILENAME = "state.json";
27
27
 
28
28
// State-file migrations. Presumably MIGRATIONS[v] upgrades a state object
// from version v to v+1 (so a v1 file runs MIGRATIONS[1] to reach
// STATE_VERSION 2) — confirm against the migration runner, which is outside
// this view. Entries must be append-only; never reorder.
const MIGRATIONS = [
  // Index 0: placeholder (v0 never existed in the wild).
  undefined,
  // Index 1: v1 → v2. Adds taskMode + goal.spec for artifact/paper mode support.
  // Mutates `state` in place and also returns it. Defaults match
  // computeGoalHash's BC path (taskMode "code", spec null) so migrated files
  // keep their existing goalHash.
  function migrateV1toV2(state) {
    if (!state.taskMode) state.taskMode = "code";
    if (!state.goal) state.goal = {};
    if (state.goal.spec === undefined) state.goal.spec = null;
    return state;
  }
];
31
39
 
32
40
  function nowIso() {
@@ -82,6 +90,7 @@ export function defaultState() {
82
90
  status: "idle",
83
91
  pid: null,
84
92
  mode: "interactive",
93
+ taskMode: "code",
85
94
  model: DEFAULT_MODEL,
86
95
  reasoningEffort: DEFAULT_REASONING_EFFORT,
87
96
  startedAt: null,
@@ -95,7 +104,8 @@ export function defaultState() {
95
104
  testCmd: null,
96
105
  lintCmd: null,
97
106
  typeCmd: null,
98
- goalHash: null
107
+ goalHash: null,
108
+ spec: null
99
109
  },
100
110
  budget: {
101
111
  maxIterations: 20,
@@ -226,14 +236,26 @@ export function generateLoopId() {
226
236
  return `loop-${time}-${rand}`;
227
237
  }
228
238
 
229
// Compute a 16-hex-char SHA-256 digest identifying the goal definition.
// Callers compare it across runs to detect goal drift. `taskMode` is optional
// and defaults to "code".
export function computeGoalHash(goal, taskMode) {
  // BC: when taskMode is "code" and spec is null, the canonical string MUST be
  // byte-for-byte identical to v0.1.0's format. We achieve this by only
  // appending v0.2 fields when they differ from code-mode defaults. This
  // guarantees that a v1 state file loaded by v0.2 code produces the same
  // goalHash without requiring the migration to rewrite it.
  const parts = {
    text: goal.text ?? "",
    acceptanceCriteria: goal.acceptanceCriteria ?? [],
    testCmd: goal.testCmd ?? null,
    lintCmd: goal.lintCmd ?? null,
    typeCmd: goal.typeCmd ?? null
  };
  const tm = taskMode ?? "code";
  const spec = goal.spec ?? null;
  if (tm !== "code" || spec !== null) {
    // Non-default mode or a structured spec changes the goal identity.
    parts.taskMode = tm;
    parts.spec = spec;
  }
  // NOTE(review): JSON.stringify serializes `parts` in insertion order — do
  // not reorder the assignments above or every stored goalHash changes.
  const canonical = JSON.stringify(parts);
  return crypto.createHash("sha256").update(canonical).digest("hex").slice(0, 16);
}
239
261
 
@@ -0,0 +1,111 @@
1
+ // Structured task spec loader, validator, and canonicalizer.
2
+ //
3
+ // A task spec can come from:
4
+ // 1. --task-file <path>.json (explicit flag)
5
+ // 2. cloop.task.json at repo root (committed declarative spec)
6
+ // 3. Interview-assembled spec at .loop/task-spec.json
7
+ //
8
+ // When the file is plain text (not JSON), it is treated as goal text and
9
+ // spec stays null (legacy code-mode behavior).
10
+
11
+ import fs from "node:fs";
12
+ import path from "node:path";
13
+
14
+ import { getValidModes } from "./modes/index.mjs";
15
+
16
+ const SPEC_VERSION = 1;
17
+
18
// Load and canonicalize a task spec from `filePath`.
// Returns null when the path is missing, the file does not exist, the file
// is empty, or the content is not JSON (plain text is treated as legacy goal
// text by the caller, with spec staying null). Canonicalization errors from
// validateAndCanonicalizeSpec propagate.
export function loadTaskSpec(filePath) {
  if (!filePath || !fs.existsSync(filePath)) return null;

  const raw = fs.readFileSync(filePath, "utf8").trim();
  if (raw === "") return null;

  let parsed = null;
  try {
    parsed = JSON.parse(raw);
  } catch {
    // Not JSON → no spec; the file is plain goal text.
    return null;
  }
  return validateAndCanonicalizeSpec(parsed);
}
+
35
// Look for a committed cloop.task.json at the repo root and load it.
// Returns null when the file is absent.
export function autoDetectTaskSpec(repoRoot) {
  const specPath = path.join(repoRoot, "cloop.task.json");
  return fs.existsSync(specPath) ? loadTaskSpec(specPath) : null;
}
42
+
43
// Validate a parsed spec object and normalize every field to its canonical
// shape. Throws on structurally invalid input; missing optional fields get
// safe defaults (mode defaults to "artifact" — a spec file implies a
// non-code task).
export function validateAndCanonicalizeSpec(raw) {
  // Fix: arrays satisfy `typeof raw === "object"`, so `[]` previously slid
  // through and canonicalized into a meaningless default spec. Reject them
  // explicitly along with null and non-objects.
  if (raw == null || typeof raw !== "object" || Array.isArray(raw)) {
    throw new Error("Task spec must be a JSON object");
  }

  const spec = {
    version: raw.version ?? SPEC_VERSION,
    mode: normalizeMode(raw.mode ?? "artifact"),
    required_files: Array.isArray(raw.required_files) ? raw.required_files.filter(Boolean) : [],
    required_sections: Array.isArray(raw.required_sections)
      ? raw.required_sections.map(canonicalizeSection)
      : [],
    placeholder_policy: canonicalizePlaceholderPolicy(raw.placeholder_policy),
    validators: Array.isArray(raw.validators) ? raw.validators.map(canonicalizeValidator) : []
  };

  return spec;
}
61
+
62
// Lowercase/trim the mode string and verify it against the adapter registry;
// throws for unknown modes.
function normalizeMode(mode) {
  const normalized = String(mode).trim().toLowerCase();
  const validModes = getValidModes();
  if (validModes.includes(normalized)) {
    return normalized;
  }
  throw new Error(`Invalid task mode "${mode}". Valid: ${validModes.join(", ")}`);
}
70
+
71
// Normalize a required_sections entry. A bare string is shorthand for
// "the file must exist" ({ file, headings: [] }); objects get their file
// coerced to string and falsy headings removed.
function canonicalizeSection(entry) {
  if (typeof entry === "string") {
    return { file: entry, headings: [] };
  }
  const headings = Array.isArray(entry.headings) ? entry.headings.filter(Boolean) : [];
  return { file: String(entry.file ?? ""), headings };
}
80
+
81
// Normalize placeholder_policy: null/undefined stays null; otherwise `max`
// defaults to 10 (0 is a valid explicit value) and disallowed_kinds to [].
function canonicalizePlaceholderPolicy(policy) {
  if (policy == null) return null;
  const max = typeof policy.max === "number" ? policy.max : 10;
  const disallowed = Array.isArray(policy.disallowed_kinds) ? policy.disallowed_kinds : [];
  return { max, disallowed_kinds: disallowed };
}
88
+
89
// Normalize a validators entry. A bare string is shorthand for a validator
// with no args; objects get name coerced to string and non-object args
// replaced with {}.
function canonicalizeValidator(entry) {
  if (typeof entry === "string") {
    return { name: entry, args: {} };
  }
  const args = entry.args != null && typeof entry.args === "object" ? entry.args : {};
  return { name: String(entry.name ?? ""), args };
}
98
+
99
// Produce a deterministic JSON string of a spec for hashing (fixed key
// order via insertion order); returns null for a null spec.
// NOTE(review): the `mode` fallback here is "code", while
// validateAndCanonicalizeSpec defaults a missing mode to "artifact" —
// confirm the mismatch is intentional before changing either side, since
// altering it changes stored hashes.
export function canonicalizeSpecForHash(spec) {
  if (spec == null) return null;
  return JSON.stringify({
    version: spec.version ?? SPEC_VERSION,
    mode: spec.mode ?? "code",
    required_files: spec.required_files ?? [],
    required_sections: spec.required_sections ?? [],
    placeholder_policy: spec.placeholder_policy ?? null,
    validators: spec.validators ?? []
  });
}

export { SPEC_VERSION };