deuk-agent-rule 2.5.13 → 3.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/CHANGELOG.ko.md +74 -0
  2. package/CHANGELOG.md +138 -316
  3. package/README.ko.md +134 -154
  4. package/README.md +121 -153
  5. package/package.json +29 -7
  6. package/scripts/cli-args.mjs +87 -3
  7. package/scripts/cli-init-commands.mjs +1382 -223
  8. package/scripts/cli-init-logic.mjs +28 -16
  9. package/scripts/cli-prompts.mjs +13 -4
  10. package/scripts/cli-rule-compiler.mjs +44 -34
  11. package/scripts/cli-skill-commands.mjs +172 -0
  12. package/scripts/cli-telemetry-commands.mjs +429 -0
  13. package/scripts/cli-ticket-commands.mjs +1934 -161
  14. package/scripts/cli-ticket-index.mjs +298 -0
  15. package/scripts/cli-ticket-migration.mjs +320 -0
  16. package/scripts/cli-ticket-parser.mjs +207 -0
  17. package/scripts/cli-utils.mjs +381 -59
  18. package/scripts/cli.mjs +99 -19
  19. package/scripts/lint-md.mjs +247 -0
  20. package/scripts/lint-rules.mjs +143 -0
  21. package/scripts/merge-logic.mjs +13 -306
  22. package/scripts/plan-parser.mjs +53 -0
  23. package/templates/MODULE_RULE_TEMPLATE.md +11 -0
  24. package/templates/PROJECT_RULE.md +47 -0
  25. package/templates/TICKET_TEMPLATE.ko.md +21 -0
  26. package/templates/TICKET_TEMPLATE.md +21 -0
  27. package/templates/rules.d/deukcontext-mcp.md +31 -0
  28. package/templates/rules.d/platform-coexistence.md +29 -0
  29. package/templates/skills/context-recall/SKILL.md +25 -0
  30. package/templates/skills/generated-file-guard/SKILL.md +25 -0
  31. package/templates/skills/safe-refactor/SKILL.md +25 -0
  32. package/bundle/.cursorrules +0 -11
  33. package/bundle/AGENTS.md +0 -146
  34. package/bundle/gemini.md +0 -26
  35. package/bundle/rules/delivery-and-parallel-work.mdc +0 -26
  36. package/bundle/rules/git-commit.mdc +0 -24
  37. package/bundle/rules/multi-ai-workflow.mdc +0 -104
  38. package/bundle/rules.d/core-workflow.md +0 -48
  39. package/bundle/rules.d/deukrag-mcp.md +0 -37
  40. package/bundle/templates/MODULE_RULE_TEMPLATE.md +0 -24
  41. package/bundle/templates/TICKET_TEMPLATE.md +0 -58
  42. package/scripts/cli-ticket-logic.mjs +0 -568
  43. package/scripts/sync-bundle.mjs +0 -77
  44. package/scripts/sync-oss.mjs +0 -126
@@ -0,0 +1,298 @@
1
+ import { existsSync, mkdirSync, readFileSync, readdirSync, writeFileSync, unlinkSync } from "fs";
2
+ import { basename, dirname, join, resolve } from "path";
3
+ import { hostname as osHostname } from "os";
4
+ import {
5
+ AGENT_ROOT_DIR, TICKET_SUBDIR, TICKET_INDEX_FILENAME,
6
+ toSlug, findFileRecursively, toRepoRelativePath, detectConsumerTicketDir, computeTicketPath, normalizeTicketGroup
7
+ } from "./cli-utils.mjs";
8
+
9
+ const TICKET_ARCHIVE_INDEX_FILENAME = "INDEX.archive.json";
10
+ const TICKET_ARCHIVE_INDEX_PREFIX = "INDEX.archive.";
11
+ const ARCHIVE_INDEX_RETENTION_MONTHS = 12;
12
+ const ARCHIVE_INDEX_MONTH_RE = /^INDEX\.archive\.(\d{4}-\d{2})\.json$/;
13
+ const ARCHIVE_INDEX_LEGACY_RE = /^INDEX\.archive\.json$/;
14
+
15
+ function parseArchiveMonth(value) {
16
+ const match = String(value || "").match(/^(\d{4})-(\d{2})$/);
17
+ if (!match) return null;
18
+ const year = Number(match[1]);
19
+ const month = Number(match[2]);
20
+ if (!Number.isInteger(year) || !Number.isInteger(month) || month < 1 || month > 12) return null;
21
+ return { year, month, yearMonth: `${match[1]}-${match[2]}` };
22
+ }
23
+
24
+ function monthDistance(fromYearMonth, toDate = new Date()) {
25
+ const parsed = parseArchiveMonth(fromYearMonth);
26
+ if (!parsed) return null;
27
+ return (toDate.getUTCFullYear() * 12 + toDate.getUTCMonth()) - ((parsed.year * 12) + (parsed.month - 1));
28
+ }
29
+
30
+ function resolveArchivePartition(entry, now = new Date()) {
31
+ const fromEntry = parseArchiveMonth(entry?.archiveYearMonth);
32
+ if (fromEntry) {
33
+ return {
34
+ yearMonth: fromEntry.yearMonth,
35
+ day: String(entry.archiveDay || "").match(/^\d{2}$/)?.[0] || String(now.getUTCDate()).padStart(2, "0")
36
+ };
37
+ }
38
+
39
+ const source = String(entry?.createdAt || entry?.updatedAt || "");
40
+ const match = source.match(/^(\d{4})-(\d{2})-(\d{2})/);
41
+ if (match) {
42
+ return { yearMonth: `${match[1]}-${match[2]}`, day: match[3] };
43
+ }
44
+
45
+ return {
46
+ yearMonth: `${now.getUTCFullYear()}-${String(now.getUTCMonth() + 1).padStart(2, "0")}`,
47
+ day: String(now.getUTCDate()).padStart(2, "0")
48
+ };
49
+ }
50
+
51
+ function shouldRetainArchiveEntry(entry, now = new Date()) {
52
+ const partition = resolveArchivePartition(entry, now);
53
+ const distance = monthDistance(partition.yearMonth, now);
54
+ if (distance === null) return true;
55
+ return distance <= ARCHIVE_INDEX_RETENTION_MONTHS;
56
+ }
57
+
58
+ function archiveIndexFilePath(dir, yearMonth = null) {
59
+ return join(dir, yearMonth ? `${TICKET_ARCHIVE_INDEX_PREFIX}${yearMonth}.json` : TICKET_ARCHIVE_INDEX_FILENAME);
60
+ }
61
+
62
+ function listArchiveIndexFiles(dir) {
63
+ if (!existsSync(dir)) return [];
64
+ return readdirSync(dir, { withFileTypes: true })
65
+ .filter(ent => ent.isFile())
66
+ .map(ent => join(dir, ent.name))
67
+ .filter(abs => ARCHIVE_INDEX_LEGACY_RE.test(basename(abs)) || ARCHIVE_INDEX_MONTH_RE.test(basename(abs)))
68
+ .sort((a, b) => {
69
+ const aBase = basename(a);
70
+ const bBase = basename(b);
71
+ const aMatch = aBase.match(ARCHIVE_INDEX_MONTH_RE);
72
+ const bMatch = bBase.match(ARCHIVE_INDEX_MONTH_RE);
73
+ if (aMatch && !bMatch) return 1;
74
+ if (!aMatch && bMatch) return -1;
75
+ if (!aMatch && !bMatch) return aBase.localeCompare(bBase);
76
+ return aMatch[1].localeCompare(bMatch[1]);
77
+ });
78
+ }
79
+
80
+ function parseIndexFile(absPath) {
81
+ if (!existsSync(absPath)) {
82
+ return { version: 1, updatedAt: null, activeTicketId: null, entries: [] };
83
+ }
84
+ try {
85
+ const j = JSON.parse(readFileSync(absPath, "utf8"));
86
+ const entries = Array.isArray(j.entries) ? j.entries.map(e => {
87
+ const entry = { ...e, status: e.status || "open", group: normalizeTicketGroup(e.group, "sub") };
88
+ entry.path = computeTicketPath(entry);
89
+ return entry;
90
+ }) : [];
91
+ return {
92
+ version: j.version || 1,
93
+ updatedAt: j.updatedAt ?? null,
94
+ activeTicketId: j.activeTicketId ?? null,
95
+ entries
96
+ };
97
+ } catch (err) {
98
+ console.error(`[ERROR] Failed to parse ${basename(absPath)} at ${absPath}:`, err.message);
99
+ return { version: 1, updatedAt: null, activeTicketId: null, entries: [], _corrupt: true };
100
+ }
101
+ }
102
+
103
+ function splitEntriesForStorage(entries = []) {
104
+ const activeEntries = [];
105
+ const archiveEntries = [];
106
+ for (const entry of entries) {
107
+ const status = String(entry?.status || "open").toLowerCase();
108
+ if (status === "open" || status === "active") {
109
+ activeEntries.push(entry);
110
+ } else {
111
+ archiveEntries.push(entry);
112
+ }
113
+ }
114
+ return { activeEntries, archiveEntries };
115
+ }
116
+
117
+ function partitionArchiveEntries(entries = []) {
118
+ const retained = [];
119
+ const retired = [];
120
+ for (const entry of entries) {
121
+ if (shouldRetainArchiveEntry(entry)) retained.push(entry);
122
+ else retired.push(entry);
123
+ }
124
+ return { retained, retired };
125
+ }
126
+
127
+ function mergeIndexEntries(primaryEntries = [], archiveEntries = []) {
128
+ const merged = new Map();
129
+ for (const entry of primaryEntries || []) {
130
+ if (entry?.id) merged.set(entry.id, entry);
131
+ }
132
+ for (const entry of archiveEntries || []) {
133
+ if (entry?.id) merged.set(entry.id, entry);
134
+ }
135
+ return Array.from(merged.values());
136
+ }
137
+
138
+ export function readTicketIndexJson(cwd) {
139
+ const dir = detectConsumerTicketDir(cwd);
140
+ if (!dir) return { version: 1, updatedAt: null, entries: [] };
141
+ const mainPath = join(dir, TICKET_INDEX_FILENAME);
142
+ const main = parseIndexFile(mainPath);
143
+ const archiveFiles = listArchiveIndexFiles(dir).map(parseIndexFile);
144
+
145
+ const archiveEntries = archiveFiles.flatMap(file => file.entries || []);
146
+ const entries = mergeIndexEntries(main.entries, archiveEntries).map(entry => {
147
+ const next = { ...entry, status: entry.status || "open", group: normalizeTicketGroup(entry.group, "sub") };
148
+ next.path = computeTicketPath(next);
149
+ return next;
150
+ });
151
+
152
+ return {
153
+ version: main.version || archiveFiles[0]?.version || 1,
154
+ updatedAt: main.updatedAt ?? archiveFiles[0]?.updatedAt ?? null,
155
+ activeTicketId: main.activeTicketId ?? null,
156
+ entries,
157
+ _corrupt: Boolean(main._corrupt || archiveFiles.some(file => file._corrupt))
158
+ };
159
+ }
160
+
161
+ export function writeTicketIndexJson(cwd, indexJson, opts = {}) {
162
+ if (indexJson._corrupt && !opts.force) {
163
+ console.error(`[ABORT] Refusing to overwrite potentially corrupt ${TICKET_INDEX_FILENAME}. Use --force to override.`);
164
+ return;
165
+ }
166
+ const dir = detectConsumerTicketDir(cwd, { createIfMissing: true });
167
+ const p = join(dir, TICKET_INDEX_FILENAME);
168
+ if (opts.dryRun) return;
169
+ mkdirSync(dir, { recursive: true });
170
+
171
+ const out = { ...indexJson };
172
+ delete out._corrupt;
173
+
174
+ // Strip physical path snapshots before saving to enforce state-driven resolution
175
+ const entries = Array.isArray(out.entries) ? out.entries : [];
176
+ const { activeEntries, archiveEntries } = splitEntriesForStorage(entries);
177
+ out.entries = activeEntries.map(e => {
178
+ const { path, ...clean } = e;
179
+ return clean;
180
+ });
181
+ out.activeTicketId = out.activeTicketId || activeEntries.find(e => e.status === "active")?.id || activeEntries.find(e => e.status === "open")?.id || null;
182
+ writeFileSync(p, JSON.stringify(out, null, 2) + "\n", "utf8");
183
+
184
+ const archiveFiles = listArchiveIndexFiles(dir);
185
+ const retainedArchiveEntries = partitionArchiveEntries(archiveEntries);
186
+ const archiveBuckets = new Map();
187
+
188
+ for (const entry of retainedArchiveEntries.retained) {
189
+ const partition = resolveArchivePartition(entry);
190
+ const bucket = archiveBuckets.get(partition.yearMonth) || [];
191
+ bucket.push({ ...entry, archiveYearMonth: partition.yearMonth, archiveDay: partition.day });
192
+ archiveBuckets.set(partition.yearMonth, bucket);
193
+ }
194
+
195
+ const desiredArchiveFiles = new Set();
196
+ for (const [yearMonth, bucket] of archiveBuckets.entries()) {
197
+ const archiveOut = {
198
+ version: out.version || 1,
199
+ updatedAt: out.updatedAt || new Date().toISOString(),
200
+ activeTicketId: null,
201
+ entries: bucket.map(e => {
202
+ const { path, ...clean } = e;
203
+ return clean;
204
+ })
205
+ };
206
+ const archivePath = archiveIndexFilePath(dir, yearMonth);
207
+ desiredArchiveFiles.add(archivePath);
208
+ writeFileSync(archivePath, JSON.stringify(archiveOut, null, 2) + "\n", "utf8");
209
+ }
210
+
211
+ for (const filePath of archiveFiles) {
212
+ if (!desiredArchiveFiles.has(filePath)) {
213
+ unlinkSync(filePath);
214
+ }
215
+ }
216
+
217
+ if (retainedArchiveEntries.retired.length > 0) {
218
+ console.log(`[GC] Dropped ${retainedArchiveEntries.retired.length} archived entries outside the ${ARCHIVE_INDEX_RETENTION_MONTHS}-month retention window.`);
219
+ }
220
+ }
221
+
222
+ export function getHostnameSlug() {
223
+ try {
224
+ const slug = osHostname().toLowerCase().replace(/[^a-z0-9\-]/g, '-').replace(/-+/g, '-').replace(/^-|-$/g, '');
225
+ return slug.slice(0, 8).replace(/-$/, '') || 'local';
226
+ } catch {
227
+ return 'local';
228
+ }
229
+ }
230
+
231
+ export function computeNextTicketNumber(existingEntries) {
232
+ const hostname = getHostnameSlug();
233
+ const newRe = /^(\d{3,4})-/;
234
+ let max = 0;
235
+ for (const e of (existingEntries || [])) {
236
+ const id = String(e.id || '');
237
+ const m = id.match(newRe);
238
+ if (m) {
239
+ const n = parseInt(m[1], 10);
240
+ if (n > max && n < 10000) max = n;
241
+ }
242
+ }
243
+ return { num: max + 1, hostname };
244
+ }
245
+
246
+ export function generateTicketId(topicSlug, existingEntries) {
247
+ const hostname = getHostnameSlug();
248
+ const slug = toSlug(topicSlug || 'ticket');
249
+ const match = slug.match(/^(\d{3,4})-(.*)/);
250
+ if (match) {
251
+ const numStr = match[1];
252
+ const restSlug = match[2].slice(0, 32);
253
+ return `${numStr}-${restSlug}-${hostname}`;
254
+ }
255
+ const { num } = computeNextTicketNumber(existingEntries);
256
+ const numStr = String(num).padStart(3, '0');
257
+ const finalSlug = slug.slice(0, 32);
258
+ return `${numStr}-${finalSlug}-${hostname}`;
259
+ }
260
+
261
+ export function syncActiveTicketId(cwd, opts = {}) {
262
+ const index = readTicketIndexJson(cwd);
263
+ const activeEntry = index.entries.find(e => e.status === "active") ||
264
+ index.entries.find(e => e.status === "open");
265
+
266
+ const ticketDir = detectConsumerTicketDir(cwd);
267
+ if (!ticketDir) return;
268
+ if (opts.dryRun) return;
269
+
270
+ const activeId = activeEntry ? activeEntry.id : null;
271
+ if (index.activeTicketId !== activeId) {
272
+ writeTicketIndexJson(cwd, { ...index, activeTicketId: activeId });
273
+ }
274
+
275
+ const legacyLatestPath = join(ticketDir, "LATEST.md");
276
+ const pointerPathMd = join(ticketDir, "ACTIVE_TICKET.md");
277
+ const pointerPathJson = join(ticketDir, "ACTIVE_TICKET.json");
278
+
279
+ for (const p of [legacyLatestPath, pointerPathMd, pointerPathJson]) {
280
+ if (existsSync(p)) {
281
+ unlinkSync(p);
282
+ }
283
+ }
284
+ }
285
+
286
+ export async function syncToPipeline(url, data) {
287
+ try {
288
+ const response = await fetch(url, {
289
+ method: "POST",
290
+ headers: { "Content-Type": "application/json" },
291
+ body: JSON.stringify(data),
292
+ signal: AbortSignal?.timeout ? AbortSignal.timeout(3000) : undefined
293
+ });
294
+ return response.ok;
295
+ } catch (err) {
296
+ return false;
297
+ }
298
+ }
@@ -0,0 +1,320 @@
1
+ import { existsSync, mkdirSync, readFileSync, writeFileSync, unlinkSync, copyFileSync, statSync } from "fs";
2
+ import { basename, dirname, join, relative } from "path";
3
+ import {
4
+ toRepoRelativePath, AGENT_ROOT_DIR, TICKET_SUBDIR, TICKET_LIST_FILENAME,
5
+ parseFrontMatter, stringifyFrontMatter, findFileRecursively, detectConsumerTicketDir
6
+ } from "./cli-utils.mjs";
7
+ import { readTicketIndexJson, writeTicketIndexJson, syncActiveTicketId } from "./cli-ticket-index.mjs";
8
+ import { collectTicketMarkdownFiles, rebuildTicketIndexFromTopicFilesIfNeeded } from "./cli-ticket-parser.mjs";
9
+
10
+ // ─── Summary Extraction ─────────────────────────────────────────────────────
11
+
12
+ /**
13
+ * Extracts a concise summary from ticket body content for legacy tickets.
14
+ * Strategy: combine title + scope/background + top tasks into 1-2 lines.
15
+ * Handles 3 legacy formats:
16
+ * - Old format: 🎯 Scope Bounds, 📁 Files to Modify
17
+ * - Mid format: ## Background, ## Analysis, ## Tasks
18
+ * - New format: ## Target Module, ## APC (already has summary usually)
19
+ */
20
+ export function extractSummary(meta, content) {
21
+ const title = meta.title || "";
22
+ const lines = content.split("\n");
23
+
24
+ // Collect section contents by known header patterns
25
+ const sections = {};
26
+ let currentSection = null;
27
+ for (const line of lines) {
28
+ const trimmed = line.trim();
29
+ // Match ## headers (with or without emoji prefix)
30
+ const headerMatch = trimmed.match(/^##\s+(?:[\u{1F3AF}\u{1F4C1}\u{1F3D7}\u{1F6D1}\u{1F504}\u{2705}\s]*)?(.+)$/u);
31
+ if (headerMatch) {
32
+ currentSection = headerMatch[1].trim().toLowerCase();
33
+ sections[currentSection] = [];
34
+ continue;
35
+ }
36
+ // Match # title
37
+ if (trimmed.startsWith("# ") && !currentSection) {
38
+ continue; // skip title line
39
+ }
40
+ if (currentSection && trimmed && !trimmed.startsWith(">") && !trimmed.startsWith("<!--")) {
41
+ sections[currentSection].push(trimmed);
42
+ }
43
+ }
44
+
45
+ // Priority: scope/background > analysis > tasks
46
+ let contextLine = "";
47
+
48
+ // Try scope bounds (old format)
49
+ const scopeKeys = Object.keys(sections).filter(k =>
50
+ k.includes("scope") || k.includes("target") || k.includes("background")
51
+ );
52
+ for (const key of scopeKeys) {
53
+ const scopeLines = (sections[key] || [])
54
+ .filter(l => !l.startsWith("- **Context") && !l.startsWith("["))
55
+ .map(l => l.replace(/^[-*]\s*\*\*[^*]+:\*\*\s*/, "").replace(/`/g, ""))
56
+ .filter(l => l.length > 5 && l.length < 200);
57
+ if (scopeLines.length > 0) {
58
+ contextLine = scopeLines[0];
59
+ break;
60
+ }
61
+ }
62
+
63
+ // Try analysis section if no scope found
64
+ if (!contextLine) {
65
+ const analysisKeys = Object.keys(sections).filter(k =>
66
+ k.includes("analysis") || k.includes("design") || k.includes("decisions")
67
+ );
68
+ for (const key of analysisKeys) {
69
+ const analysisLines = (sections[key] || [])
70
+ .filter(l => l.length > 10 && l.length < 200 && !l.startsWith("|"))
71
+ .map(l => l.replace(/^[-*]\s*/, ""));
72
+ if (analysisLines.length > 0) {
73
+ contextLine = analysisLines[0];
74
+ break;
75
+ }
76
+ }
77
+ }
78
+
79
+ // Collect task summary (first 3 task items)
80
+ const taskKeys = Object.keys(sections).filter(k =>
81
+ k.includes("task") || k.includes("phased") || k.includes("execution steps")
82
+ );
83
+ let taskItems = [];
84
+ for (const key of taskKeys) {
85
+ taskItems = (sections[key] || [])
86
+ .filter(l => /^-\s*\[[ x]\]/.test(l) || /^\d+\.\s*\[/.test(l))
87
+ .map(l => l.replace(/^[-*\d.]+\s*\[[ x]\]\s*/, "").replace(/\[Phase\s*\d+[>]\s*/i, "").replace(/]/g, "").trim())
88
+ .filter(l => l.length > 3)
89
+ .slice(0, 3);
90
+ if (taskItems.length > 0) break;
91
+ }
92
+
93
+ // Build summary
94
+ const parts = [];
95
+ if (contextLine) {
96
+ parts.push(contextLine.length > 120 ? contextLine.substring(0, 117) + "..." : contextLine);
97
+ }
98
+ if (taskItems.length > 0) {
99
+ parts.push(`주요 작업: ${taskItems.join(", ")}`);
100
+ }
101
+
102
+ if (parts.length === 0) {
103
+ // Fallback: use title itself as summary
104
+ return title || null;
105
+ }
106
+
107
+ const summary = parts.join(". ").substring(0, 300);
108
+ return summary || null;
109
+ }
110
+
111
+ // ─── CAUTION Block / Target Module Injection ─────────────────────────────────
112
+
113
+ const CAUTION_BLOCK = `> **[CAUTION FOR AI AGENTS]**
114
+ > 1. Restrict all analysis, file creation, and modifications to the declared **Target Module** below.
115
+ > 2. Read the files listed in **Context Files** before doing ANY code generation.
116
+ > 3. DO NOT leak configuration, logic, or dependencies from other modules.
117
+
118
+ ## Target Module
119
+ - **Target:** [Fill in the target module/submodule path]
120
+ - **Context Files:** [List architecture docs or key files to read first]`;
121
+
122
+ /**
123
+ * Ensures the CAUTION block and Target Module section exist after the title.
124
+ * Returns modified content or null if no change needed.
125
+ */
126
+ export function ensureCautionBlock(content) {
127
+ // Already has Target Module or the new-format CAUTION
128
+ if (content.includes("## Target Module")) return null;
129
+
130
+ // Also skip if it already has the old-format scope bounds (don't duplicate)
131
+ const hasOldScope = /## [\u{1F3AF}]?\s*Scope Bounds/u.test(content);
132
+ if (hasOldScope) return null;
133
+
134
+ // Find the # title line and insert after it
135
+ const titleMatch = content.match(/^(# .+)\n/m);
136
+ if (!titleMatch) return null;
137
+
138
+ const titleEnd = content.indexOf(titleMatch[0]) + titleMatch[0].length;
139
+ const before = content.substring(0, titleEnd);
140
+ let after = content.substring(titleEnd);
141
+
142
+ // Skip if CAUTION already exists (old variant)
143
+ if (after.includes("[CAUTION FOR AI AGENTS]")) return null;
144
+
145
+ return before + "\n" + CAUTION_BLOCK + "\n" + after;
146
+ }
147
+
148
+ // ─── Main Migration ─────────────────────────────────────────────────────────
149
+
150
+ export function performUpgradeMigration(cwd, opts = {}) {
151
+ const root = detectConsumerTicketDir(cwd, { createIfMissing: true });
152
+
153
+ const files = collectTicketMarkdownFiles(root).filter(p => {
154
+ const base = basename(p);
155
+ return base !== "LATEST.md" && base !== TICKET_LIST_FILENAME && base !== "ACTIVE_TICKET.md";
156
+ });
157
+
158
+ console.log(`[UPGRADE] Scanning ${files.length} tickets for V2 migration...`);
159
+
160
+ let upgraded = 0;
161
+ let summaryAdded = 0;
162
+ let cautionAdded = 0;
163
+
164
+ for (const abs of files) {
165
+ const rel = toRepoRelativePath(cwd, abs);
166
+ const body = readFileSync(abs, "utf8");
167
+ const { meta, content } = parseFrontMatter(body);
168
+ let dirty = false;
169
+ let modifiedContent = content;
170
+
171
+ const isAlreadyInArchive = rel.includes("/archive/");
172
+
173
+ // 1. Summary enrichment for legacy tickets
174
+ if (!meta.summary) {
175
+ const generated = extractSummary(meta, content);
176
+ if (generated) {
177
+ meta.summary = generated;
178
+ dirty = true;
179
+ summaryAdded++;
180
+ }
181
+ }
182
+
183
+ // 2. CAUTION / Target Module injection
184
+ const cautionResult = ensureCautionBlock(modifiedContent);
185
+ if (cautionResult !== null) {
186
+ modifiedContent = cautionResult;
187
+ dirty = true;
188
+ cautionAdded++;
189
+ }
190
+
191
+ // 3. Archive placement fix (existing logic)
192
+ if (meta.status === "archived" && !isAlreadyInArchive && !opts.dryRun) {
193
+ const finalAbs = moveFileToArchive(cwd, abs, meta.group || basename(dirname(abs)));
194
+ const migratedBody = stringifyFrontMatter(meta, modifiedContent);
195
+ writeFileSync(finalAbs, migratedBody, "utf8");
196
+ upgraded++;
197
+ console.log(`[OK] Upgraded + archived: ${toRepoRelativePath(cwd, finalAbs)}`);
198
+ continue;
199
+ }
200
+
201
+ if (dirty) {
202
+ if (opts.dryRun) {
203
+ const changes = [];
204
+ if (!parseFrontMatter(body).meta.summary && meta.summary) changes.push("summary");
205
+ if (cautionResult !== null) changes.push("caution+targetModule");
206
+ console.log(`[DRY-RUN] Would upgrade: ${rel} (+${changes.join(", ")})`);
207
+ } else {
208
+ const migratedBody = stringifyFrontMatter(meta, modifiedContent);
209
+ writeFileSync(abs, migratedBody, "utf8");
210
+ upgraded++;
211
+ }
212
+ }
213
+ }
214
+
215
+ console.log(`[UPGRADE] Results: ${upgraded} upgraded, ${summaryAdded} summaries added, ${cautionAdded} caution blocks added`);
216
+
217
+ if (!opts.dryRun) {
218
+ rebuildTicketIndexFromTopicFilesIfNeeded(cwd, { ...opts, force: true });
219
+ performDefragmentation(cwd, opts);
220
+ syncActiveTicketId(cwd);
221
+ }
222
+
223
+ return upgraded;
224
+ }
225
+
226
+ // ─── Defragmentation ────────────────────────────────────────────────────────
227
+
228
+ export function performDefragmentation(cwd, opts = {}) {
229
+ const rootTicketDir = detectConsumerTicketDir(cwd);
230
+ if (!rootTicketDir) return;
231
+ const tickets = collectTicketMarkdownFiles(rootTicketDir).filter(p => {
232
+ const base = basename(p);
233
+ return base !== "LATEST.md" && base !== TICKET_LIST_FILENAME && base !== "ACTIVE_TICKET.md";
234
+ });
235
+
236
+ console.log(`[DEFRAG] Checking ${tickets.length} tickets for workspace placement...`);
237
+
238
+ const modifiedPaths = new Set();
239
+
240
+ for (const abs of tickets) {
241
+ const { meta } = parseFrontMatter(readFileSync(abs, "utf8"));
242
+ if (meta.submodule && meta.submodule !== "global") {
243
+ const subPath = join(cwd, meta.submodule);
244
+ if (existsSync(subPath) && statSync(subPath).isDirectory()) {
245
+ const subTicketDir = join(subPath, AGENT_ROOT_DIR, TICKET_SUBDIR);
246
+
247
+ const relToRoot = relative(rootTicketDir, abs);
248
+ const destAbs = join(subTicketDir, relToRoot);
249
+
250
+ if (opts.dryRun) {
251
+ console.log(`[DRY-RUN] Would move to workspace: ${relToRoot} -> ${meta.submodule}/${AGENT_ROOT_DIR}/${TICKET_SUBDIR}/`);
252
+ } else {
253
+ mkdirSync(dirname(destAbs), { recursive: true });
254
+ copyFileSync(abs, destAbs);
255
+ unlinkSync(abs);
256
+ console.log(`[DEFRAG] Moved: ${meta.submodule}/${AGENT_ROOT_DIR}/${TICKET_SUBDIR}/${relToRoot}`);
257
+ modifiedPaths.add(subPath);
258
+ }
259
+ }
260
+ }
261
+ }
262
+
263
+ if (!opts.dryRun) {
264
+ for (const p of modifiedPaths) {
265
+ rebuildTicketIndexFromTopicFilesIfNeeded(p, { ...opts, force: true });
266
+ syncActiveTicketId(p);
267
+ }
268
+ }
269
+ }
270
+
271
+ // ─── Utilities ──────────────────────────────────────────────────────────────
272
+
273
+ export function moveFileToArchive(cwd, abs, group) {
274
+ const ticketDir = detectConsumerTicketDir(cwd);
275
+ const archiveBase = join(ticketDir, "archive");
276
+ const targetSubDir = (basename(ticketDir) === TICKET_SUBDIR || !group) ? "sub" : group;
277
+ const targetDir = join(archiveBase, targetSubDir);
278
+ mkdirSync(targetDir, { recursive: true });
279
+ const finalAbs = join(targetDir, basename(abs));
280
+ if (finalAbs !== abs) {
281
+ if (existsSync(finalAbs)) {
282
+ unlinkSync(abs);
283
+ } else {
284
+ writeFileSync(finalAbs, readFileSync(abs, "utf8"), "utf8");
285
+ unlinkSync(abs);
286
+ }
287
+ }
288
+ return finalAbs;
289
+ }
290
+
291
+ export function normalizeTicketPaths(cwd, opts = {}) {
292
+ const index = readTicketIndexJson(cwd);
293
+ const ticketDir = detectConsumerTicketDir(cwd);
294
+ const entries = index.entries || [];
295
+ let modified = false;
296
+
297
+ for (const entry of entries) {
298
+ if (!entry.path) continue;
299
+
300
+ const currentAbs = join(cwd, entry.path);
301
+ if (!existsSync(currentAbs)) {
302
+ const fileName = basename(entry.path);
303
+ const found = findFileRecursively(ticketDir, fileName);
304
+ if (found) {
305
+ const newRel = toRepoRelativePath(cwd, found);
306
+ if (entry.path !== newRel) {
307
+ entry.path = newRel;
308
+ modified = true;
309
+ }
310
+ }
311
+ }
312
+ }
313
+
314
+ if (modified) {
315
+ index.updatedAt = new Date().toISOString();
316
+ writeTicketIndexJson(cwd, index);
317
+ if (!opts.silent) console.log(`[NORMALIZE] Corrected stale paths in ${basename(cwd)}/INDEX.json`);
318
+ }
319
+ return modified;
320
+ }