deuk-agent-rule 2.5.13 → 3.3.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. package/CHANGELOG.ko.md +74 -0
  2. package/CHANGELOG.md +138 -316
  3. package/README.ko.md +134 -154
  4. package/README.md +121 -153
  5. package/package.json +29 -7
  6. package/scripts/cli-args.mjs +87 -3
  7. package/scripts/cli-init-commands.mjs +1382 -223
  8. package/scripts/cli-init-logic.mjs +28 -16
  9. package/scripts/cli-prompts.mjs +13 -4
  10. package/scripts/cli-rule-compiler.mjs +44 -34
  11. package/scripts/cli-skill-commands.mjs +172 -0
  12. package/scripts/cli-telemetry-commands.mjs +429 -0
  13. package/scripts/cli-ticket-commands.mjs +1934 -161
  14. package/scripts/cli-ticket-index.mjs +298 -0
  15. package/scripts/cli-ticket-migration.mjs +320 -0
  16. package/scripts/cli-ticket-parser.mjs +207 -0
  17. package/scripts/cli-utils.mjs +381 -59
  18. package/scripts/cli.mjs +99 -19
  19. package/scripts/lint-md.mjs +247 -0
  20. package/scripts/lint-rules.mjs +143 -0
  21. package/scripts/merge-logic.mjs +13 -306
  22. package/scripts/plan-parser.mjs +53 -0
  23. package/templates/MODULE_RULE_TEMPLATE.md +11 -0
  24. package/templates/PROJECT_RULE.md +47 -0
  25. package/templates/TICKET_TEMPLATE.ko.md +21 -0
  26. package/templates/TICKET_TEMPLATE.md +21 -0
  27. package/templates/rules.d/deukcontext-mcp.md +31 -0
  28. package/templates/rules.d/platform-coexistence.md +29 -0
  29. package/templates/skills/context-recall/SKILL.md +25 -0
  30. package/templates/skills/generated-file-guard/SKILL.md +25 -0
  31. package/templates/skills/safe-refactor/SKILL.md +25 -0
  32. package/bundle/.cursorrules +0 -11
  33. package/bundle/AGENTS.md +0 -146
  34. package/bundle/gemini.md +0 -26
  35. package/bundle/rules/delivery-and-parallel-work.mdc +0 -26
  36. package/bundle/rules/git-commit.mdc +0 -24
  37. package/bundle/rules/multi-ai-workflow.mdc +0 -104
  38. package/bundle/rules.d/core-workflow.md +0 -48
  39. package/bundle/rules.d/deukrag-mcp.md +0 -37
  40. package/bundle/templates/MODULE_RULE_TEMPLATE.md +0 -24
  41. package/bundle/templates/TICKET_TEMPLATE.md +0 -58
  42. package/scripts/cli-ticket-logic.mjs +0 -568
  43. package/scripts/sync-bundle.mjs +0 -77
  44. package/scripts/sync-oss.mjs +0 -126
@@ -1,80 +1,382 @@
1
- import { join, dirname, basename } from "path";
2
- import { existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync, readdirSync, unlinkSync, rmSync, renameSync } from "fs";
3
- import { resolveMarkers, resolveCursorrulesMarkers, applyAgents, applyRules, applyCursorrules, readBundleAgents } from "./merge-logic.mjs";
1
+ import { join, dirname, basename, relative } from "path";
2
+ import { homedir } from "os";
3
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync, readdirSync, unlinkSync, rmSync, renameSync, statSync, cpSync } from "fs";
4
+
4
5
  import { ensureTicketDirAndGitignore } from "./cli-init-logic.mjs";
5
- import { normalizeTicketPaths } from "./cli-ticket-logic.mjs";
6
- import { compileDynamicRules } from "./cli-rule-compiler.mjs";
7
- import { loadInitConfig, writeInitConfig } from "./cli-utils.mjs";
6
+ import { normalizeTicketPaths } from "./cli-ticket-migration.mjs";
7
+ import { readTicketIndexJson } from "./cli-ticket-index.mjs";
8
+
8
9
  import { runInteractive } from "./cli-prompts.mjs";
10
+ import { AGENT_ROOT_DIR, TICKET_SUBDIR, TEMPLATE_SUBDIR, TICKET_INDEX_FILENAME, TICKET_LIST_FILENAME, discoverAllWorkspaces, isMcpActive, toRepoRelativePath, toPosixPath, resolveWorkflowMode, pruneRuleModules, loadInitConfig, writeInitConfig, isWorkflowExecute, normalizeWorkflowMode, SPOKE_REGISTRY, parseFrontMatter, stringifyFrontMatter, LEGACY_TEMPLATE_DIR, LEGACY_TICKET_DIR, LEGACY_TICKET_DIR_PLURAL, LEGACY_TICKET_DIR_ROOT, LEGACY_CONFIG_FILE, normalizeTicketGroup } from "./cli-utils.mjs";
9
11
 
10
- import { AGENT_ROOT_DIR, TICKET_SUBDIR, TEMPLATE_SUBDIR, RULES_SUBDIR, discoverAllSubmodules } from "./cli-utils.mjs";
12
+ function sortedDirEntries(dir, options = {}) {
13
+ const entries = readdirSync(dir, options);
14
+ return entries.sort((a, b) => {
15
+ const aName = typeof a === "string" ? a : a.name;
16
+ const bName = typeof b === "string" ? b : b.name;
17
+ return String(aName).localeCompare(String(bName));
18
+ });
19
+ }
11
20
 
12
- function migrateLegacyStructure(cwd, dryRun) {
13
- const recursiveMerge = (src, dest) => {
14
- if (!existsSync(src)) return;
15
- if (!existsSync(dest)) {
16
- if (!dryRun) {
17
- mkdirSync(dirname(dest), { recursive: true });
18
- renameSync(src, dest);
19
- }
20
- return;
21
- }
22
- // Both exist, merge contents
23
- const entries = readdirSync(src, { withFileTypes: true });
24
- for (const ent of entries) {
25
- const sPath = join(src, ent.name);
26
- const dPath = join(dest, ent.name);
27
- if (ent.isDirectory()) {
28
- recursiveMerge(sPath, dPath);
29
- } else {
30
- if (!existsSync(dPath)) {
31
- if (!dryRun) renameSync(sPath, dPath);
21
+ function safeReadText(absPath, fallback = "") {
22
+ try {
23
+ return readFileSync(absPath, "utf8");
24
+ } catch {
25
+ return fallback;
26
+ }
27
+ }
28
+
29
+ function sameFileContent(leftPath, rightPath) {
30
+ return safeReadText(leftPath) === safeReadText(rightPath);
31
+ }
32
+
33
+ function isSelectedTool(selectedTools = [], spokeId) {
34
+ const tools = Array.isArray(selectedTools) ? selectedTools : [];
35
+ return tools.includes("all") || tools.includes(spokeId);
36
+ }
37
+
38
+ const MANAGED_BLOCK_BEGIN = "<!-- deuk-agent-managed:begin -->";
39
+ const MANAGED_BLOCK_END = "<!-- deuk-agent-managed:end -->";
40
+
41
+ function wrapManagedBlock(content) {
42
+ return `${MANAGED_BLOCK_BEGIN}\n${String(content || "").trimEnd()}\n${MANAGED_BLOCK_END}`;
43
+ }
44
+
45
+ function mergeManagedBlock(existing, managedContent) {
46
+ const current = String(existing || "");
47
+ const nextBlock = wrapManagedBlock(managedContent);
48
+
49
+ if (!current.trim()) return `${nextBlock}\n`;
50
+ if (current.includes(MANAGED_BLOCK_BEGIN) && current.includes(MANAGED_BLOCK_END)) {
51
+ const beginIdx = current.indexOf(MANAGED_BLOCK_BEGIN);
52
+ const endIdx = current.indexOf(MANAGED_BLOCK_END, beginIdx);
53
+ if (beginIdx !== -1 && endIdx !== -1) {
54
+ const before = current.slice(0, beginIdx).trimEnd();
55
+ const after = current.slice(endIdx + MANAGED_BLOCK_END.length).trimStart();
56
+ return [before, nextBlock, after].filter(Boolean).join("\n\n").trimEnd() + "\n";
57
+ }
58
+ }
59
+
60
+ const cleaned = current.trimEnd();
61
+ const managedBody = String(managedContent || "").trim();
62
+ if (managedBody && cleaned.includes(managedBody)) return cleaned + "\n";
63
+
64
+ return `${cleaned}\n\n${nextBlock}\n`;
65
+ }
66
+
67
+ function ensureWritableDirectory(dirAbs, cwd, dryRun, label) {
68
+ if (!existsSync(dirAbs)) return;
69
+
70
+ try {
71
+ if (statSync(dirAbs).isDirectory()) return;
72
+ } catch (err) {
73
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to inspect ${dirAbs}:`, err);
74
+ return;
75
+ }
76
+
77
+ const backupBase = `${dirAbs}.bak`;
78
+ let backupAbs = backupBase;
79
+ let index = 1;
80
+ while (existsSync(backupAbs)) {
81
+ backupAbs = `${backupBase}.${index}`;
82
+ index += 1;
83
+ }
84
+
85
+ const relDir = toRepoRelativePath(cwd, dirAbs);
86
+ const relBackup = toRepoRelativePath(cwd, backupAbs);
87
+ if (!dryRun) {
88
+ renameSync(dirAbs, backupAbs);
89
+ }
90
+ console.log(`[MIGRATE] ${label}: ${relDir} -> ${relBackup}`);
91
+ }
92
+
93
+ function moveOrMergeFile(srcAbs, dstAbs, cwd, dryRun, action) {
94
+ const relSrc = toRepoRelativePath(cwd, srcAbs);
95
+ const relDst = toRepoRelativePath(cwd, dstAbs);
96
+
97
+ if (srcAbs === dstAbs) return false;
98
+
99
+ if (existsSync(dstAbs)) {
100
+ if (sameFileContent(srcAbs, dstAbs)) {
101
+ if (!dryRun) unlinkSync(srcAbs);
102
+ console.log(`[MIGRATE] ${action} duplicate removed: ${relSrc} -> ${relDst}`);
103
+ return true;
104
+ }
105
+ console.warn(`[WARNING] ${action} conflict: destination exists with different content, skipped ${relSrc}`);
106
+ return false;
107
+ }
108
+
109
+ if (!dryRun) {
110
+ mkdirSync(dirname(dstAbs), { recursive: true });
111
+ renameSync(srcAbs, dstAbs);
112
+ }
113
+ console.log(`[MIGRATE] ${dryRun ? "Would move" : "Moved"} ${action}: ${relSrc} -> ${relDst}`);
114
+ return true;
115
+ }
116
+
117
+ function parseYearMonth(value) {
118
+ const match = String(value || "").match(/^(\d{4})-(\d{2})/);
119
+ if (!match) return null;
120
+ return `${match[1]}-${match[2]}`;
121
+ }
122
+
123
+ function parseDay(value) {
124
+ const match = String(value || "").match(/^\d{4}-(\d{2})-(\d{2})/);
125
+ if (match) return match[2] || "01";
126
+
127
+ const onlyDay = String(value || "").match(/^\d{2}$/);
128
+ if (onlyDay) return onlyDay[0];
129
+ return null;
130
+ }
131
+
132
+ function inferPartitionFromFile(statSource, entry, fallbackDate = new Date()) {
133
+ const yearMonth = parseYearMonth(entry?.archiveYearMonth) || parseYearMonth(entry?.createdAt) || parseYearMonth(entry?.updatedAt);
134
+ const day = parseDay(entry?.archiveDay) || parseDay(entry?.createdAt) || parseDay(entry?.updatedAt) || String(statSource.getDate ? statSource.getDate() : fallbackDate.getDate()).padStart(2, "0");
135
+ const referenceDate = statSource instanceof Date ? statSource : (statSource?.mtime || fallbackDate);
136
+
137
+ return {
138
+ yearMonth: yearMonth || `${String(fallbackDate.getFullYear())}-${String(fallbackDate.getMonth() + 1).padStart(2, "0")}`,
139
+ day: day || String(referenceDate.getDate()).padStart(2, "0"),
140
+ };
141
+ }
142
+
143
+ function mapTicketIndexByFileName(cwd) {
144
+ const indexJson = readTicketIndexJson(cwd);
145
+ const byFileName = new Map();
146
+ const byId = new Map();
147
+ for (const e of indexJson.entries || []) {
148
+ if (!e) continue;
149
+ if (e.fileName) byFileName.set(e.fileName, e);
150
+ if (e.id) byId.set(e.id, e);
151
+ }
152
+ return { indexJson, byFileName, byId };
153
+ }
154
+
155
+ function deriveDocTicketFileName(mdPath) {
156
+ const stem = basename(mdPath, ".md")
157
+ .replace(/-(plan|report)$/i, "");
158
+ if (!stem) return null;
159
+ return `${stem}.md`;
160
+ }
161
+
162
+ function isActiveTicketStatus(status) {
163
+ return status === "open" || status === "active";
164
+ }
165
+
166
+ function inferDocsBucketFromFileName(fileName) {
167
+ const lower = fileName.toLowerCase();
168
+ if (lower.endsWith("-plan.md") || lower === "plan.md") return "plan";
169
+ if (lower.endsWith("-report.md") || lower.startsWith("report-")) return "plan";
170
+ return "plan";
171
+ }
172
+
173
+ function resolveDocTicketEntry(fileName, sourceMeta, byFileName, byId) {
174
+ const candidates = new Set();
175
+ candidates.add(fileName);
176
+
177
+ const derived = deriveDocTicketFileName(fileName);
178
+ if (derived) candidates.add(derived);
179
+ if (sourceMeta?.id) candidates.add(`${sourceMeta.id}.md`);
180
+
181
+ for (const candidate of candidates) {
182
+ const entry = byFileName.get(candidate);
183
+ if (entry) return entry;
184
+ }
185
+
186
+ for (const candidate of candidates) {
187
+ const key = candidate.replace(/\.md$/i, "");
188
+ const entry = byId.get(key);
189
+ if (entry) return entry;
190
+ }
191
+
192
+ return sourceMeta?.id ? byId.get(sourceMeta.id) || null : null;
193
+ }
194
+
195
+ function removeEmptyDirsBottomUp(dir, cwd, dryRun) {
196
+ if (!existsSync(dir)) return;
197
+ for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
198
+ if (entry.isDirectory()) removeEmptyDirsBottomUp(join(dir, entry.name), cwd, dryRun);
199
+ }
200
+
201
+ try {
202
+ if (sortedDirEntries(dir).length > 0) return;
203
+ if (!dryRun) rmSync(dir, { recursive: true, force: true });
204
+ console.log(`[CLEANUP] removed empty directory: ${toRepoRelativePath(cwd, dir)}`);
205
+ } catch (err) {
206
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to prune ${dir}:`, err);
207
+ }
208
+ }
209
+
210
+ function collectFilesRecursively(dir, out = []) {
211
+ if (!existsSync(dir)) return out;
212
+ for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
213
+ const p = join(dir, entry.name);
214
+ if (entry.isDirectory()) {
215
+ collectFilesRecursively(p, out);
216
+ continue;
217
+ }
218
+ if (entry.isFile()) out.push(p);
219
+ }
220
+ return out;
221
+ }
222
+
223
+ function classifyDocTarget(cwd, sourceAbs, fallbackDir = "plan") {
224
+ const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
225
+ const fileName = basename(sourceAbs);
226
+ return join(docsRoot, fallbackDir, fileName);
227
+ }
228
+
229
+ function isDistilledKnowledgeJson(sourceAbs) {
230
+ try {
231
+ const data = JSON.parse(safeReadText(sourceAbs));
232
+ const hasModernMetadata = Boolean(
233
+ data
234
+ && typeof data === "object"
235
+ && typeof data.id === "string"
236
+ && typeof data.summary === "string"
237
+ && data.sourceKind === "ticket"
238
+ && data.ingestionCategory === "archived_ticket"
239
+ && data.corpus === "tickets"
240
+ && data.originTool === "ticket-archive"
241
+ && data.freshness === "archived"
242
+ && data.refreshPolicy === "refresh-on-stale"
243
+ && typeof data.sourceTicketPath === "string"
244
+ && data.sections
245
+ && typeof data.sections === "object"
246
+ && data.analysis
247
+ && typeof data.analysis === "object"
248
+ );
249
+ const hasLegacyKnowledgeShape = Boolean(
250
+ data
251
+ && typeof data === "object"
252
+ && typeof data.id === "string"
253
+ && typeof data.summary === "string"
254
+ && typeof data.sourceTicketPath === "string"
255
+ && data.sections
256
+ && typeof data.sections === "object"
257
+ && data.analysis
258
+ && typeof data.analysis === "object"
259
+ );
260
+ return hasModernMetadata || hasLegacyKnowledgeShape;
261
+ } catch {
262
+ return false;
263
+ }
264
+ }
265
+
266
+ function classifyAgentFileTarget(cwd, sourceAbs, fallbackDir = "plan") {
267
+ const fileName = basename(sourceAbs);
268
+ const lower = fileName.toLowerCase();
269
+
270
+ if (lower.endsWith(".json")) {
271
+ if (isDistilledKnowledgeJson(sourceAbs)) {
272
+ return join(cwd, AGENT_ROOT_DIR, "knowledge", fileName);
273
+ }
274
+ return join(cwd, AGENT_ROOT_DIR, "docs", fallbackDir, fileName);
275
+ }
276
+ if (lower.endsWith(".md") || lower.endsWith(".deuk")) {
277
+ return classifyDocTarget(cwd, sourceAbs, fallbackDir);
278
+ }
279
+ return join(cwd, AGENT_ROOT_DIR, "docs", fallbackDir, fileName);
280
+ }
281
+
282
+ function recursiveMerge(src, dest, cwd, dryRun) {
283
+ if (!existsSync(src)) return;
284
+ if (!existsSync(dest)) {
285
+ if (!dryRun) {
286
+ mkdirSync(dirname(dest), { recursive: true });
287
+ renameSync(src, dest);
288
+ }
289
+ return;
290
+ }
291
+ // Both exist, merge contents
292
+ const entries = sortedDirEntries(src, { withFileTypes: true });
293
+ for (const ent of entries) {
294
+ const sPath = join(src, ent.name);
295
+ const dPath = join(dest, ent.name);
296
+ if (ent.isDirectory()) {
297
+ recursiveMerge(sPath, dPath, cwd, dryRun);
298
+ } else {
299
+ if (!existsSync(dPath)) {
300
+ if (!dryRun) {
301
+ renameSync(sPath, dPath);
302
+ console.log(`[MIGRATE] Moved: ${toRepoRelativePath(cwd, sPath)} -> ${toRepoRelativePath(cwd, dPath)}`);
32
303
  } else {
33
- // If destination exists, we could overwrite or skip.
34
- // For tickets, we skip to avoid data loss, but log it.
35
- if (basename(sPath) !== "INDEX.json" && basename(sPath) !== "TICKET_LIST.md") {
36
- // console.warn(`[MIGRATE] Skipping existing file: ${dPath}`);
304
+ console.log(`[DRY-RUN] Would move: ${toRepoRelativePath(cwd, sPath)} -> ${toRepoRelativePath(cwd, dPath)}`);
305
+ }
306
+ } else {
307
+ // If destination exists, check if content is identical
308
+ const sContent = readFileSync(sPath, "utf8");
309
+ const dContent = readFileSync(dPath, "utf8");
310
+ if (sContent === dContent) {
311
+ if (!dryRun) {
312
+ unlinkSync(sPath);
313
+ console.log(`[MIGRATE] Removed identical file: ${toRepoRelativePath(cwd, sPath)}`);
37
314
  }
38
- if (!dryRun) unlinkSync(sPath); // Remove migrated/redundant file
315
+ } else {
316
+ console.warn(`[WARNING] Migration conflict: ${toRepoRelativePath(cwd, dPath)} already exists with different content. Skipping.`);
39
317
  }
40
318
  }
41
319
  }
42
- // Clean up src if empty
43
- try {
44
- if (!dryRun && readdirSync(src).length === 0) rmSync(src, { recursive: true });
45
- } catch {}
46
- };
320
+ }
321
+ // Clean up src if empty
322
+ try {
323
+ if (!dryRun && sortedDirEntries(src).length === 0) {
324
+ rmSync(src, { recursive: true });
325
+ console.log(`[MIGRATE] Removed empty directory: ${toRepoRelativePath(cwd, src)}`);
326
+ }
327
+ } catch (err) {
328
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to clean up ${src}:`, err);
329
+ }
330
+ }
331
+
332
+ export function migrateLegacyStructure(cwd, dryRun) {
47
333
 
48
- const legacyTemplates = join(cwd, ".deuk-agent-templates");
334
+ const legacyTemplates = join(cwd, LEGACY_TEMPLATE_DIR);
49
335
  const newTemplates = join(cwd, AGENT_ROOT_DIR, TEMPLATE_SUBDIR);
50
336
  if (existsSync(legacyTemplates)) {
51
337
  console.log(`[MIGRATE] Merging legacy templates into ${AGENT_ROOT_DIR}/${TEMPLATE_SUBDIR}`);
52
- recursiveMerge(legacyTemplates, newTemplates);
338
+ recursiveMerge(legacyTemplates, newTemplates, cwd, dryRun);
339
+ if (!dryRun && existsSync(legacyTemplates)) rmSync(legacyTemplates, { recursive: true, force: true });
53
340
  }
54
341
 
55
- const legacyTickets = join(cwd, ".deuk-agent-ticket");
56
- const legacyTicketsPlural = join(cwd, ".deuk-agent-tickets");
342
+ const legacyTickets = join(cwd, LEGACY_TICKET_DIR);
343
+ const legacyTicketsPlural = join(cwd, LEGACY_TICKET_DIR_PLURAL);
57
344
  const newTickets = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
58
345
 
59
346
  if (existsSync(legacyTickets)) {
60
347
  console.log(`[MIGRATE] Merging legacy singular ticket directory into ${AGENT_ROOT_DIR}/${TICKET_SUBDIR}`);
61
- recursiveMerge(legacyTickets, newTickets);
348
+ recursiveMerge(legacyTickets, newTickets, cwd, dryRun);
349
+ if (!dryRun && existsSync(legacyTickets)) rmSync(legacyTickets, { recursive: true, force: true });
62
350
  }
63
351
  if (existsSync(legacyTicketsPlural)) {
64
352
  console.log(`[MIGRATE] Merging legacy plural tickets directory into ${AGENT_ROOT_DIR}/${TICKET_SUBDIR}`);
65
- recursiveMerge(legacyTicketsPlural, newTickets);
353
+ recursiveMerge(legacyTicketsPlural, newTickets, cwd, dryRun);
354
+ if (!dryRun && existsSync(legacyTicketsPlural)) rmSync(legacyTicketsPlural, { recursive: true, force: true });
66
355
  }
356
+ migrateLegacyAgentWorkflows(cwd, dryRun);
357
+ migrateLegacyRootTicketDir(cwd, dryRun);
358
+ removeLegacyContainer(cwd, dryRun);
67
359
 
68
- const legacyConfig = join(cwd, ".deuk-agent-rule.config.json");
360
+ const legacyConfig = join(cwd, LEGACY_CONFIG_FILE);
69
361
  const newConfig = join(cwd, AGENT_ROOT_DIR, "config.json");
70
- if (existsSync(legacyConfig) && !existsSync(newConfig)) {
71
- console.log(`[MIGRATE] Moving legacy config to ${AGENT_ROOT_DIR}/config.json`);
72
- if (!dryRun) {
73
- mkdirSync(join(cwd, AGENT_ROOT_DIR), { recursive: true });
74
- renameSync(legacyConfig, newConfig);
362
+ if (existsSync(legacyConfig)) {
363
+ if (!existsSync(newConfig)) {
364
+ console.log(`[MIGRATE] Moving legacy config to ${AGENT_ROOT_DIR}/config.json`);
365
+ if (!dryRun) {
366
+ mkdirSync(join(cwd, AGENT_ROOT_DIR), { recursive: true });
367
+ renameSync(legacyConfig, newConfig);
368
+ }
369
+ } else {
370
+ console.log(`[MIGRATE] Removing redundant legacy config`);
371
+ if (!dryRun) unlinkSync(legacyConfig);
75
372
  }
76
373
  }
77
374
 
375
+ migrateLegacyReports(cwd, dryRun);
376
+ migrateLegacyScratchReports(cwd, dryRun);
377
+ migrateLegacyArchiveTickets(cwd, dryRun);
378
+ pruneEmptyLegacyTicketDirs(cwd, dryRun);
379
+
78
380
  // 3. Clean up redundant legacy pointer files from the target directory
79
381
  if (existsSync(newTickets)) {
80
382
  for (const file of ["ACTIVE_TICKET.md", "ACTIVE_TICKET.json", "LATEST.md"]) {
@@ -87,87 +389,883 @@ function migrateLegacyStructure(cwd, dryRun) {
87
389
  }
88
390
  }
89
391
 
392
+ function migrateLegacyRootTicketDir(cwd, dryRun) {
393
+ const legacyTicketDir = join(cwd, LEGACY_TICKET_DIR_ROOT);
394
+ if (!existsSync(legacyTicketDir)) return;
395
+
396
+ const now = new Date();
397
+ const yearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;
398
+ const day = String(now.getDate()).padStart(2, "0");
399
+ const importRoot = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "sub", yearMonth, day);
400
+ let index = 0;
401
+ let targetDir = join(importRoot, "ticket-import");
402
+ while (existsSync(targetDir)) {
403
+ index += 1;
404
+ targetDir = join(importRoot, `ticket-import-${String(index).padStart(2, "0")}`);
405
+ }
406
+
407
+ const relSource = toRepoRelativePath(cwd, legacyTicketDir);
408
+ const relTarget = toRepoRelativePath(cwd, targetDir);
409
+
410
+ if (dryRun) {
411
+ console.log(`[DRY-RUN] Would move legacy root ticket directory: ${relSource} -> ${relTarget}`);
412
+ return;
413
+ }
414
+
415
+ mkdirSync(importRoot, { recursive: true });
416
+ renameSync(legacyTicketDir, targetDir);
417
+ console.log(`[MIGRATE] Moved legacy root ticket directory: ${relSource} -> ${relTarget}`);
418
+ }
419
+
420
+ function migrateLegacyAgentWorkflows(cwd, dryRun) {
421
+ const workflowsDir = join(cwd, ".agent", "workflows");
422
+ if (!existsSync(workflowsDir)) return;
423
+
424
+ for (const sourceAbs of listFlatMarkdownFiles(workflowsDir)) {
425
+ const partition = inferPartitionFromFile(statSync(sourceAbs));
426
+ const targetName = `agent-workflow-${basename(sourceAbs)}`;
427
+ const targetAbs = join(cwd, AGENT_ROOT_DIR, "docs", "archive", partition.yearMonth, targetName);
428
+ moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "legacy agent workflow cleanup");
429
+ }
430
+
431
+ removeEmptyDirsBottomUp(join(cwd, ".agent"), cwd, dryRun);
432
+ }
433
+
434
+ function removeLegacyContainer(cwd, dryRun) {
435
+ const legacyContainer = join(cwd, AGENT_ROOT_DIR, "legacy");
436
+ if (!existsSync(legacyContainer)) return;
437
+
438
+ if (!dryRun) {
439
+ rmSync(legacyContainer, { recursive: true, force: true });
440
+ }
441
+ console.log(`[CLEANUP] removed legacy container: ${toRepoRelativePath(cwd, legacyContainer)}`);
442
+ }
443
+
444
+ export function migrateLegacyReports(cwd, dryRun) {
445
+ const legacyReportsDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "reports");
446
+ const reportTargetDir = join(cwd, AGENT_ROOT_DIR, "docs", "plan");
447
+ if (!existsSync(legacyReportsDir)) return;
448
+
449
+ const reportFiles = sortedDirEntries(legacyReportsDir, { withFileTypes: true })
450
+ .filter(ent => ent.isFile() && ent.name.endsWith(".md"))
451
+ .map(ent => join(legacyReportsDir, ent.name));
452
+
453
+ if (reportFiles.length === 0) return;
454
+
455
+ mkdirSync(reportTargetDir, { recursive: true });
456
+
457
+ for (const sourceAbs of reportFiles) {
458
+ const fileName = basename(sourceAbs);
459
+ const targetAbs = join(reportTargetDir, fileName);
460
+ const sourceBody = readFileSync(sourceAbs, "utf8");
461
+
462
+ if (existsSync(targetAbs)) {
463
+ const targetBody = readFileSync(targetAbs, "utf8");
464
+ if (targetBody !== sourceBody) {
465
+ console.warn(`[WARNING] Legacy report conflict: ${toRepoRelativePath(cwd, targetAbs)} already exists with different content. Skipping move.`);
466
+ continue;
467
+ }
468
+ if (!dryRun) unlinkSync(sourceAbs);
469
+ console.log(`[MIGRATE] Removed duplicate legacy report: ${toRepoRelativePath(cwd, sourceAbs)}`);
470
+ rewriteLegacyReportLinks(cwd, targetAbs, fileName, dryRun);
471
+ continue;
472
+ }
473
+
474
+ rewriteLegacyReportLinks(cwd, targetAbs, fileName, dryRun);
475
+ if (!dryRun) {
476
+ renameSync(sourceAbs, targetAbs);
477
+ }
478
+ console.log(`[MIGRATE] Moved legacy report: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
479
+ }
480
+ }
481
+
482
+ export function migrateLegacyScratchReports(cwd, dryRun) {
483
+ const scratchDir = join(cwd, AGENT_ROOT_DIR, "docs", "scratch");
484
+ const walkthroughDir = join(cwd, AGENT_ROOT_DIR, "docs", "plan");
485
+ if (!existsSync(scratchDir)) return;
486
+
487
+ const reportFiles = sortedDirEntries(scratchDir, { withFileTypes: true })
488
+ .filter(ent => ent.isFile())
489
+ .map(ent => join(scratchDir, ent.name));
490
+
491
+ if (reportFiles.length === 0) return;
492
+
493
+ mkdirSync(walkthroughDir, { recursive: true });
494
+
495
+ for (const sourceAbs of reportFiles) {
496
+ const fileName = basename(sourceAbs);
497
+ const targetAbs = join(walkthroughDir, fileName);
498
+ const sourceBody = readFileSync(sourceAbs, "utf8");
499
+
500
+ if (existsSync(targetAbs)) {
501
+ const targetBody = readFileSync(targetAbs, "utf8");
502
+ if (targetBody !== sourceBody) {
503
+ console.warn(`[WARNING] Scratch report conflict: ${toRepoRelativePath(cwd, targetAbs)} already exists with different content. Skipping move.`);
504
+ continue;
505
+ }
506
+ if (!dryRun) unlinkSync(sourceAbs);
507
+ console.log(`[MIGRATE] Removed duplicate scratch report: ${toRepoRelativePath(cwd, sourceAbs)}`);
508
+ continue;
509
+ }
510
+
511
+ if (!dryRun) {
512
+ renameSync(sourceAbs, targetAbs);
513
+ }
514
+ console.log(`[MIGRATE] Moved scratch report: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
515
+ }
516
+ }
517
+
518
+ export function migrateLegacyArchiveTickets(cwd, dryRun) {
519
+ const legacyArchiveTicketsDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "tickets");
520
+ const canonicalArchiveSubDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "sub");
521
+ if (!existsSync(legacyArchiveTicketsDir)) return;
522
+
523
+ const archiveFiles = sortedDirEntries(legacyArchiveTicketsDir, { withFileTypes: true })
524
+ .filter(ent => ent.isFile() && ent.name.endsWith(".md"))
525
+ .map(ent => join(legacyArchiveTicketsDir, ent.name));
526
+
527
+ if (archiveFiles.length === 0) return;
528
+
529
+ mkdirSync(canonicalArchiveSubDir, { recursive: true });
530
+
531
+ for (const sourceAbs of archiveFiles) {
532
+ const fileName = basename(sourceAbs);
533
+ const targetAbs = join(canonicalArchiveSubDir, fileName);
534
+ const sourceBody = readFileSync(sourceAbs, "utf8");
535
+
536
+ if (existsSync(targetAbs)) {
537
+ const targetBody = readFileSync(targetAbs, "utf8");
538
+ if (targetBody !== sourceBody) {
539
+ console.warn(`[WARNING] Legacy archive conflict: ${toRepoRelativePath(cwd, targetAbs)} already exists with different content. Skipping move.`);
540
+ continue;
541
+ }
542
+ if (!dryRun) unlinkSync(sourceAbs);
543
+ console.log(`[MIGRATE] Removed duplicate legacy archive ticket: ${toRepoRelativePath(cwd, sourceAbs)}`);
544
+ continue;
545
+ }
546
+
547
+ if (!dryRun) {
548
+ renameSync(sourceAbs, targetAbs);
549
+ }
550
+ console.log(`[MIGRATE] Moved legacy archive ticket: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
551
+ }
552
+
553
+ try {
554
+ if (!dryRun && sortedDirEntries(legacyArchiveTicketsDir).length === 0) {
555
+ rmSync(legacyArchiveTicketsDir, { recursive: true, force: true });
556
+ console.log(`[CLEANUP] removed empty legacy archive shard: ${toRepoRelativePath(cwd, legacyArchiveTicketsDir)}`);
557
+ }
558
+ } catch (err) {
559
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to prune ${legacyArchiveTicketsDir}:`, err);
560
+ }
561
+ }
562
+
563
+ function rewriteLegacyReportLinks(cwd, targetAbs, fileName, dryRun) {
564
+ const archiveRoot = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive");
565
+ if (!existsSync(archiveRoot)) return;
566
+
567
+ walkMdFiles(archiveRoot, (absPath) => {
568
+ const body = readFileSync(absPath, "utf8");
569
+ if (!body.includes(`reports/${fileName}`)) return;
570
+ const relTarget = toRepoRelativePath(cwd, targetAbs);
571
+ const replacement = toPosixPath(relative(dirname(absPath), targetAbs));
572
+ const reportPattern = new RegExp(`(?:\\.\\./)+reports/${escapeRegExp(fileName)}`, "g");
573
+ const directPathPattern = new RegExp(`reports/${escapeRegExp(fileName)}`, "g");
574
+ const nextBody = body
575
+ .replace(reportPattern, replacement)
576
+ .replace(directPathPattern, replacement);
577
+ if (nextBody === body) return;
578
+ if (!dryRun) writeFileSync(absPath, nextBody, "utf8");
579
+ console.log(`[MIGRATE] Updated legacy report link in ${toRepoRelativePath(cwd, absPath)} -> ${relTarget}`);
580
+ });
581
+ }
582
+
583
+ function pruneEmptyLegacyTicketDirs(cwd, dryRun) {
584
+ const legacyDirs = [
585
+ join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "core"),
586
+ join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "global"),
587
+ join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "main"),
588
+ join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "reports"),
589
+ ];
590
+
591
+ for (const dir of legacyDirs) {
592
+ if (!existsSync(dir)) continue;
593
+ try {
594
+ if (sortedDirEntries(dir).length > 0) continue;
595
+ if (!dryRun) rmSync(dir, { recursive: true, force: true });
596
+ console.log(`[CLEANUP] removed empty legacy ticket dir: ${toRepoRelativePath(cwd, dir)}`);
597
+ } catch (err) {
598
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to prune ${dir}:`, err);
599
+ }
600
+ }
601
+ }
602
+
603
+ function routeMisplacedTicketFile(cwd, sourceAbs, dryRun) {
604
+ const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
605
+ const fileName = basename(sourceAbs);
606
+ const raw = safeReadText(sourceAbs);
607
+ const meta = parseFrontMatter(raw).meta || {};
608
+ const status = String(meta.status || "").toLowerCase();
609
+
610
+ if (fileName === TICKET_LIST_FILENAME) {
611
+ if (!dryRun) unlinkSync(sourceAbs);
612
+ return;
613
+ }
614
+
615
+ if (!fileName.endsWith(".md")) {
616
+ const targetAbs = classifyAgentFileTarget(cwd, sourceAbs);
617
+ moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced ticket artifact cleanup");
618
+ return;
619
+ }
620
+
621
+ const partition = inferPartitionFromFile(statSync(sourceAbs), meta);
622
+ const targetAbs = isActiveTicketStatus(status)
623
+ ? join(ticketDir, "sub", fileName)
624
+ : join(ticketDir, "archive", "sub", partition.yearMonth, partition.day, fileName);
625
+ moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced ticket cleanup");
626
+ }
627
+
628
+ function canonicalizeAgentDocsLayout(cwd, dryRun) {
629
+ const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
630
+ if (!existsSync(docsRoot)) return;
631
+ const allowedDirs = new Set(["archive", "plan"]);
632
+
633
+ for (const entry of sortedDirEntries(docsRoot, { withFileTypes: true })) {
634
+ const sourceAbs = join(docsRoot, entry.name);
635
+ if (entry.isDirectory() && allowedDirs.has(entry.name)) continue;
636
+
637
+ if (entry.isFile()) {
638
+ const targetAbs = classifyDocTarget(cwd, sourceAbs);
639
+ moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced docs cleanup");
640
+ continue;
641
+ }
642
+
643
+ if (!entry.isDirectory()) continue;
644
+ for (const fileAbs of collectFilesRecursively(sourceAbs)) {
645
+ const targetAbs = classifyDocTarget(cwd, fileAbs);
646
+ moveOrMergeFile(fileAbs, targetAbs, cwd, dryRun, `misplaced docs directory cleanup: ${entry.name}`);
647
+ }
648
+ removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
649
+ }
650
+ }
651
+
652
+ function canonicalizeAgentTicketsLayout(cwd, dryRun) {
653
+ const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
654
+ if (!existsSync(ticketDir)) return;
655
+ const allowedDirs = new Set(["archive", "sub"]);
656
+ const allowedFiles = new Set([TICKET_INDEX_FILENAME]);
657
+
658
+ for (const entry of sortedDirEntries(ticketDir, { withFileTypes: true })) {
659
+ const sourceAbs = join(ticketDir, entry.name);
660
+ if (entry.isDirectory() && allowedDirs.has(entry.name)) continue;
661
+ if (entry.isFile() && (allowedFiles.has(entry.name) || /^INDEX\.archive\.\d{4}-\d{2}\.json$/.test(entry.name))) continue;
662
+
663
+ if (entry.isFile()) {
664
+ routeMisplacedTicketFile(cwd, sourceAbs, dryRun);
665
+ continue;
666
+ }
667
+
668
+ if (!entry.isDirectory()) continue;
669
+ for (const fileAbs of collectFilesRecursively(sourceAbs)) {
670
+ routeMisplacedTicketFile(cwd, fileAbs, dryRun);
671
+ }
672
+ removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
673
+ }
674
+
675
+ const archiveRoot = join(ticketDir, "archive");
676
+ if (existsSync(archiveRoot)) {
677
+ for (const entry of sortedDirEntries(archiveRoot, { withFileTypes: true })) {
678
+ if (entry.name === "sub") continue;
679
+
680
+ const sourceAbs = join(archiveRoot, entry.name);
681
+ if (entry.isDirectory()) {
682
+ for (const fileAbs of collectFilesRecursively(sourceAbs)) {
683
+ routeMisplacedTicketFile(cwd, fileAbs, dryRun);
684
+ }
685
+ removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
686
+ continue;
687
+ }
688
+
689
+ if (entry.isFile()) {
690
+ routeMisplacedTicketFile(cwd, sourceAbs, dryRun);
691
+ continue;
692
+ }
693
+ }
694
+ }
695
+ }
696
+
697
+ function canonicalizeAgentRootLayout(cwd, dryRun) {
698
+ const agentRoot = join(cwd, AGENT_ROOT_DIR);
699
+ if (!existsSync(agentRoot)) return;
700
+ const allowedDirs = new Set(["docs", "knowledge", "tickets"]);
701
+ const allowedFiles = new Set(["config.json", "telemetry.jsonl"]);
702
+
703
+ for (const entry of sortedDirEntries(agentRoot, { withFileTypes: true })) {
704
+ const sourceAbs = join(agentRoot, entry.name);
705
+ if (entry.isDirectory() && allowedDirs.has(entry.name)) continue;
706
+ if (entry.isFile() && allowedFiles.has(entry.name)) continue;
707
+
708
+ if (entry.isFile()) {
709
+ const targetAbs = classifyAgentFileTarget(cwd, sourceAbs);
710
+ moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced agent root cleanup");
711
+ continue;
712
+ }
713
+
714
+ if (!entry.isDirectory()) continue;
715
+ for (const fileAbs of collectFilesRecursively(sourceAbs)) {
716
+ const targetAbs = classifyAgentFileTarget(cwd, fileAbs);
717
+ moveOrMergeFile(fileAbs, targetAbs, cwd, dryRun, `misplaced agent root directory cleanup: ${entry.name}`);
718
+ }
719
+ removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
720
+ }
721
+ }
722
+
723
+ export function enforceCanonicalAgentLayout(cwd, dryRun) {
724
+ canonicalizeAgentDocsLayout(cwd, dryRun);
725
+ canonicalizeLegacyArchiveDocsBuckets(cwd, dryRun);
726
+ canonicalizeAgentTicketsLayout(cwd, dryRun);
727
+ canonicalizeAgentRootLayout(cwd, dryRun);
728
+ }
729
+
730
+ function collectMarkdownFilesRecursively(dir, out = []) {
731
+ if (!existsSync(dir)) return out;
732
+ for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
733
+ const p = join(dir, entry.name);
734
+ if (entry.isDirectory()) {
735
+ collectMarkdownFilesRecursively(p, out);
736
+ continue;
737
+ }
738
+ if (entry.isFile() && entry.name.endsWith(".md")) {
739
+ out.push(p);
740
+ }
741
+ }
742
+ return out;
743
+ }
744
+
745
+ function listFlatMarkdownFiles(dir) {
746
+ if (!existsSync(dir)) return [];
747
+ return sortedDirEntries(dir)
748
+ .filter((name) => typeof name === "string" && name.endsWith(".md"))
749
+ .map((name) => join(dir, name));
750
+ }
751
+
752
+ function canonicalizeTicketArchivePath(cwd, dryRun) {
753
+ const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
754
+ const archiveRoot = join(ticketDir, "archive");
755
+ if (!existsSync(archiveRoot)) return;
756
+
757
+ const { byFileName, byId } = mapTicketIndexByFileName(cwd);
758
+ const archiveFiles = collectMarkdownFilesRecursively(archiveRoot);
759
+
760
+ for (const sourceAbs of archiveFiles) {
761
+ const relParts = toPosixPath(relative(archiveRoot, sourceAbs)).split("/");
762
+ if (relParts.length < 2) continue;
763
+
764
+ const fileName = basename(sourceAbs);
765
+ const sourceBase = basename(sourceAbs, ".md");
766
+ const sourceMeta = parseFrontMatter(safeReadText(sourceAbs)).meta || {};
767
+ const matchedEntry = byFileName.get(fileName)
768
+ || byId.get(sourceMeta.id)
769
+ || byFileName.get(`${sourceBase}.md`);
770
+
771
+ const status = String(matchedEntry?.status || sourceMeta.status || "archived").toLowerCase();
772
+ const group = normalizeTicketGroup(matchedEntry?.group || relParts[0], "sub");
773
+ const partition = inferPartitionFromFile(statSync(sourceAbs), matchedEntry);
774
+ const shouldBeOpen = isActiveTicketStatus(status);
775
+ const targetAbs = shouldBeOpen
776
+ ? join(ticketDir, group, fileName)
777
+ : join(archiveRoot, group, partition.yearMonth, partition.day, fileName);
778
+ if (sourceAbs === targetAbs) continue;
779
+
780
+ const moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "ticket archive cleanup");
781
+ if (moved && sourceAbs !== targetAbs && !dryRun) {
782
+ console.log(`[CLEANUP] ticket archive normalized: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
783
+ }
784
+ }
785
+ }
786
+
787
+ function rewritePlanLinkReferences(cwd, sourceAbs, targetAbs, dryRun) {
788
+ const sourceRel = toRepoRelativePath(cwd, sourceAbs);
789
+ const targetRel = toRepoRelativePath(cwd, targetAbs);
790
+ if (sourceRel === targetRel) return;
791
+
792
+ const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
793
+ for (const ticketAbs of collectMarkdownFilesRecursively(ticketDir)) {
794
+ const raw = safeReadText(ticketAbs);
795
+ if (!raw.includes(sourceRel)) continue;
796
+
797
+ let parsed;
798
+ try {
799
+ parsed = parseFrontMatter(raw);
800
+ } catch {
801
+ continue;
802
+ }
803
+
804
+ const nextMeta = { ...parsed.meta };
805
+ if (nextMeta.planLink === sourceRel) nextMeta.planLink = targetRel;
806
+ const nextContent = String(parsed.content || "").replaceAll(sourceRel, targetRel);
807
+ const nextRaw = stringifyFrontMatter(nextMeta, nextContent);
808
+ if (nextRaw === raw) continue;
809
+
810
+ if (!dryRun) writeFileSync(ticketAbs, nextRaw, "utf8");
811
+ console.log(`[MIGRATE] Updated planLink reference: ${toRepoRelativePath(cwd, ticketAbs)} -> ${targetRel}`);
812
+ }
813
+ }
814
+
815
+ function stripFrontMatterBlock(raw) {
816
+ return String(raw || "").replace(/^---\s*\n[\s\S]*?\n---\s*\n?/, "").trim();
817
+ }
818
+
819
+ function docTicketIdFromFile(fileAbs) {
820
+ const name = basename(fileAbs).replace(/\.[^.]+$/, "");
821
+ const match = name.match(/^(?:\d+-)?(\d{3})(?:-|$)/) || name.match(/^(\d{3})-/);
822
+ return match?.[1] || null;
823
+ }
824
+
825
+ function legacyDocSlug(fileAbs) {
826
+ return basename(fileAbs)
827
+ .replace(/\.[^.]+$/, "")
828
+ .toLowerCase()
829
+ .replace(/[^a-z0-9가-힣]+/gu, "-")
830
+ .replace(/^-+|-+$/g, "") || "legacy-doc";
831
+ }
832
+
833
+ function legacyDocTitle(fileAbs) {
834
+ return basename(fileAbs)
835
+ .replace(/\.[^.]+$/, "")
836
+ .replace(/[-_]+/g, " ")
837
+ .replace(/\s+/g, " ")
838
+ .trim();
839
+ }
840
+
841
+ function buildLegacyDocBody(fileAbs) {
842
+ const raw = safeReadText(fileAbs);
843
+ if (fileAbs.endsWith(".md")) return stripFrontMatterBlock(raw);
844
+ return ["```text", raw.trim(), "```"].join("\n");
845
+ }
846
+
847
+ function createLegacyDocTicket(ticketAbs, id, title, sourceAbs, dryRun) {
848
+ if (existsSync(ticketAbs)) return false;
849
+ const sourceMeta = parseFrontMatter(safeReadText(sourceAbs)).meta || {};
850
+ const summary = String(sourceMeta.summary || title).replace(/\n/g, " ").replace(/:/g, "-");
851
+ const createdAt = sourceMeta.createdAt || new Date().toISOString().slice(0, 19).replace("T", " ");
852
+ const body = [
853
+ "---",
854
+ `summary: ${summary}`,
855
+ "status: archived",
856
+ "priority: P3",
857
+ "tags: migrated",
858
+ `id: ${id}`,
859
+ `title: ${title}`,
860
+ `createdAt: ${createdAt}`,
861
+ "---",
862
+ "",
863
+ `# ${title}`,
864
+ "",
865
+ "> Legacy separated docs are merged here so this ticket is the single source of truth.",
866
+ "",
867
+ "## Scope & Constraints",
868
+ "",
869
+ "- **Target:** migrated legacy work record.",
870
+ "- **Context Files:** merged legacy content below.",
871
+ "- **Constraints:** preserve historical content without keeping separate docs files.",
872
+ "- **Lifecycle Guard:** this ticket is the canonical record.",
873
+ "",
874
+ "## Agent Permission Contract (APC)",
875
+ "",
876
+ "### [BOUNDARY]",
877
+ "- Editable modules: historical ticket record only.",
878
+ "- Forbidden modules: product/source changes from this migration.",
879
+ "- Rule citation: local project rules if present.",
880
+ "",
881
+ "### [CONTRACT]",
882
+ "- Input: separated legacy docs files.",
883
+ "- Output: one canonical ticket containing merged legacy content.",
884
+ "- Side effects: legacy docs files removed after merge.",
885
+ "",
886
+ "### [PATCH PLAN]",
887
+ "- Merge separated docs into this ticket.",
888
+ "- Remove source docs after merge.",
889
+ "",
890
+ "## Compact Plan",
891
+ "",
892
+ "- **Problem:** this work item existed as separated docs outside the ticket.",
893
+ "- **Approach:** merge the legacy content below and keep this ticket as canonical.",
894
+ "- **Verification:** confirm the source docs files are removed and this ticket remains.",
895
+ "- **Ticket Numbering:** infer the master/sub ticket from the numbered ticket ID; do not add inline child-ticket links.",
896
+ "",
897
+ "## Tasks",
898
+ "",
899
+ "- [x] Merge separated docs content into this ticket.",
900
+ "- [x] Remove separated docs files.",
901
+ "",
902
+ "## Done When",
903
+ "",
904
+ "- This ticket contains the merged content.",
905
+ "- Separate docs files are removed.",
906
+ ""
907
+ ].join("\n");
908
+ if (!dryRun) {
909
+ mkdirSync(dirname(ticketAbs), { recursive: true });
910
+ writeFileSync(ticketAbs, body, "utf8");
911
+ }
912
+ return true;
913
+ }
914
+
915
+ function mergeDocIntoTicket(ticketAbs, docAbs, dryRun) {
916
+ const body = buildLegacyDocBody(docAbs);
917
+ if (!body) return false;
918
+
919
+ const title = legacyDocTitle(docAbs);
920
+ const section = [
921
+ "",
922
+ "## Merged Legacy Document",
923
+ "",
924
+ `### ${title}`,
925
+ "",
926
+ body,
927
+ ""
928
+ ].join("\n");
929
+ const ticketRaw = safeReadText(ticketAbs).trimEnd();
930
+ if (!dryRun) {
931
+ writeFileSync(ticketAbs, `${ticketRaw}${section}`, "utf8");
932
+ unlinkSync(docAbs);
933
+ }
934
+ return true;
935
+ }
936
+
937
+ function mergeSeparatedDocsIntoTickets(cwd, dryRun) {
938
+ const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
939
+ const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
940
+ if (!existsSync(docsRoot)) return 0;
941
+
942
+ const ticketFiles = collectMarkdownFilesRecursively(ticketDir)
943
+ .filter((p) => basename(p) !== TICKET_LIST_FILENAME);
944
+ const ticketsById = new Map();
945
+ for (const ticketAbs of ticketFiles) {
946
+ const id = docTicketIdFromFile(ticketAbs);
947
+ if (!id) continue;
948
+ if (!ticketsById.has(id)) ticketsById.set(id, []);
949
+ ticketsById.get(id).push(ticketAbs);
950
+ }
951
+
952
+ let merged = 0;
953
+ let created = 0;
954
+ for (const docAbs of collectFilesRecursively(docsRoot)) {
955
+ const body = buildLegacyDocBody(docAbs).trim();
956
+ if (!body) continue;
957
+
958
+ const id = docTicketIdFromFile(docAbs);
959
+ let ticketAbs = null;
960
+ if (id && ticketsById.has(id)) {
961
+ const candidates = ticketsById.get(id);
962
+ ticketAbs = candidates.find((p) => p.includes(`${TICKET_SUBDIR}/sub/`)) || candidates[0];
963
+ }
964
+
965
+ if (!ticketAbs) {
966
+ const slug = legacyDocSlug(docAbs);
967
+ const ticketId = id || slug;
968
+ ticketAbs = join(ticketDir, "archive", "sub", "legacy-docs", `${ticketId}.md`);
969
+ if (createLegacyDocTicket(ticketAbs, ticketId, slug, docAbs, dryRun)) created++;
970
+ if (id) {
971
+ if (!ticketsById.has(id)) ticketsById.set(id, []);
972
+ ticketsById.get(id).push(ticketAbs);
973
+ }
974
+ }
975
+
976
+ if (mergeDocIntoTicket(ticketAbs, docAbs, dryRun)) merged++;
977
+ }
978
+
979
+ if (!dryRun) rmSync(docsRoot, { recursive: true, force: true });
980
+ if (merged > 0 || created > 0) {
981
+ console.log(`[MIGRATE] separated docs merged into tickets: merged=${merged}, created=${created}`);
982
+ }
983
+ return merged;
984
+ }
985
+
986
+ export function canonicalizeDocsArchiveBuckets(cwd, dryRun) {
987
+ const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
988
+ const archiveDir = join(docsRoot, "archive");
989
+ const buckets = [
990
+ { name: "plan", source: join(docsRoot, "plan"), archiveBase: archiveDir },
991
+ ];
992
+
993
+ const { byFileName, byId } = mapTicketIndexByFileName(cwd);
994
+ for (const bucket of buckets) {
995
+ if (!existsSync(bucket.source)) continue;
996
+
997
+ const docFiles = collectFilesRecursively(bucket.source).filter((p) => p.endsWith(".md"));
998
+ for (const sourceAbs of docFiles) {
999
+ const fileName = basename(sourceAbs);
1000
+ const sourceMeta = parseFrontMatter(safeReadText(sourceAbs)).meta || {};
1001
+ const matchedEntry = resolveDocTicketEntry(fileName, sourceMeta, byFileName, byId);
1002
+ const status = String(matchedEntry?.status || sourceMeta.status || "active").toLowerCase();
1003
+ const isActive = isActiveTicketStatus(status);
1004
+ const shouldArchive = !isActive;
1005
+
1006
+ const yearMonth = parseYearMonth(matchedEntry?.archiveYearMonth)
1007
+ || parseYearMonth(matchedEntry?.createdAt)
1008
+ || parseYearMonth(matchedEntry?.updatedAt)
1009
+ || parseYearMonth(statSync(sourceAbs).mtime.toISOString())
1010
+ || parseYearMonth(new Date().toISOString());
1011
+ if (!yearMonth) continue;
1012
+
1013
+ const targetAbs = isActive ? join(docsRoot, "plan", fileName) : join(bucket.archiveBase, yearMonth, fileName);
1014
+
1015
+ const moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, `docs lifecycle cleanup: ${bucket.name}`);
1016
+ if (moved) {
1017
+ rewritePlanLinkReferences(cwd, sourceAbs, targetAbs, dryRun);
1018
+ }
1019
+ }
1020
+ }
1021
+ }
1022
+
1023
+ function canonicalizeLegacyArchiveDocsBuckets(cwd, dryRun) {
1024
+ const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
1025
+ const archiveRoot = join(docsRoot, "archive");
1026
+ if (!existsSync(archiveRoot)) return;
1027
+
1028
+ const legacyBuckets = ["plans", "walkthroughs"];
1029
+ const now = new Date();
1030
+ const fallbackYearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;
1031
+
1032
+ for (const bucket of legacyBuckets) {
1033
+ const sourceRoot = join(archiveRoot, bucket);
1034
+ if (!existsSync(sourceRoot)) continue;
1035
+
1036
+ for (const sourceAbs of collectFilesRecursively(sourceRoot)) {
1037
+ if (!sourceAbs.endsWith(".md")) continue;
1038
+
1039
+ const relParts = toPosixPath(relative(sourceRoot, sourceAbs)).split("/");
1040
+ const yearMonth = parseYearMonth(relParts[0]) || parseYearMonth(statSync(sourceAbs).mtime.toISOString()) || fallbackYearMonth;
1041
+ const targetAbs = join(archiveRoot, yearMonth, ...relParts.slice(relParts[0] && parseYearMonth(relParts[0]) ? 1 : 0));
1042
+ if (sourceAbs === targetAbs) continue;
1043
+
1044
+ const moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, `legacy archive docs cleanup: ${bucket}`);
1045
+ if (moved) {
1046
+ rewriteLegacyReportLinks(cwd, targetAbs, basename(sourceAbs), dryRun);
1047
+ }
1048
+ }
1049
+
1050
+ try {
1051
+ if (!dryRun && sortedDirEntries(sourceRoot).length === 0) {
1052
+ rmSync(sourceRoot, { recursive: true, force: true });
1053
+ } else if (!dryRun) {
1054
+ removeEmptyDirsBottomUp(sourceRoot, cwd, dryRun);
1055
+ }
1056
+ } catch (err) {
1057
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to remove legacy docs archive bucket ${sourceRoot}:`, err);
1058
+ }
1059
+ }
1060
+ }
1061
+
1062
+ function walkMdFiles(dir, callback) {
1063
+ if (!existsSync(dir)) return;
1064
+ for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
1065
+ const p = join(dir, entry.name);
1066
+ if (entry.isDirectory()) {
1067
+ walkMdFiles(p, callback);
1068
+ } else if (entry.isFile() && entry.name.endsWith(".md")) {
1069
+ callback(p);
1070
+ }
1071
+ }
1072
+ }
1073
+
1074
+ function escapeRegExp(value) {
1075
+ return String(value || "").replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
1076
+ }
1077
+
1078
+ function migrateHtmlMarkersToHeadings(cwd, dryRun) {
1079
+ const agentsPath = join(cwd, "AGENTS.md");
1080
+ if (!existsSync(agentsPath)) return;
1081
+
1082
+ const content = readFileSync(agentsPath, "utf8");
1083
+ const oldBegin = "<!-- deuk-agent-rule:begin -->";
1084
+ const oldEnd = "<!-- deuk-agent-rule:end -->";
1085
+
1086
+ if (!content.includes(oldBegin)) return;
1087
+
1088
+ const beginIdx = content.indexOf(oldBegin);
1089
+ const endIdx = content.lastIndexOf(oldEnd);
1090
+ if (endIdx <= beginIdx) return;
1091
+
1092
+ const managedContent = content.slice(beginIdx + oldBegin.length, endIdx).trim();
1093
+ const userContent = content.slice(0, beginIdx).trim();
1094
+ const afterContent = content.slice(endIdx + oldEnd.length).trim();
1095
+
1096
+ let newContent = "";
1097
+ if (userContent) newContent += userContent + "\n\n";
1098
+ if (afterContent) newContent += afterContent + "\n\n";
1099
+ newContent += "---\n\n";
1100
+ newContent += "## DeukAgentRules\n\n";
1101
+ newContent += "> Managed by DeukAgentRules. Remove this section if not installed.\n\n";
1102
+ newContent += managedContent + "\n";
1103
+
1104
+ if (!dryRun) {
1105
+ copyFileSync(agentsPath, agentsPath + ".pre-v2.bak");
1106
+ writeFileSync(agentsPath, newContent, "utf8");
1107
+ console.log("[MIGRATE] Converted HTML markers to heading-based format in AGENTS.md");
1108
+ console.log("[MIGRATE] Backup saved as AGENTS.md.pre-v2.bak");
1109
+ } else {
1110
+ console.log("[DRY-RUN] Would convert HTML markers to heading-based format in AGENTS.md");
1111
+ }
1112
+ }
1113
+
90
1114
  function syncTemplates(cwd, bundleRoot, dryRun) {
91
- const tplSrcDir = join(bundleRoot, "templates");
92
1115
  const tplDestDir = join(cwd, AGENT_ROOT_DIR, TEMPLATE_SUBDIR);
93
- if (!existsSync(tplSrcDir)) return;
94
- if (!dryRun) mkdirSync(tplDestDir, { recursive: true });
95
-
96
- const srcFiles = readdirSync(tplSrcDir).filter(n => n.endsWith(".md"));
97
- const destFiles = existsSync(tplDestDir) ? readdirSync(tplDestDir).filter(n => n.endsWith(".md")) : [];
98
-
99
- for (const name of srcFiles) {
100
- const src = join(tplSrcDir, name);
101
- const dest = join(tplDestDir, name);
102
- if (!dryRun) copyFileSync(src, dest);
103
- console.log(`template synced: ${dest}`);
104
- }
105
-
106
- for (const name of destFiles) {
107
- if (!srcFiles.includes(name)) {
108
- const obsolete = join(tplDestDir, name);
109
- if (!dryRun) unlinkSync(obsolete);
110
- console.log(`template removed (obsolete): ${obsolete}`);
111
- }
112
- }
113
- }
114
-
115
- const SPOKE_REGISTRY = [
116
- {
117
- id: "cursor",
118
- detect: (cwd) => existsSync(join(cwd, ".cursor")),
119
- legacy: ".cursorrules",
120
- target: ".cursor/rules/deuk-agent.mdc",
121
- format: "mdc",
122
- },
123
- {
124
- id: "claude",
125
- detect: (cwd) => existsSync(join(cwd, "CLAUDE.md")) || existsSync(join(cwd, ".claude")),
126
- legacy: null,
127
- target: "CLAUDE.md",
128
- format: "markdown",
129
- },
130
- {
131
- id: "copilot",
132
- detect: (cwd) => existsSync(join(cwd, ".github")),
133
- legacy: null,
134
- target: ".github/copilot-instructions.md",
135
- format: "markdown",
136
- },
137
- {
138
- id: "windsurf",
139
- detect: (cwd) => existsSync(join(cwd, ".windsurf")),
140
- legacy: ".windsurfrules",
141
- target: ".windsurf/rules/deuk-agent.md",
142
- format: "markdown",
143
- },
144
- {
145
- id: "jetbrains",
146
- detect: (cwd) => existsSync(join(cwd, ".aiassistant")) || existsSync(join(cwd, ".idea")),
147
- legacy: null,
148
- target: ".aiassistant/rules/deuk-agent.md",
149
- format: "markdown",
150
- },
151
- {
152
- id: "antigravity",
153
- detect: (cwd) => existsSync(join(cwd, "gemini.md")) || existsSync(join(cwd, ".gemini")),
154
- legacy: null,
155
- target: "gemini.md",
156
- format: "markdown",
157
- },
158
- ];
159
-
160
- function generateSpokeContent(spoke) {
161
- const commonContent = `# Deuk Agent Rules
162
-
163
- This project follows the Deuk Agent Rules framework.
164
- - Read the full rules: [AGENTS.md](../../AGENTS.md)
165
- - Module-specific rules: [.deuk-agent/rules/](../../.deuk-agent/rules/)
166
-
167
- ## Critical Rules
168
- - Use \`.deuk-agent/templates/TICKET_TEMPLATE.md\` for multi-step tasks.
169
- - RAG-First: Use MCP tools before local file search when available.
170
- - Error Loop Prevention: Stop after 2 repeated errors, create a ticket.
1116
+ const tplSourceDir = join(bundleRoot, "templates");
1117
+ if (!existsSync(tplSourceDir)) return;
1118
+
1119
+ if (!existsSync(tplDestDir) && !dryRun) {
1120
+ mkdirSync(tplDestDir, { recursive: true });
1121
+ }
1122
+
1123
+ if (!dryRun) {
1124
+ for (const entry of readdirSync(tplSourceDir, { withFileTypes: true })) {
1125
+ if (!entry.isFile()) continue;
1126
+ const source = join(tplSourceDir, entry.name);
1127
+ const target = join(tplDestDir, entry.name);
1128
+ cpSync(source, target);
1129
+ }
1130
+ console.log(`[SYNC] templates synced to ${toRepoRelativePath(cwd, tplDestDir)}`);
1131
+ return;
1132
+ }
1133
+ console.log(`[SYNC] templates synced to ${toRepoRelativePath(cwd, tplDestDir)} (dry-run mode)`);
1134
+ }
1135
+
1136
+ function syncSkillTemplates(cwd, bundleRoot, dryRun) {
1137
+ const skillDestDir = join(cwd, AGENT_ROOT_DIR, "skill-templates");
1138
+ const skillSourceDir = join(bundleRoot, "templates", "skills");
1139
+ if (!existsSync(skillSourceDir)) return;
1140
+
1141
+ if (!dryRun) {
1142
+ mkdirSync(skillDestDir, { recursive: true });
1143
+ cpSync(skillSourceDir, skillDestDir, { recursive: true });
1144
+ console.log(`[SYNC] skill templates synced to ${toRepoRelativePath(cwd, skillDestDir)}`);
1145
+ return;
1146
+ }
1147
+ console.log(`[SYNC] skill templates synced to ${toRepoRelativePath(cwd, skillDestDir)} (dry-run mode)`);
1148
+ }
1149
+
1150
+ /**
1151
+ * Scans .deuk-agent/tickets/ and .deuk-agent/docs/ for markdown files
1152
+ * missing YAML frontmatter or missing required frontmatter keys,
1153
+ * and injects/supplements them. Also strips trailing whitespace.
1154
+ * This ensures lint:md passes and RAG indexing works correctly.
1155
+ */
1156
+ function migrateMissingFrontmatter(cwd, dryRun) {
1157
+ const dirs = [
1158
+ join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR),
1159
+ join(cwd, AGENT_ROOT_DIR, "docs"),
1160
+ ];
1161
+ const requiredKeys = ["summary", "status", "priority", "tags"];
1162
+
1163
+ let count = 0;
1164
+ for (const dir of dirs) {
1165
+ if (!existsSync(dir)) continue;
1166
+ walkMdFiles(dir, (absPath) => {
1167
+ if (absPath.includes("archive/")) return;
1168
+ const raw = readFileSync(absPath, "utf8");
1169
+ const relPath = toRepoRelativePath(cwd, absPath);
1170
+ const slug = basename(absPath, ".md");
1171
+ const isTicket = relPath.includes(`/${TICKET_SUBDIR}/`);
1172
+ const hasFrontmatter = raw.startsWith("---\n") || raw.startsWith("---\r\n");
1173
+
1174
+ if (hasFrontmatter) {
1175
+ // Check if required keys are present
1176
+ const parsed = parseFrontMatter(raw);
1177
+ const missing = requiredKeys.filter(k => !parsed.meta[k]);
1178
+ if (missing.length === 0) return; // all keys present, skip
1179
+
1180
+ // Supplement missing keys
1181
+ const defaults = {
1182
+ summary: parsed.meta.title || parsed.meta.id || slug,
1183
+ status: isTicket ? "open" : "active",
1184
+ priority: "P3",
1185
+ tags: isTicket ? "migrated" : "docs, migrated",
1186
+ };
1187
+ for (const key of missing) {
1188
+ parsed.meta[key] = defaults[key];
1189
+ }
1190
+
1191
+ if (!dryRun) {
1192
+ const cleanedContent = parsed.content.split("\n").map(l => l.trimEnd()).join("\n");
1193
+ writeFileSync(absPath, stringifyFrontMatter(parsed.meta, cleanedContent), "utf8");
1194
+ }
1195
+ console.log(`[MIGRATE] ${dryRun ? "Would supplement" : "Supplemented"} frontmatter (${missing.join(", ")}): ${relPath}`);
1196
+ } else {
1197
+ // No frontmatter at all — inject
1198
+ const meta = {
1199
+ summary: slug,
1200
+ status: isTicket ? "open" : "active",
1201
+ priority: "P3",
1202
+ tags: isTicket ? "migrated" : "docs, migrated",
1203
+ };
1204
+ if (isTicket) {
1205
+ meta.id = slug;
1206
+ meta.title = slug;
1207
+ meta.createdAt = new Date().toISOString().replace("T", " ").split(".")[0];
1208
+ }
1209
+
1210
+ if (!dryRun) {
1211
+ const cleanedRaw = raw.split("\n").map(l => l.trimEnd()).join("\n");
1212
+ const newContent = stringifyFrontMatter(meta, cleanedRaw);
1213
+ writeFileSync(absPath, newContent, "utf8");
1214
+ }
1215
+ console.log(`[MIGRATE] ${dryRun ? "Would add" : "Added"} frontmatter: ${relPath}`);
1216
+ }
1217
+ count++;
1218
+ });
1219
+ }
1220
+ if (count > 0) {
1221
+ console.log(`[MIGRATE] Frontmatter migration: ${count} file(s) ${dryRun ? "would be " : ""}updated.`);
1222
+ }
1223
+ }
1224
+
1225
+ export function buildGlobalCodexInstructions() {
1226
+ return `---
1227
+
1228
+ ## DeukAgentRules
1229
+
1230
+ > Managed by DeukAgentRules. Remove this section if not installed.
1231
+
1232
+ # Global DeukAgentRules Locator
1233
+
1234
+ This file is a locator, not a behavior contract.
1235
+
1236
+ - In a repository, load the local \`AGENTS.md\` or \`.deuk-agent/\` pointer first.
1237
+ - After the local pointer loads \`core-rules/AGENTS.md\`, the core hub owns TDW, RAG, silence, scope, and verification policy.
1238
+ - Do not summarize this locator or treat it as a second rule file.
1239
+ `;
1240
+ }
1241
+
1242
+ function syncGlobalCodexInstructions(dryRun) {
1243
+ const codexDir = join(homedir(), ".codex");
1244
+ if (!existsSync(codexDir)) return;
1245
+
1246
+ const target = join(codexDir, "AGENTS.md");
1247
+ const content = buildGlobalCodexInstructions();
1248
+
1249
+ if (!dryRun) {
1250
+ writeFileSync(target, content, "utf8");
1251
+ console.log(`global codex instructions synced: ${target}`);
1252
+ }
1253
+ }
1254
+
1255
+ export function generateSpokeContent(spoke, bundleRoot) {
1256
+ const globalRulesPath = join(bundleRoot, "core-rules", "AGENTS.md");
1257
+
1258
+ const content = `# Deuk Agent Rules
1259
+
1260
+ **[MANDATORY — TOOL CALL REQUIRED]** Core rules are at: [AGENTS.md](file://${globalRulesPath})
1261
+
1262
+ This pointer is a thin bootstrap, not a second workflow contract.
1263
+
1264
+ 1. FIRST tool call: read the core rules file above and internally note its frontmatter version.
1265
+ 2. Then read local \`PROJECT_RULE.md\` and internally identify applicable DC-* rules.
1266
+ 3. After the core hub is loaded, \`core-rules/AGENTS.md\` is the DeukAgentRules SSoT for TDW, RAG, silence, scope, and verification.
1267
+
1268
+ Do not print pointer/core metadata, version, DC-* lists, progress commentary, or interim summaries. Only the single required ticket-start line may appear before the final answer unless the user explicitly asks for live narration or a blocker/user decision must be surfaced.
171
1269
  `;
172
1270
 
173
1271
  if (spoke.format === "mdc") {
@@ -176,104 +1274,136 @@ description: "Deuk Agent Rules - Project conventions and ticket workflow"
176
1274
  globs: ["**/*"]
177
1275
  alwaysApply: true
178
1276
  ---
179
- ${commonContent}`;
1277
+ ${content}`;
180
1278
  }
181
- return `<!-- deuk-agent-rule:begin -->\n${commonContent}\n<!-- deuk-agent-rule:end -->\n`;
1279
+ return `---\n\n## DeukAgentRules\n\n> Managed by DeukAgentRules. Remove this section if not installed.\n\n${content}\n`;
182
1280
  }
183
1281
 
184
- function generateLegacyDeprecationNotice(spoke) {
185
- return `<!-- deuk-agent-rule:deprecated -->
186
- This file is deprecated. Rules have moved to:
187
- - Target: ${spoke.target}
188
- - All agents: AGENTS.md
189
- <!-- deuk-agent-rule:deprecated:end -->
190
- `;
1282
+ export function mergeManagedRuleContent(existingContent, managedContent) {
1283
+ return mergeManagedBlock(existingContent, managedContent);
1284
+ }
1285
+
1286
+ function hasCustomUserRules(filePath) {
1287
+ try {
1288
+ const content = readFileSync(filePath, "utf8");
1289
+ const withoutLegacyHtmlMarkers = content
1290
+ .replace(/<!-- deuk-agent-rule-cursorrules:begin -->[\s\S]*?<!-- deuk-agent-rule-cursorrules:end -->/g, "")
1291
+ .replace(/<!-- deuk-agent-rule:begin -->[\s\S]*?<!-- deuk-agent-rule:end -->/g, "");
1292
+ if (!withoutLegacyHtmlMarkers.trim()) return false;
1293
+
1294
+ const idx = content.indexOf("## DeukAgentRules");
1295
+ let stripped = content;
1296
+ if (idx !== -1) {
1297
+ // Find the preceding horizontal rule
1298
+ let blockStart = idx;
1299
+ const prevText = content.slice(0, idx);
1300
+ const hrIndex = prevText.lastIndexOf("---");
1301
+ if (hrIndex !== -1 && prevText.slice(hrIndex).trim() === "") {
1302
+ blockStart = hrIndex;
1303
+ }
1304
+ stripped = content.slice(0, blockStart);
1305
+ }
1306
+ const isPointer = content.includes("This project follows the Deuk Agent Rules framework") ||
1307
+ content.includes("centralized in:") ||
1308
+ content.includes("[AGENTS.md]");
1309
+ if (isPointer) return false;
1310
+
1311
+ return stripped.trim().length > 0;
1312
+ } catch (err) {
1313
+ if (process.env.DEBUG) console.warn(`[DEBUG] Failed to read ${filePath}:`, err);
1314
+ return false;
1315
+ }
191
1316
  }
192
1317
 
193
- function deploySpokePointers(cwd, dryRun) {
1318
+ function deploySpokePointers(cwd, bundleRoot, dryRun, selectedTools = []) {
194
1319
  for (const spoke of SPOKE_REGISTRY) {
195
- if (!spoke.detect(cwd)) continue;
196
-
1320
+ // Always clean legacy files, but backup if they contain custom user rules
1321
+ if (spoke.legacy) {
1322
+ const legacyPath = join(cwd, spoke.legacy);
1323
+ if (existsSync(legacyPath)) {
1324
+ if (hasCustomUserRules(legacyPath)) {
1325
+ const bakPath = legacyPath + ".bak";
1326
+ if (!dryRun) renameSync(legacyPath, bakPath);
1327
+ console.log(`[MIGRATE] Backed up user rules to ${spoke.legacy}.bak`);
1328
+ } else {
1329
+ if (!dryRun) unlinkSync(legacyPath);
1330
+ console.log(`[CLEANUP] removed legacy: ${spoke.legacy}`);
1331
+ }
1332
+ }
1333
+ }
1334
+
1335
+ if (!isSelectedTool(selectedTools, spoke.id) && !spoke.detect(cwd, selectedTools)) continue;
1336
+
197
1337
  const targetPath = join(cwd, spoke.target);
198
1338
  const targetDir = dirname(targetPath);
1339
+ const managedContent = generateSpokeContent(spoke, bundleRoot);
1340
+ const existingContent = existsSync(targetPath) ? safeReadText(targetPath) : "";
1341
+ const nextContent = mergeManagedBlock(existingContent, managedContent);
1342
+ if (existingContent === nextContent) {
1343
+ console.log(`spoke synced: ${spoke.target} (${spoke.id})`);
1344
+ continue;
1345
+ }
199
1346
 
200
1347
  if (!dryRun) {
1348
+ ensureWritableDirectory(targetDir, cwd, dryRun, `spoke target conflict resolved for ${spoke.id}`);
201
1349
  mkdirSync(targetDir, { recursive: true });
202
- writeFileSync(targetPath, generateSpokeContent(spoke), "utf8");
1350
+ writeFileSync(targetPath, nextContent, "utf8");
203
1351
  }
204
1352
  console.log(`spoke synced: ${spoke.target} (${spoke.id})`);
205
-
206
- // Deprecate legacy file if it exists
207
- if (spoke.legacy) {
208
- const legacyPath = join(cwd, spoke.legacy);
209
- if (existsSync(legacyPath)) {
210
- if (!dryRun) writeFileSync(legacyPath, generateLegacyDeprecationNotice(spoke), "utf8");
211
- console.log(`spoke deprecated: ${spoke.legacy} -> ${spoke.target}`);
212
- }
213
- }
1353
+ }
1354
+ }
1355
+
1356
/**
 * Delete stale rule copies left behind by older layouts so only the
 * canonical spoke files remain in the workspace.
 *
 * Preserved on purpose:
 * - AGENTS.md: now the Antigravity spoke target — do NOT delete it here.
 * - GEMINI.md: legacy cleanup is handled by deploySpokePointers (spoke.legacy field).
 * - .gemini: the Antigravity platform directory.
 *
 * @param {string} cwd - Workspace root to clean.
 * @param {boolean} dryRun - When true, only log; nothing is removed from disk.
 */
function removeDuplicateRuleCopies(cwd, dryRun) {
  const staleTargets = [
    join(cwd, AGENT_ROOT_DIR, "rules"),
    join(cwd, ".cursor", "rules", "deuk-agent-rule-multi-ai-workflow.mdc"),
    join(cwd, "CLAUDE.md"),
  ];

  staleTargets
    .filter((stalePath) => existsSync(stalePath))
    .forEach((stalePath) => {
      if (!dryRun) rmSync(stalePath, { recursive: true, force: true });
      console.log(`[CLEANUP] removed legacy/duplicate: ${toRepoRelativePath(cwd, stalePath)}`);
    });
}
216
1372
 
217
1373
  export async function runInit(opts, bundleRoot) {
218
1374
  const savedConfig = loadInitConfig(opts.cwd) || {};
1375
+ const workflowMode = resolveWorkflowMode(opts, savedConfig);
1376
+ const executionEnabled = isWorkflowExecute({ ...opts, workflowMode }, savedConfig);
219
1377
  const ignoreDirs = savedConfig.ignoreDirs;
220
- const submodules = discoverAllSubmodules(opts.cwd, ignoreDirs);
221
- if (!submodules.includes(opts.cwd)) submodules.push(opts.cwd);
1378
+ const selectedTools = opts.agentTools || savedConfig.agentTools || [];
222
1379
 
223
- const markers = resolveMarkers(opts);
224
- const bundleAgents = readBundleAgents(bundleRoot);
1380
+ if (!opts.dryRun && !executionEnabled) {
1381
+ throw new Error(
1382
+ `[WORKFLOW BLOCKED] plan mode is active for ${opts.cwd}. Re-run with --workflow execute or --approval approved to apply file mutations. Use --dry-run for preparation only.`
1383
+ );
1384
+ }
225
1385
 
226
- for (const subCwd of submodules) {
227
- console.log(`\nInitializing ${basename(subCwd)}...`);
228
-
229
- // 1. Migration & Directory Setup
230
- migrateLegacyStructure(subCwd, opts.dryRun);
231
- ensureTicketDirAndGitignore({ ...opts, cwd: subCwd });
232
-
233
- // 2. Normalize INDEX.json paths (fix stale paths)
234
- normalizeTicketPaths(subCwd, { silent: false });
235
-
236
- // 3. Spoke Pointers (e.g. .cursor/rules/deuk-agent.mdc)
237
- deploySpokePointers(subCwd, opts.dryRun);
238
-
239
- // 4. Agents Setup (AGENTS.md)
240
- const compiledAgentsAdditions = compileDynamicRules(subCwd, bundleRoot, "AGENTS.md");
241
- const fullBundleAgents = bundleAgents + "\n\n" + compiledAgentsAdditions;
242
-
243
- const agentsResult = applyAgents({
244
- targetPath: join(subCwd, "AGENTS.md"),
245
- bundleContent: fullBundleAgents,
246
- markers, flavor: "init",
247
- appendIfNoMarkers: opts.appendIfNoMarkers,
248
- dryRun: opts.dryRun, backup: opts.backup,
249
- agentsMode: opts.agents || "inject"
250
- });
251
- console.log(`AGENTS.md: ${agentsResult.action}`);
252
-
253
- // 5. Hub Rules Sync (.deuk-agent/rules/)
254
- const hubRulesDir = join(subCwd, AGENT_ROOT_DIR, RULES_SUBDIR);
255
- if (!opts.dryRun) mkdirSync(hubRulesDir, { recursive: true });
256
- applyRules({
257
- bundleRulesDir: join(bundleRoot, "rules"),
258
- targetRulesDir: hubRulesDir,
259
- rulesMode: opts.rules || "overwrite",
260
- dryRun: opts.dryRun, backup: opts.backup
261
- });
1386
+ // 0. Sync Global Codex Instructions
1387
+ syncGlobalCodexInstructions(opts.dryRun);
262
1388
 
263
- // 6. Gemini Rule Sync (root rule)
264
- const geminiBundle = join(bundleRoot, "gemini.md");
265
- const geminiDest = join(subCwd, "gemini.md");
266
- if (existsSync(geminiBundle)) {
267
- const baseGemini = readFileSync(geminiBundle, "utf8");
268
- const compiledGeminiAdditions = compileDynamicRules(subCwd, bundleRoot, "gemini.md");
269
- if (!opts.dryRun) {
270
- writeFileSync(geminiDest, baseGemini + "\n\n" + compiledGeminiAdditions, "utf8");
271
- }
272
- console.log(`gemini.md: synced with dynamic rules`);
273
- }
1389
+ // 0.1 MCP / Phase 0 Status Check
1390
+ const mcpActive = await isMcpActive(opts.cwd);
1391
+ console.log(`\n[POLICY] MCP Status: ${mcpActive ? "\x1b[32mACTIVE\x1b[0m" : "\x1b[33mINACTIVE\x1b[0m"}`);
1392
+ if (mcpActive) {
1393
+ console.log(`[POLICY] Phase 0 RAG validation is \x1b[32mENFORCED\x1b[0m for ticket creation.\n`);
1394
+ } else {
1395
+ console.log(`[POLICY] Running in offline/disconnected mode.\n`);
1396
+ }
274
1397
 
275
- // 7. Templates Sync (.deuk-agent/templates/)
276
- syncTemplates(subCwd, bundleRoot, opts.dryRun);
1398
+ const submodules = discoverAllWorkspaces(opts.cwd, ignoreDirs);
1399
+ if (!submodules.includes(opts.cwd)) submodules.push(opts.cwd);
1400
+
1401
+ for (const subCwd of submodules) {
1402
+ try {
1403
+ await initSingleWorkspace(subCwd, opts, bundleRoot, selectedTools);
1404
+ } catch (err) {
1405
+ console.error(`[ERROR] Failed to initialize workspace ${basename(subCwd)}: ${err.message}`);
1406
+ }
277
1407
  }
278
1408
 
279
1409
  if (!loadInitConfig(opts.cwd)) {
@@ -281,26 +1411,55 @@ export async function runInit(opts, bundleRoot) {
281
1411
  }
282
1412
  }
283
1413
 
284
- export function runMerge(opts, bundleRoot) {
285
- const markers = resolveMarkers(opts);
286
- const agentsResult = applyAgents({
287
- targetPath: join(opts.cwd, "AGENTS.md"),
288
- bundleContent: readBundleAgents(bundleRoot),
289
- markers, flavor: "merge",
290
- appendIfNoMarkers: opts.appendIfNoMarkers,
291
- dryRun: opts.dryRun, backup: opts.backup,
292
- agentsMode: opts.agents || "inject"
293
- });
294
- console.log(`AGENTS.md: ${agentsResult.action} (${agentsResult.mode || ""})`);
295
-
296
- const hubRulesDir = join(opts.cwd, AGENT_ROOT_DIR, RULES_SUBDIR);
297
- const ruleActions = applyRules({
298
- bundleRulesDir: join(bundleRoot, "rules"),
299
- targetRulesDir: hubRulesDir,
300
- rulesMode: opts.rules || "skip",
301
- dryRun: opts.dryRun, backup: opts.backup
302
- });
303
- ruleActions.forEach(r => console.log(`hub rule ${r.action}: ${r.dest || r.src}`));
1414
/**
 * Run the full init pipeline for one workspace (root or submodule):
 * legacy migrations, ticket/docs normalization, spoke pointer deployment,
 * PROJECT_RULE.md bootstrap, and template/skill sync.
 *
 * Step order is significant: migrations and normalization run before the
 * spoke pointers and templates are (re)deployed.
 *
 * @param {string} subCwd - Workspace directory being initialized.
 * @param {object} opts - CLI options (uses dryRun plus ticket-dir settings).
 * @param {string} bundleRoot - Root of the bundled templates/rules.
 * @param {string[]} selectedTools - Agent tools chosen by the user/config.
 */
async function initSingleWorkspace(subCwd, opts, bundleRoot, selectedTools) {
  const { dryRun } = opts;
  console.log(`\nInitializing ${basename(subCwd)}...`);

  // 1. Migration & directory setup
  migrateLegacyStructure(subCwd, dryRun);
  migrateHtmlMarkersToHeadings(subCwd, dryRun);
  ensureTicketDirAndGitignore({ ...opts, cwd: subCwd });

  // 2. Normalize INDEX.json paths (fix stale paths)
  normalizeTicketPaths(subCwd, { silent: false });

  // 2.5. Frontmatter migration (add missing frontmatter to deuk-agent docs/tickets)
  migrateMissingFrontmatter(subCwd, dryRun);

  // 2.6. Deterministic archive/docs normalization
  canonicalizeTicketArchivePath(subCwd, dryRun);
  canonicalizeDocsArchiveBuckets(subCwd, dryRun);
  enforceCanonicalAgentLayout(subCwd, dryRun);
  mergeSeparatedDocsIntoTickets(subCwd, dryRun);

  // 3. Spoke pointers: drop stale duplicates first, then redeploy.
  removeDuplicateRuleCopies(subCwd, dryRun);
  deploySpokePointers(subCwd, bundleRoot, dryRun, selectedTools);

  // 4. Bootstrap PROJECT_RULE.md from the bundled template, only when absent.
  const rulePath = join(subCwd, "PROJECT_RULE.md");
  const ruleTemplate = join(bundleRoot, "templates", "PROJECT_RULE.md");
  if (!existsSync(rulePath) && existsSync(ruleTemplate)) {
    if (!dryRun) copyFileSync(ruleTemplate, rulePath);
    console.log(`PROJECT_RULE.md: created from template`);
  }

  // 5. Templates & skills sync (.deuk-agent/templates/ and skill templates)
  syncTemplates(subCwd, bundleRoot, dryRun);
  syncSkillTemplates(subCwd, bundleRoot, dryRun);
}
304
1452
 
1453
/**
 * Merge-mode entry point: re-sync bundled templates and skill templates
 * into the current workspace.
 *
 * Guarded by the workflow gate: when plan mode is active and this is not
 * a dry run, the command refuses to mutate files and throws instead.
 *
 * @param {object} opts - CLI options (cwd, dryRun, workflow/approval flags).
 * @param {string} bundleRoot - Root of the bundled templates.
 * @throws {Error} When plan mode blocks execution and --dry-run is not set.
 */
export function runMerge(opts, bundleRoot) {
  const savedConfig = loadInitConfig(opts.cwd) || {};
  const mode = resolveWorkflowMode(opts, savedConfig);
  const canExecute = isWorkflowExecute({ ...opts, workflowMode: mode }, savedConfig);

  if (!opts.dryRun && !canExecute) {
    throw new Error(
      `[WORKFLOW BLOCKED] plan mode is active for ${opts.cwd}. Re-run with --workflow execute or --approval approved to apply file mutations. Use --dry-run for preparation only.`
    );
  }

  syncTemplates(opts.cwd, bundleRoot, opts.dryRun);
  syncSkillTemplates(opts.cwd, bundleRoot, opts.dryRun);
}