deuk-agent-flow 4.0.19

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59) hide show
  1. package/CHANGELOG.ko.md +223 -0
  2. package/CHANGELOG.md +227 -0
  3. package/LICENSE +184 -0
  4. package/README.ko.md +282 -0
  5. package/README.md +270 -0
  6. package/bin/deuk-agent-flow.js +50 -0
  7. package/bin/deuk-agent-rule.js +2 -0
  8. package/core-rules/AGENTS.md +153 -0
  9. package/core-rules/GEMINI.md +7 -0
  10. package/docs/architecture.ko.md +34 -0
  11. package/docs/architecture.md +33 -0
  12. package/docs/assets/architecture-v3.png +0 -0
  13. package/docs/how-it-works.ko.md +52 -0
  14. package/docs/how-it-works.md +71 -0
  15. package/docs/principles.ko.md +68 -0
  16. package/docs/principles.md +68 -0
  17. package/docs/usage-guide.ko.md +212 -0
  18. package/package.json +96 -0
  19. package/scripts/cli-args.mjs +200 -0
  20. package/scripts/cli-init-commands.mjs +1799 -0
  21. package/scripts/cli-init-logic.mjs +64 -0
  22. package/scripts/cli-prompts.mjs +104 -0
  23. package/scripts/cli-rule-compiler.mjs +112 -0
  24. package/scripts/cli-skill-commands.mjs +201 -0
  25. package/scripts/cli-telemetry-commands.mjs +599 -0
  26. package/scripts/cli-ticket-commands.mjs +2393 -0
  27. package/scripts/cli-ticket-index.mjs +298 -0
  28. package/scripts/cli-ticket-migration.mjs +320 -0
  29. package/scripts/cli-ticket-parser.mjs +209 -0
  30. package/scripts/cli-usage-commands.mjs +326 -0
  31. package/scripts/cli-utils.mjs +587 -0
  32. package/scripts/cli.mjs +246 -0
  33. package/scripts/lint-md.mjs +267 -0
  34. package/scripts/lint-rules.mjs +186 -0
  35. package/scripts/merge-logic.mjs +44 -0
  36. package/scripts/plan-parser.mjs +53 -0
  37. package/scripts/publish-dual-npm.mjs +141 -0
  38. package/scripts/smoke-npm-docker.mjs +102 -0
  39. package/scripts/smoke-npm-local.mjs +109 -0
  40. package/scripts/update-download-badge.mjs +107 -0
  41. package/templates/MODULE_RULE_TEMPLATE.md +11 -0
  42. package/templates/PROJECT_RULE.md +47 -0
  43. package/templates/TICKET_TEMPLATE.ko.md +44 -0
  44. package/templates/TICKET_TEMPLATE.md +44 -0
  45. package/templates/project-pilot/CONFORMANCE_GATE_TEMPLATE.md +23 -0
  46. package/templates/project-pilot/DRIFT_CHECKLIST.md +19 -0
  47. package/templates/project-pilot/FLOW_CONTRACT_TEMPLATE.md +26 -0
  48. package/templates/project-pilot/IMPLEMENTATION_MATRIX_TEMPLATE.md +30 -0
  49. package/templates/project-pilot/INTEGRATION_CONTRACT_TEMPLATE.md +26 -0
  50. package/templates/project-pilot/OWNER_MAP_TEMPLATE.md +15 -0
  51. package/templates/project-pilot/PROJECT_PILOT_RULE_TEMPLATE.md +34 -0
  52. package/templates/project-pilot/REFACTOR_CONTRACT_TEMPLATE.md +32 -0
  53. package/templates/project-pilot/REMEDIATION_PLAN_TEMPLATE.md +33 -0
  54. package/templates/rules.d/deukcontext-mcp.md +31 -0
  55. package/templates/rules.d/platform-coexistence.md +29 -0
  56. package/templates/skills/context-recall/SKILL.md +25 -0
  57. package/templates/skills/generated-file-guard/SKILL.md +25 -0
  58. package/templates/skills/project-pilot/SKILL.md +63 -0
  59. package/templates/skills/safe-refactor/SKILL.md +25 -0
@@ -0,0 +1,1799 @@
1
+ import { join, dirname, basename, relative } from "path";
2
+ import { homedir } from "os";
3
+ import { existsSync, readFileSync, writeFileSync, mkdirSync, copyFileSync, readdirSync, unlinkSync, rmSync, renameSync, statSync, cpSync, symlinkSync, chmodSync } from "fs";
4
+
5
+ import { ensureTicketDirAndGitignore } from "./cli-init-logic.mjs";
6
+ import { normalizeTicketPaths } from "./cli-ticket-migration.mjs";
7
+ import { rebuildTicketIndexFromTopicFilesIfNeeded } from "./cli-ticket-parser.mjs";
8
+ import { readTicketIndexJson, writeTicketIndexJson } from "./cli-ticket-index.mjs";
9
+
10
+ import { runInteractive } from "./cli-prompts.mjs";
11
+ import { AGENT_ROOT_DIR, TICKET_SUBDIR, TEMPLATE_SUBDIR, TICKET_INDEX_FILENAME, TICKET_LIST_FILENAME, discoverAllWorkspaces, isMcpActive, toRepoRelativePath, toPosixPath, resolveWorkflowMode, pruneRuleModules, loadInitConfig, writeInitConfig, isWorkflowExecute, normalizeWorkflowMode, SPOKE_REGISTRY, parseFrontMatter, stringifyFrontMatter, LEGACY_TEMPLATE_DIR, LEGACY_TICKET_DIR, LEGACY_TICKET_DIR_PLURAL, LEGACY_TICKET_DIR_ROOT, LEGACY_CONFIG_FILE, normalizeTicketGroup } from "./cli-utils.mjs";
12
+
13
/**
 * Read a directory and return its entries in a stable, name-sorted order.
 * Works with both plain string entries and Dirent objects (withFileTypes).
 *
 * @param {string} dir - Directory to read.
 * @param {object} [options] - Options forwarded to readdirSync.
 * @returns {Array} Entries sorted by name via localeCompare.
 */
function sortedDirEntries(dir, options = {}) {
  const nameOf = (entry) => String(typeof entry === "string" ? entry : entry.name);
  return readdirSync(dir, options).sort((left, right) => nameOf(left).localeCompare(nameOf(right)));
}
21
+
22
/**
 * Read a file as UTF-8 text, returning a fallback instead of throwing.
 *
 * @param {string} absPath - Path of the file to read.
 * @param {string} [fallback=""] - Value returned on any read error.
 * @returns {string} File contents, or the fallback when unreadable.
 */
function safeReadText(absPath, fallback = "") {
  let text;
  try {
    text = readFileSync(absPath, "utf8");
  } catch {
    text = fallback;
  }
  return text;
}
29
+
30
/**
 * Compare two files by their UTF-8 text content.
 * Unreadable files fall back to "" (via safeReadText), so two unreadable
 * paths compare as equal.
 *
 * @returns {boolean} True when both files yield identical text.
 */
function sameFileContent(leftPath, rightPath) {
  const leftText = safeReadText(leftPath);
  const rightText = safeReadText(rightPath);
  return leftText === rightText;
}
33
+
34
/**
 * Check whether a spoke/tool id was selected; "all" acts as a wildcard.
 * Non-array inputs are treated as "nothing selected".
 *
 * @param {string[]} [selectedTools] - User-selected tool ids.
 * @param {string} spokeId - Tool id to look up.
 * @returns {boolean}
 */
function isSelectedTool(selectedTools = [], spokeId) {
  if (!Array.isArray(selectedTools)) return false;
  return selectedTools.includes("all") || selectedTools.includes(spokeId);
}
38
+
39
// Sentinels that delimit the tool-managed region inside user-editable
// markdown files; content between them is rewritten on re-init.
const MANAGED_BLOCK_BEGIN = "<!-- deuk-agent-managed:begin -->";
const MANAGED_BLOCK_END = "<!-- deuk-agent-managed:end -->";
// [command name, bin script] pairs for which source-mode shims are created:
// both the dashed and the concatenated alias of each CLI entry point.
const SOURCE_MODE_COMMANDS = [
  ["deuk-agent-flow", "deuk-agent-flow.js"],
  ["deukagentflow", "deuk-agent-flow.js"],
  ["deuk-agent-rule", "deuk-agent-rule.js"],
  ["deukagentrule", "deuk-agent-rule.js"]
];
47
+
48
/**
 * Split a PATH-style environment value into its directory entries.
 * Uses ";" on Windows and ":" elsewhere; empty segments are dropped.
 *
 * @param {string} [pathEnv] - PATH value (defaults to process.env.PATH).
 * @param {string} [platform] - Node platform id (defaults to process.platform).
 * @returns {string[]}
 */
function pathEntries(pathEnv = process.env.PATH || "", platform = process.platform) {
  const separator = platform === "win32" ? ";" : ":";
  return String(pathEnv || "")
    .split(separator)
    .filter((entry) => entry.length > 0);
}
51
+
52
/**
 * Determine whether a command is already resolvable from PATH.
 * On Windows the common executable suffixes (.cmd/.bat/.exe) are probed too.
 *
 * @returns {boolean} True when any PATH entry contains the command file.
 */
function commandAlreadyOnPath(commandName, pathEnv = process.env.PATH || "", platform = process.platform) {
  const suffixes = platform === "win32" ? ["", ".cmd", ".bat", ".exe"] : [""];
  for (const dir of pathEntries(pathEnv, platform)) {
    for (const suffix of suffixes) {
      if (existsSync(join(dir, `${commandName}${suffix}`))) return true;
    }
  }
  return false;
}
56
+
57
/**
 * Directory where source-mode command shims are installed:
 * Windows: %USERPROFILE%\AppData\Roaming\npm; POSIX: ~/.local/bin.
 */
function sourceModeBinDir(homeDir = homedir(), platform = process.platform) {
  if (platform === "win32") {
    return join(homeDir, "AppData", "Roaming", "npm");
  }
  return join(homeDir, ".local", "bin");
}
62
+
63
/**
 * Write a Windows .cmd shim that forwards all arguments to the target script.
 * Double quotes inside the target path are escaped so the node invocation
 * stays intact.
 */
function writeWindowsCommandShim(shimPath, targetScript) {
  const escapedTarget = String(targetScript).replace(/"/g, '\\"');
  const body = `@ECHO OFF\r\nnode "${escapedTarget}" %*\r\n`;
  writeFileSync(shimPath, body, "utf8");
}
67
+
68
/**
 * Write an executable POSIX shell shim that execs node on the target script.
 * Single quotes in the path are escaped with the '\'' idiom; the shim is
 * marked executable (0755).
 */
function writePosixCommandShim(shimPath, targetScript) {
  const escapedTarget = String(targetScript).replace(/'/g, "'\\''");
  const body = `#!/bin/sh\nexec node '${escapedTarget}' "$@"\n`;
  writeFileSync(shimPath, body, "utf8");
  chmodSync(shimPath, 0o755);
}
73
+
74
/**
 * Install PATH shims for the source-mode CLIs shipped under <bundleRoot>/bin.
 * Commands already resolvable on PATH are skipped; the rest get a .cmd shim
 * (Windows) or a symlink (POSIX, target chmod'ed 0755) in the shim bin dir.
 * Dry runs report the would-be-created commands without touching disk.
 *
 * @param {string} bundleRoot - Root of the source bundle containing bin/.
 * @param {object} [opts] - { platform, homeDir, pathEnv, dryRun, binDir }.
 * @returns {{created: string[], skipped: string[], binDir: string|null, onPath: boolean}}
 */
export function ensureSourceModeCommandShims(bundleRoot, opts = {}) {
  const platform = opts.platform || process.platform;
  const homeDir = opts.homeDir || homedir();
  const pathEnv = opts.pathEnv ?? process.env.PATH ?? "";
  const dryRun = Boolean(opts.dryRun);
  const binRoot = join(bundleRoot, "bin");

  // Without the flow entry script there is nothing to shim.
  if (!existsSync(join(binRoot, "deuk-agent-flow.js"))) {
    return { created: [], skipped: [], binDir: null, onPath: false };
  }

  const binDir = opts.binDir || sourceModeBinDir(homeDir, platform);
  const result = {
    created: [],
    skipped: [],
    binDir,
    onPath: pathEntries(pathEnv, platform).includes(binDir),
  };

  // POSIX install: symlink, replacing a stale shim if one already exists.
  const installPosixSymlink = (target, shim) => {
    try {
      symlinkSync(target, shim);
    } catch (err) {
      if (err?.code !== "EEXIST") throw err;
      unlinkSync(shim);
      symlinkSync(target, shim);
    }
    chmodSync(target, 0o755);
  };

  for (const [commandName, scriptName] of SOURCE_MODE_COMMANDS) {
    if (commandAlreadyOnPath(commandName, pathEnv, platform)) {
      result.skipped.push(commandName);
      continue;
    }
    if (!dryRun) {
      mkdirSync(binDir, { recursive: true });
      const targetScript = join(binRoot, scriptName);
      if (platform === "win32") {
        writeWindowsCommandShim(join(binDir, `${commandName}.cmd`), targetScript);
      } else {
        installPosixSymlink(targetScript, join(binDir, commandName));
      }
    }
    result.created.push(commandName);
  }

  return result;
}
116
+
117
/**
 * Wrap content in the managed-block sentinel comments.
 * Trailing whitespace on the content is trimmed so the END marker sits
 * directly under the last content line.
 */
function wrapManagedBlock(content) {
  const body = String(content || "").trimEnd();
  return [MANAGED_BLOCK_BEGIN, body, MANAGED_BLOCK_END].join("\n");
}
120
+
121
/**
 * Locate the managed block inside existing file content.
 *
 * @param {string} content - Full file text.
 * @returns {{before: string, managed: string, after: string}|null}
 *   Split parts (before trimmed at its end, managed fully trimmed, after
 *   trimmed at its start), or null when BEGIN is absent or END does not
 *   appear after BEGIN.
 */
function splitManagedBlock(content) {
  const text = String(content || "");

  const beginIdx = text.indexOf(MANAGED_BLOCK_BEGIN);
  if (beginIdx === -1) return null;
  const endIdx = text.indexOf(MANAGED_BLOCK_END, beginIdx);
  if (endIdx === -1) return null;

  return {
    before: text.slice(0, beginIdx).trimEnd(),
    managed: text.slice(beginIdx + MANAGED_BLOCK_BEGIN.length, endIdx).trim(),
    after: text.slice(endIdx + MANAGED_BLOCK_END.length).trimStart(),
  };
}
135
+
136
/**
 * Merge managed content into an existing file body.
 * - Empty/blank existing content: emit just the managed block.
 * - Existing sentinels: replace only the managed region, keeping the user's
 *   text before and after it.
 * - No sentinels but the managed body already appears verbatim: leave the
 *   content as-is (only normalizes the trailing newline).
 * - Otherwise: append the managed block after the existing content.
 * Always returns text ending in a single trailing newline.
 */
function mergeManagedBlock(existing, managedContent) {
  const current = String(existing || "");
  const nextBlock = wrapManagedBlock(managedContent);

  if (!current.trim()) return `${nextBlock}\n`;
  const currentBlock = splitManagedBlock(current);
  if (currentBlock) {
    // filter(Boolean) drops empty before/after segments so no leading or
    // trailing blank separators are emitted.
    return [currentBlock.before, nextBlock, currentBlock.after].filter(Boolean).join("\n\n").trimEnd() + "\n";
  }

  const cleaned = current.trimEnd();
  const managedBody = String(managedContent || "").trim();
  if (managedBody && cleaned.includes(managedBody)) return cleaned + "\n";

  return `${cleaned}\n\n${nextBlock}\n`;
}
152
+
153
/**
 * Ensure nothing non-directory occupies a path we intend to use as a
 * directory. Despite the name, this neither creates the directory nor checks
 * permissions:
 * - path absent, or already a directory: no-op.
 * - path exists but is not a directory: rename it aside to the first free
 *   "<dir>.bak[.N]" backup name so a directory can be created there later.
 * - stat failure: log under DEBUG only and leave the path alone.
 *
 * @param {string} dirAbs - Absolute path that should be a directory.
 * @param {string} cwd - Repo root used for relative-path logging.
 * @param {boolean} dryRun - When true, log the planned rename without doing it.
 * @param {string} label - Human-readable label for the migration log line.
 */
function ensureWritableDirectory(dirAbs, cwd, dryRun, label) {
  if (!existsSync(dirAbs)) return;

  try {
    if (statSync(dirAbs).isDirectory()) return;
  } catch (err) {
    if (process.env.DEBUG) console.warn(`[DEBUG] Failed to inspect ${dirAbs}:`, err);
    return;
  }

  // Pick a non-clobbering backup name: <dir>.bak, <dir>.bak.1, <dir>.bak.2, ...
  const backupBase = `${dirAbs}.bak`;
  let backupAbs = backupBase;
  let index = 1;
  while (existsSync(backupAbs)) {
    backupAbs = `${backupBase}.${index}`;
    index += 1;
  }

  const relDir = toRepoRelativePath(cwd, dirAbs);
  const relBackup = toRepoRelativePath(cwd, backupAbs);
  if (!dryRun) {
    renameSync(dirAbs, backupAbs);
  }
  // NOTE(review): this line is printed even on dry runs, without the
  // "[DRY-RUN]" prefix other helpers in this file use — confirm intent.
  console.log(`[MIGRATE] ${label}: ${relDir} -> ${relBackup}`);
}
178
+
179
/**
 * Move a file to a new location, deduplicating on the way.
 * - Same source and destination: no-op (returns false).
 * - Destination exists with identical content: delete the source (true).
 * - Destination exists with different content: warn and skip (false).
 * - Otherwise: rename the source into place, creating parent dirs (true).
 * Dry runs log the action without touching the filesystem.
 *
 * @returns {boolean} True when the source was moved or removed.
 */
function moveOrMergeFile(srcAbs, dstAbs, cwd, dryRun, action) {
  if (srcAbs === dstAbs) return false;

  const relSrc = toRepoRelativePath(cwd, srcAbs);
  const relDst = toRepoRelativePath(cwd, dstAbs);

  if (!existsSync(dstAbs)) {
    if (!dryRun) {
      mkdirSync(dirname(dstAbs), { recursive: true });
      renameSync(srcAbs, dstAbs);
    }
    console.log(`[MIGRATE] ${dryRun ? "Would move" : "Moved"} ${action}: ${relSrc} -> ${relDst}`);
    return true;
  }

  if (sameFileContent(srcAbs, dstAbs)) {
    if (!dryRun) unlinkSync(srcAbs);
    console.log(`[MIGRATE] ${action} duplicate removed: ${relSrc} -> ${relDst}`);
    return true;
  }

  console.warn(`[WARNING] ${action} conflict: destination exists with different content, skipped ${relSrc}`);
  return false;
}
202
+
203
/**
 * Strip a "-legacy-docs" (optionally "-legacy-docs-NN") suffix from an
 * archived markdown file name, yielding the canonical "<stem>.md" name.
 * Names without the suffix pass through unchanged; case-insensitive.
 */
function archiveLegacyVariantBaseName(fileName) {
  const name = String(fileName || "");
  return name.replace(/-legacy-docs(?:-\d{2})?\.md$/i, ".md");
}
206
+
207
/**
 * Fold a "-legacy-docs" archive variant file into its canonical sibling.
 * Resolution ladder:
 *   1. No canonical file yet: just move the variant into place.
 *   2. Byte-identical: delete the variant.
 *   3. Variant body (front matter stripped) contained in canonical: the
 *      canonical already subsumes it — delete the variant.
 *   4. Canonical body contained in variant: the variant is richer — promote
 *      its full raw content over the canonical file.
 *   5. Genuinely divergent: append the variant body to the canonical file
 *      under a "## Merged Legacy Archive Variant" heading.
 * Relies on stripFrontMatterBlock (defined elsewhere in this module) to
 * compare bodies without their front matter.
 *
 * @returns {{moved: boolean, finalTarget: string}} moved reflects whether the
 *   source file was consumed (except case 1, where it mirrors moveOrMergeFile).
 */
function mergeArchiveVariantIntoCanonical(sourceAbs, targetAbs, cwd, dryRun, action) {
  if (!existsSync(targetAbs)) {
    const moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, `${action} normalize legacy variant`);
    return { moved, finalTarget: targetAbs };
  }

  const sourceRaw = safeReadText(sourceAbs);
  const targetRaw = safeReadText(targetAbs);
  if (sourceRaw === targetRaw) {
    if (!dryRun) unlinkSync(sourceAbs);
    console.log(`[MIGRATE] ${action} duplicate removed: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
    return { moved: true, finalTarget: targetAbs };
  }

  const sourceBody = stripFrontMatterBlock(sourceRaw);
  const targetBody = stripFrontMatterBlock(targetRaw);
  if (sourceBody && targetBody.includes(sourceBody)) {
    if (!dryRun) unlinkSync(sourceAbs);
    console.log(`[MIGRATE] ${action} merged duplicate variant removed: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
    return { moved: true, finalTarget: targetAbs };
  }

  if (targetBody && sourceBody.includes(targetBody)) {
    if (!dryRun) {
      // Keep the variant's raw content (including its front matter).
      writeFileSync(targetAbs, sourceRaw, "utf8");
      unlinkSync(sourceAbs);
    }
    console.log(`[MIGRATE] ${action} promoted richer legacy variant: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
    return { moved: true, finalTarget: targetAbs };
  }

  // Divergent content: append the variant body under a marker heading so
  // nothing is lost.
  const mergedSection = [
    "",
    "## Merged Legacy Archive Variant",
    "",
    sourceBody,
    ""
  ].join("\n");
  const nextRaw = `${targetRaw.trimEnd()}${mergedSection}`;
  if (!dryRun) {
    writeFileSync(targetAbs, nextRaw, "utf8");
    unlinkSync(sourceAbs);
  }
  console.log(`[MIGRATE] ${action} merged legacy variant content: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
  return { moved: true, finalTarget: targetAbs };
}
253
+
254
/**
 * Extract the leading "YYYY-MM" prefix from a date-like string.
 *
 * @returns {string|null} "YYYY-MM", or null when the value does not start
 *   with a four-digit year, a dash, and a two-digit month.
 */
function parseYearMonth(value) {
  const match = /^(\d{4})-(\d{2})/.exec(String(value || ""));
  return match ? `${match[1]}-${match[2]}` : null;
}
259
+
260
/**
 * Extract a two-digit day-of-month from either a full "YYYY-MM-DD..." date
 * string or a bare two-digit "DD" value.
 *
 * @param {*} value - Candidate date string.
 * @returns {string|null} The "DD" component, or null when neither form matches.
 */
function parseDay(value) {
  const text = String(value || "");

  const fullDate = text.match(/^\d{4}-(\d{2})-(\d{2})/);
  // The day capture is always exactly two digits when the regex matches, so
  // no fallback is needed (the previous `|| "01"` branch was unreachable).
  if (fullDate) return fullDate[2];

  const onlyDay = text.match(/^\d{2}$/);
  return onlyDay ? onlyDay[0] : null;
}
268
+
269
/**
 * Derive the "YYYY-MM" archive partition for a migrated file.
 * Prefers explicit metadata on the index entry (archiveYearMonth, then
 * createdAt, then updatedAt); otherwise falls back to the supplied date.
 *
 * @param {*} statSource - Accepted for call-site compatibility; currently unused.
 * @param {object} [entry] - Ticket index entry with optional date fields.
 * @param {Date} [fallbackDate] - Date used when no entry field parses (now).
 * @returns {{yearMonth: string}}
 */
function inferPartitionFromFile(statSource, entry, fallbackDate = new Date()) {
  const fromEntry =
    parseYearMonth(entry?.archiveYearMonth)
    || parseYearMonth(entry?.createdAt)
    || parseYearMonth(entry?.updatedAt);
  if (fromEntry) return { yearMonth: fromEntry };

  const year = String(fallbackDate.getFullYear());
  const month = String(fallbackDate.getMonth() + 1).padStart(2, "0");
  return { yearMonth: `${year}-${month}` };
}
276
+
277
/**
 * Load the ticket index and build fast lookup maps over its entries.
 * Null/undefined entries are skipped; entries lacking a fileName or id are
 * simply absent from the corresponding map.
 *
 * @param {string} cwd - Repo root passed through to readTicketIndexJson.
 * @returns {{indexJson: object, byFileName: Map, byId: Map}}
 */
function mapTicketIndexByFileName(cwd) {
  const indexJson = readTicketIndexJson(cwd);
  const byFileName = new Map();
  const byId = new Map();

  const entries = indexJson.entries || [];
  for (const entry of entries) {
    if (!entry) continue;
    if (entry.fileName) byFileName.set(entry.fileName, entry);
    if (entry.id) byId.set(entry.id, entry);
  }

  return { indexJson, byFileName, byId };
}
288
+
289
/**
 * Derive the canonical ticket file name backing a plan/report document:
 * "foo-plan.md" and "foo-report.md" both map to "foo.md" (case-insensitive
 * suffix match).
 *
 * @returns {string|null} Ticket file name, or null for an empty stem.
 */
function deriveDocTicketFileName(mdPath) {
  const stem = basename(mdPath, ".md").replace(/-(plan|report)$/i, "");
  return stem ? `${stem}.md` : null;
}
295
+
296
/**
 * True for ticket statuses considered "in play": "open" or "active".
 */
function isActiveTicketStatus(status) {
  switch (status) {
    case "open":
    case "active":
      return true;
    default:
      return false;
  }
}
299
+
300
/**
 * Choose the docs/ sub-bucket for a migrated document file name.
 * NOTE(review): every branch currently resolves to "plan" — the plan/report
 * distinction is parsed but not acted on, so this effectively always returns
 * "plan". Kept as-is to preserve the routing seam for future buckets;
 * confirm whether reports were meant to land somewhere else.
 *
 * @param {string} fileName - Base file name (matched case-insensitively).
 * @returns {string} Docs bucket name (currently always "plan").
 */
function inferDocsBucketFromFileName(fileName) {
  const lower = fileName.toLowerCase();
  if (lower.endsWith("-plan.md") || lower === "plan.md") return "plan";
  if (lower.endsWith("-report.md") || lower.startsWith("report-")) return "plan";
  return "plan";
}
306
+
307
/**
 * Resolve the ticket-index entry that a plan/report document belongs to.
 * Candidate names tried, in insertion order: the document's own file name,
 * its derived ticket file name, and "<sourceMeta.id>.md". Each is looked up
 * first in the fileName map, then (with ".md" stripped) in the id map.
 * Falls back to a direct id lookup from sourceMeta.
 *
 * @returns {object|null} Matching index entry, or null.
 */
function resolveDocTicketEntry(fileName, sourceMeta, byFileName, byId) {
  const candidates = new Set([fileName]);
  const derived = deriveDocTicketFileName(fileName);
  if (derived) candidates.add(derived);
  if (sourceMeta?.id) candidates.add(`${sourceMeta.id}.md`);

  for (const candidate of candidates) {
    const byNameHit = byFileName.get(candidate);
    if (byNameHit) return byNameHit;
  }

  for (const candidate of candidates) {
    const byIdHit = byId.get(candidate.replace(/\.md$/i, ""));
    if (byIdHit) return byIdHit;
  }

  if (!sourceMeta?.id) return null;
  return byId.get(sourceMeta.id) || null;
}
328
+
329
/**
 * Depth-first removal of empty directories under (and including) dir.
 * Children are pruned first so a directory that only contained empty
 * subdirectories is itself removed. Non-empty directories are left alone;
 * filesystem errors surface only under DEBUG.
 * NOTE(review): the cleanup line is logged even on dry runs.
 */
function removeEmptyDirsBottomUp(dir, cwd, dryRun) {
  if (!existsSync(dir)) return;

  for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
    if (!entry.isDirectory()) continue;
    removeEmptyDirsBottomUp(join(dir, entry.name), cwd, dryRun);
  }

  try {
    if (sortedDirEntries(dir).length > 0) return;
    if (!dryRun) rmSync(dir, { recursive: true, force: true });
    console.log(`[CLEANUP] removed empty directory: ${toRepoRelativePath(cwd, dir)}`);
  } catch (err) {
    if (process.env.DEBUG) console.warn(`[DEBUG] Failed to prune ${dir}:`, err);
  }
}
343
+
344
/**
 * Accumulate the absolute paths of all regular files under dir, recursively,
 * visiting entries in sorted order. Entries that are neither files nor
 * directories are ignored; a missing root yields the accumulator unchanged.
 *
 * @param {string} dir - Root directory.
 * @param {string[]} [out] - Accumulator; also the return value.
 * @returns {string[]}
 */
function collectFilesRecursively(dir, out = []) {
  if (!existsSync(dir)) return out;

  for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      collectFilesRecursively(entryPath, out);
    } else if (entry.isFile()) {
      out.push(entryPath);
    }
  }

  return out;
}
356
+
357
/**
 * Compute the destination path for a migrated document file:
 * <cwd>/<AGENT_ROOT_DIR>/docs/<fallbackDir>/<basename(sourceAbs)>.
 *
 * @param {string} [fallbackDir="plan"] - Docs bucket to place the file in.
 */
function classifyDocTarget(cwd, sourceAbs, fallbackDir = "plan") {
  return join(cwd, AGENT_ROOT_DIR, "docs", fallbackDir, basename(sourceAbs));
}
362
+
363
/**
 * Heuristically decide whether a JSON file is a distilled-knowledge record.
 * Accepts either the modern shape (core fields plus provenance metadata:
 * sourceKind/ingestionCategory/corpus/originTool/freshness/refreshPolicy) or
 * the legacy shape (just the core fields). Unreadable or unparsable files
 * are simply "not knowledge".
 *
 * @returns {boolean}
 */
function isDistilledKnowledgeJson(sourceAbs) {
  let data;
  try {
    data = JSON.parse(safeReadText(sourceAbs));
  } catch {
    return false;
  }

  const isRecord = (v) => Boolean(v) && typeof v === "object";
  if (!isRecord(data)) return false;

  const hasCoreShape =
    typeof data.id === "string"
    && typeof data.summary === "string"
    && typeof data.sourceTicketPath === "string"
    && isRecord(data.sections)
    && isRecord(data.analysis);

  const hasModernMetadata =
    hasCoreShape
    && data.sourceKind === "ticket"
    && data.ingestionCategory === "archived_ticket"
    && data.corpus === "tickets"
    && data.originTool === "ticket-archive"
    && data.freshness === "archived"
    && data.refreshPolicy === "refresh-on-stale";

  // The modern shape is a strict superset of the legacy core shape, so the
  // core check alone decides acceptance; both are kept to document intent.
  return hasModernMetadata || hasCoreShape;
}
399
+
400
/**
 * Route a stray agent file to its canonical destination:
 * - *.json recognized as distilled knowledge -> <root>/knowledge/
 * - *.md / *.deuk                            -> <root>/docs/<fallbackDir>/
 * - everything else (incl. unrecognized json) -> <root>/docs/<fallbackDir>/
 */
function classifyAgentFileTarget(cwd, sourceAbs, fallbackDir = "plan") {
  const fileName = basename(sourceAbs);
  const lower = fileName.toLowerCase();

  if (lower.endsWith(".json") && isDistilledKnowledgeJson(sourceAbs)) {
    return join(cwd, AGENT_ROOT_DIR, "knowledge", fileName);
  }
  if (lower.endsWith(".md") || lower.endsWith(".deuk")) {
    return classifyDocTarget(cwd, sourceAbs, fallbackDir);
  }
  return join(cwd, AGENT_ROOT_DIR, "docs", fallbackDir, fileName);
}
415
+
416
/**
 * Merge the contents of directory src into dest:
 * - dest absent: rename the whole src tree into place.
 * - both present: recurse into subdirectories, move files missing at the
 *   destination, delete source files whose destination copy is byte-identical,
 *   and warn-and-skip on content conflicts.
 * - finally, remove src if it ended up empty (non-dry runs only).
 * NOTE(review): dry-run logging is uneven — the whole-tree rename branch logs
 * nothing on dry runs, and identical-file removals are only logged when
 * actually performed; confirm whether that is intended.
 */
function recursiveMerge(src, dest, cwd, dryRun) {
  if (!existsSync(src)) return;
  if (!existsSync(dest)) {
    if (!dryRun) {
      mkdirSync(dirname(dest), { recursive: true });
      renameSync(src, dest);
    }
    return;
  }
  // Both exist, merge contents
  const entries = sortedDirEntries(src, { withFileTypes: true });
  for (const ent of entries) {
    const sPath = join(src, ent.name);
    const dPath = join(dest, ent.name);
    if (ent.isDirectory()) {
      recursiveMerge(sPath, dPath, cwd, dryRun);
    } else {
      if (!existsSync(dPath)) {
        if (!dryRun) {
          renameSync(sPath, dPath);
          console.log(`[MIGRATE] Moved: ${toRepoRelativePath(cwd, sPath)} -> ${toRepoRelativePath(cwd, dPath)}`);
        } else {
          console.log(`[DRY-RUN] Would move: ${toRepoRelativePath(cwd, sPath)} -> ${toRepoRelativePath(cwd, dPath)}`);
        }
      } else {
        // If destination exists, check if content is identical
        const sContent = readFileSync(sPath, "utf8");
        const dContent = readFileSync(dPath, "utf8");
        if (sContent === dContent) {
          if (!dryRun) {
            unlinkSync(sPath);
            console.log(`[MIGRATE] Removed identical file: ${toRepoRelativePath(cwd, sPath)}`);
          }
        } else {
          console.warn(`[WARNING] Migration conflict: ${toRepoRelativePath(cwd, dPath)} already exists with different content. Skipping.`);
        }
      }
    }
  }
  // Clean up src if empty
  try {
    if (!dryRun && sortedDirEntries(src).length === 0) {
      rmSync(src, { recursive: true });
      console.log(`[MIGRATE] Removed empty directory: ${toRepoRelativePath(cwd, src)}`);
    }
  } catch (err) {
    if (process.env.DEBUG) console.warn(`[DEBUG] Failed to clean up ${src}:`, err);
  }
}
465
+
466
/**
 * Top-level legacy layout migration.
 * Consolidates all pre-existing on-disk layouts into the current
 * AGENT_ROOT_DIR structure: merges legacy template dirs, archives old ticket
 * dirs, moves the legacy config file, relocates report/scratch/archive files,
 * prunes empty legacy dirs, and strips obsolete pointer files.
 *
 * @param {string} cwd - Repository root.
 * @param {boolean} dryRun - Log planned changes without performing them.
 */
export function migrateLegacyStructure(cwd, dryRun) {

  // 1. Merge the legacy template directory into the canonical location.
  const legacyTemplates = join(cwd, LEGACY_TEMPLATE_DIR);
  const newTemplates = join(cwd, AGENT_ROOT_DIR, TEMPLATE_SUBDIR);
  if (existsSync(legacyTemplates)) {
    console.log(`[MIGRATE] Merging legacy templates into ${AGENT_ROOT_DIR}/${TEMPLATE_SUBDIR}`);
    recursiveMerge(legacyTemplates, newTemplates, cwd, dryRun);
    // Drop whatever remains of the legacy tree (e.g. conflicted files).
    if (!dryRun && existsSync(legacyTemplates)) rmSync(legacyTemplates, { recursive: true, force: true });
  }

  // 2. Archive every known legacy ticket directory variant.
  const legacyTickets = join(cwd, LEGACY_TICKET_DIR);
  const legacyTicketsPlural = join(cwd, LEGACY_TICKET_DIR_PLURAL);
  const newTickets = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);

  if (existsSync(legacyTickets)) {
    migrateLegacyTicketDirToArchive(cwd, legacyTickets, "legacy singular ticket directory", dryRun);
  }
  if (existsSync(legacyTicketsPlural)) {
    migrateLegacyTicketDirToArchive(cwd, legacyTicketsPlural, "legacy plural ticket directory", dryRun);
  }
  migrateLegacyAgentWorkflows(cwd, dryRun);
  migrateLegacyRootTicketDir(cwd, dryRun);
  removeLegacyContainer(cwd, dryRun);

  // Move the legacy config file, or discard it when a new one already exists.
  const legacyConfig = join(cwd, LEGACY_CONFIG_FILE);
  const newConfig = join(cwd, AGENT_ROOT_DIR, "config.json");
  if (existsSync(legacyConfig)) {
    if (!existsSync(newConfig)) {
      console.log(`[MIGRATE] Moving legacy config to ${AGENT_ROOT_DIR}/config.json`);
      if (!dryRun) {
        mkdirSync(join(cwd, AGENT_ROOT_DIR), { recursive: true });
        renameSync(legacyConfig, newConfig);
      }
    } else {
      console.log(`[MIGRATE] Removing redundant legacy config`);
      if (!dryRun) unlinkSync(legacyConfig);
    }
  }

  // Relocate report/scratch/archive content, then prune emptied dirs.
  migrateLegacyReports(cwd, dryRun);
  migrateLegacyScratchReports(cwd, dryRun);
  migrateLegacyArchiveTickets(cwd, dryRun);
  pruneEmptyLegacyTicketDirs(cwd, dryRun);

  // 3. Clean up redundant legacy pointer files from the target directory
  if (existsSync(newTickets)) {
    for (const file of ["ACTIVE_TICKET.md", "ACTIVE_TICKET.json", "LATEST.md"]) {
      const p = join(newTickets, file);
      if (existsSync(p)) {
        console.log(`[MIGRATE] Removing redundant pointer file: ${file}`);
        if (!dryRun) unlinkSync(p);
      }
    }
  }
}
521
+
522
/**
 * Relocate a legacy ticket directory into the canonical archive tree at
 * <root>/<tickets>/archive/sub/<YYYY-MM>/<name>-import[-NN], choosing the
 * first unused numbered suffix so earlier imports are never clobbered.
 * The partition month is "now", not the tickets' own dates.
 */
function migrateLegacyTicketDirToArchive(cwd, legacyTicketDir, label, dryRun) {
  const now = new Date();
  const yearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;
  const importRoot = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "sub", yearMonth);

  // Sanitize the directory name: drop a leading dot, squash odd characters.
  const baseName = basename(legacyTicketDir).replace(/^\./, "").replace(/[^a-z0-9-]+/gi, "-");

  let targetDir = join(importRoot, `${baseName}-import`);
  for (let index = 1; existsSync(targetDir); index += 1) {
    targetDir = join(importRoot, `${baseName}-import-${String(index).padStart(2, "0")}`);
  }

  const relSource = toRepoRelativePath(cwd, legacyTicketDir);
  const relTarget = toRepoRelativePath(cwd, targetDir);
  if (dryRun) {
    console.log(`[DRY-RUN] Would move ${label}: ${relSource} -> ${relTarget}`);
    return;
  }

  mkdirSync(importRoot, { recursive: true });
  renameSync(legacyTicketDir, targetDir);
  console.log(`[MIGRATE] Moved ${label}: ${relSource} -> ${relTarget}`);
}
545
+
546
/**
 * Move a legacy root-level ticket directory (LEGACY_TICKET_DIR_ROOT) into
 * the canonical archive tree under the current month, as
 * archive/sub/<YYYY-MM>/ticket-import[-NN]. No-op when the dir is absent.
 */
function migrateLegacyRootTicketDir(cwd, dryRun) {
  const legacyTicketDir = join(cwd, LEGACY_TICKET_DIR_ROOT);
  if (!existsSync(legacyTicketDir)) return;

  const now = new Date();
  const yearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;
  const importRoot = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "sub", yearMonth);

  // First free "ticket-import[-NN]" name so prior imports survive.
  let targetDir = join(importRoot, "ticket-import");
  for (let index = 1; existsSync(targetDir); index += 1) {
    targetDir = join(importRoot, `ticket-import-${String(index).padStart(2, "0")}`);
  }

  const relSource = toRepoRelativePath(cwd, legacyTicketDir);
  const relTarget = toRepoRelativePath(cwd, targetDir);

  if (dryRun) {
    console.log(`[DRY-RUN] Would move legacy root ticket directory: ${relSource} -> ${relTarget}`);
    return;
  }

  mkdirSync(importRoot, { recursive: true });
  renameSync(legacyTicketDir, targetDir);
  console.log(`[MIGRATE] Moved legacy root ticket directory: ${relSource} -> ${relTarget}`);
}
572
+
573
/**
 * Archive flat markdown files from the legacy ".agent/workflows" directory
 * into <root>/docs/archive/<YYYY-MM>/agent-workflow-<name>, then prune any
 * now-empty ".agent" subtree. listFlatMarkdownFiles is defined elsewhere in
 * this module.
 * NOTE(review): inferPartitionFromFile ignores its first argument and gets no
 * index entry here, so the partition is always the current month and the
 * statSync result is effectively unused — confirm whether file mtimes were
 * meant to drive the partition.
 */
function migrateLegacyAgentWorkflows(cwd, dryRun) {
  const workflowsDir = join(cwd, ".agent", "workflows");
  if (!existsSync(workflowsDir)) return;

  for (const sourceAbs of listFlatMarkdownFiles(workflowsDir)) {
    const partition = inferPartitionFromFile(statSync(sourceAbs));
    const targetName = `agent-workflow-${basename(sourceAbs)}`;
    const targetAbs = join(cwd, AGENT_ROOT_DIR, "docs", "archive", partition.yearMonth, targetName);
    moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "legacy agent workflow cleanup");
  }

  removeEmptyDirsBottomUp(join(cwd, ".agent"), cwd, dryRun);
}
586
+
587
/**
 * Delete the obsolete "<root>/legacy" container directory outright.
 * NOTE(review): the cleanup line is logged even on dry runs, although
 * nothing is removed in that case.
 */
function removeLegacyContainer(cwd, dryRun) {
  const legacyContainer = join(cwd, AGENT_ROOT_DIR, "legacy");
  if (!existsSync(legacyContainer)) return;

  if (!dryRun) rmSync(legacyContainer, { recursive: true, force: true });
  console.log(`[CLEANUP] removed legacy container: ${toRepoRelativePath(cwd, legacyContainer)}`);
}
596
+
597
/**
 * Move legacy report markdown files from <tickets>/reports into docs/plan.
 * Identical duplicates at the destination are deleted at the source; content
 * conflicts are warned about and skipped. Archive tickets referencing a moved
 * report get their links rewritten via rewriteLegacyReportLinks.
 *
 * Fix: the destination directory is no longer created during dry runs
 * (mkdirSync previously ran unconditionally, mutating the repo on --dry-run),
 * and the source file is only read when a destination copy exists to compare
 * against.
 *
 * @param {string} cwd - Repository root.
 * @param {boolean} dryRun - Log planned changes without performing them.
 */
export function migrateLegacyReports(cwd, dryRun) {
  const legacyReportsDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "reports");
  const reportTargetDir = join(cwd, AGENT_ROOT_DIR, "docs", "plan");
  if (!existsSync(legacyReportsDir)) return;

  const reportFiles = sortedDirEntries(legacyReportsDir, { withFileTypes: true })
    .filter(ent => ent.isFile() && ent.name.endsWith(".md"))
    .map(ent => join(legacyReportsDir, ent.name));

  if (reportFiles.length === 0) return;

  // Only touch the filesystem outside dry runs.
  if (!dryRun) mkdirSync(reportTargetDir, { recursive: true });

  for (const sourceAbs of reportFiles) {
    const fileName = basename(sourceAbs);
    const targetAbs = join(reportTargetDir, fileName);

    if (existsSync(targetAbs)) {
      const sourceBody = readFileSync(sourceAbs, "utf8");
      const targetBody = readFileSync(targetAbs, "utf8");
      if (targetBody !== sourceBody) {
        console.warn(`[WARNING] Legacy report conflict: ${toRepoRelativePath(cwd, targetAbs)} already exists with different content. Skipping move.`);
        continue;
      }
      if (!dryRun) unlinkSync(sourceAbs);
      console.log(`[MIGRATE] Removed duplicate legacy report: ${toRepoRelativePath(cwd, sourceAbs)}`);
      rewriteLegacyReportLinks(cwd, targetAbs, fileName, dryRun);
      continue;
    }

    // Rewrite references before the move so the pre-filter inside
    // rewriteLegacyReportLinks still matches the old "reports/" paths.
    rewriteLegacyReportLinks(cwd, targetAbs, fileName, dryRun);
    if (!dryRun) {
      renameSync(sourceAbs, targetAbs);
    }
    console.log(`[MIGRATE] Moved legacy report: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
  }
}
634
+
635
/**
 * Move files out of the deprecated docs/scratch directory into docs/plan.
 * Identical duplicates are deleted at the source; files that differ from an
 * existing destination copy are warned about and left in place.
 *
 * Fix: the destination directory is no longer created during dry runs
 * (mkdirSync previously ran unconditionally, mutating the repo on --dry-run),
 * and the source file is only read when a destination copy exists to compare
 * against.
 *
 * @param {string} cwd - Repository root.
 * @param {boolean} dryRun - Log planned changes without performing them.
 */
export function migrateLegacyScratchReports(cwd, dryRun) {
  const scratchDir = join(cwd, AGENT_ROOT_DIR, "docs", "scratch");
  const walkthroughDir = join(cwd, AGENT_ROOT_DIR, "docs", "plan");
  if (!existsSync(scratchDir)) return;

  const reportFiles = sortedDirEntries(scratchDir, { withFileTypes: true })
    .filter(ent => ent.isFile())
    .map(ent => join(scratchDir, ent.name));

  if (reportFiles.length === 0) return;

  // Only touch the filesystem outside dry runs.
  if (!dryRun) mkdirSync(walkthroughDir, { recursive: true });

  for (const sourceAbs of reportFiles) {
    const fileName = basename(sourceAbs);
    const targetAbs = join(walkthroughDir, fileName);

    if (existsSync(targetAbs)) {
      const sourceBody = readFileSync(sourceAbs, "utf8");
      const targetBody = readFileSync(targetAbs, "utf8");
      if (targetBody !== sourceBody) {
        console.warn(`[WARNING] Scratch report conflict: ${toRepoRelativePath(cwd, targetAbs)} already exists with different content. Skipping move.`);
        continue;
      }
      if (!dryRun) unlinkSync(sourceAbs);
      console.log(`[MIGRATE] Removed duplicate scratch report: ${toRepoRelativePath(cwd, sourceAbs)}`);
      continue;
    }

    if (!dryRun) {
      renameSync(sourceAbs, targetAbs);
    }
    console.log(`[MIGRATE] Moved scratch report: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
  }
}
670
+
671
/**
 * Move markdown files from the legacy archive/tickets shard into the
 * canonical archive/sub directory. Identical duplicates are deleted at the
 * source; conflicts are warned about and skipped. The emptied legacy shard
 * is removed afterwards (non-dry runs only).
 *
 * Fix: the destination directory is no longer created during dry runs
 * (mkdirSync previously ran unconditionally, mutating the repo on --dry-run),
 * and the source file is only read when a destination copy exists to compare
 * against.
 *
 * @param {string} cwd - Repository root.
 * @param {boolean} dryRun - Log planned changes without performing them.
 */
export function migrateLegacyArchiveTickets(cwd, dryRun) {
  const legacyArchiveTicketsDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "tickets");
  const canonicalArchiveSubDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive", "sub");
  if (!existsSync(legacyArchiveTicketsDir)) return;

  const archiveFiles = sortedDirEntries(legacyArchiveTicketsDir, { withFileTypes: true })
    .filter(ent => ent.isFile() && ent.name.endsWith(".md"))
    .map(ent => join(legacyArchiveTicketsDir, ent.name));

  if (archiveFiles.length === 0) return;

  // Only touch the filesystem outside dry runs.
  if (!dryRun) mkdirSync(canonicalArchiveSubDir, { recursive: true });

  for (const sourceAbs of archiveFiles) {
    const fileName = basename(sourceAbs);
    const targetAbs = join(canonicalArchiveSubDir, fileName);

    if (existsSync(targetAbs)) {
      const sourceBody = readFileSync(sourceAbs, "utf8");
      const targetBody = readFileSync(targetAbs, "utf8");
      if (targetBody !== sourceBody) {
        console.warn(`[WARNING] Legacy archive conflict: ${toRepoRelativePath(cwd, targetAbs)} already exists with different content. Skipping move.`);
        continue;
      }
      if (!dryRun) unlinkSync(sourceAbs);
      console.log(`[MIGRATE] Removed duplicate legacy archive ticket: ${toRepoRelativePath(cwd, sourceAbs)}`);
      continue;
    }

    if (!dryRun) {
      renameSync(sourceAbs, targetAbs);
    }
    console.log(`[MIGRATE] Moved legacy archive ticket: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, targetAbs)}`);
  }

  // Drop the legacy shard once it is empty; errors surface only under DEBUG.
  try {
    if (!dryRun && sortedDirEntries(legacyArchiveTicketsDir).length === 0) {
      rmSync(legacyArchiveTicketsDir, { recursive: true, force: true });
      console.log(`[CLEANUP] removed empty legacy archive shard: ${toRepoRelativePath(cwd, legacyArchiveTicketsDir)}`);
    }
  } catch (err) {
    if (process.env.DEBUG) console.warn(`[DEBUG] Failed to prune ${legacyArchiveTicketsDir}:`, err);
  }
}
715
+
716
/**
 * Rewrite links to a moved report file inside every markdown file under the
 * ticket archive. Both `../../reports/<file>`-style relative links and plain
 * `reports/<file>` occurrences are replaced with a fresh relative path from
 * each referencing file to `targetAbs`.
 *
 * @param {string} cwd       repository root
 * @param {string} targetAbs new absolute location of the report
 * @param {string} fileName  report file name (regex-escaped before matching)
 * @param {boolean} dryRun   skip writes when true (logs still emitted)
 */
function rewriteLegacyReportLinks(cwd, targetAbs, fileName, dryRun) {
  const archiveRoot = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR, "archive");
  if (!existsSync(archiveRoot)) return;

  walkMdFiles(archiveRoot, (absPath) => {
    const body = readFileSync(absPath, "utf8");
    // Cheap pre-filter before building regexes.
    if (!body.includes(`reports/${fileName}`)) return;
    const relTarget = toRepoRelativePath(cwd, targetAbs);
    const replacement = toPosixPath(relative(dirname(absPath), targetAbs));
    // Pass 1 strips `../` prefixed forms; pass 2 catches bare `reports/...`.
    // NOTE(review): the bare pattern is unanchored, so a path segment like
    // `subreports/<file>` would also match — presumably acceptable here; verify.
    const reportPattern = new RegExp(`(?:\\.\\./)+reports/${escapeRegExp(fileName)}`, "g");
    const directPathPattern = new RegExp(`reports/${escapeRegExp(fileName)}`, "g");
    const nextBody = body
      .replace(reportPattern, replacement)
      .replace(directPathPattern, replacement);
    if (nextBody === body) return;
    if (!dryRun) writeFileSync(absPath, nextBody, "utf8");
    console.log(`[MIGRATE] Updated legacy report link in ${toRepoRelativePath(cwd, absPath)} -> ${relTarget}`);
  });
}
735
+
736
/**
 * Remove the pre-v4 ticket shard directories (`core`, `global`, `main`,
 * `reports`) once they contain no entries. Non-empty shards are left alone;
 * failures are best effort and only surface under DEBUG.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun when true nothing is deleted (log still emitted)
 */
function pruneEmptyLegacyTicketDirs(cwd, dryRun) {
  const ticketRoot = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
  const legacyShardNames = ["core", "global", "main", "reports"];

  for (const shardName of legacyShardNames) {
    const shardDir = join(ticketRoot, shardName);
    if (!existsSync(shardDir)) continue;
    try {
      const isEmpty = sortedDirEntries(shardDir).length === 0;
      if (!isEmpty) continue;
      if (!dryRun) rmSync(shardDir, { recursive: true, force: true });
      console.log(`[CLEANUP] removed empty legacy ticket dir: ${toRepoRelativePath(cwd, shardDir)}`);
    } catch (err) {
      if (process.env.DEBUG) console.warn(`[DEBUG] Failed to prune ${shardDir}:`, err);
    }
  }
}
755
+
756
/**
 * Route a file found outside the canonical ticket layout to its proper home:
 * - the generated ticket list file is simply deleted (it is re-derivable),
 * - non-markdown artifacts are reclassified via classifyAgentFileTarget,
 * - markdown tickets go to `sub/` when their front-matter status is active,
 *   otherwise to `archive/sub/<year-month>/` partitioned by file/meta dates.
 *
 * @param {string} cwd       repository root
 * @param {string} sourceAbs absolute path of the misplaced file
 * @param {boolean} dryRun   skip mutations when true
 */
function routeMisplacedTicketFile(cwd, sourceAbs, dryRun) {
  const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
  const fileName = basename(sourceAbs);
  const raw = safeReadText(sourceAbs);
  const meta = parseFrontMatter(raw).meta || {};
  const status = String(meta.status || "").toLowerCase();

  if (fileName === TICKET_LIST_FILENAME) {
    // Derived file — delete rather than relocate.
    if (!dryRun) unlinkSync(sourceAbs);
    return;
  }

  if (!fileName.endsWith(".md")) {
    const targetAbs = classifyAgentFileTarget(cwd, sourceAbs);
    moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced ticket artifact cleanup");
    return;
  }

  // Active tickets live flat under sub/; everything else is archived into a
  // year-month partition inferred from file stats / metadata.
  const partition = inferPartitionFromFile(statSync(sourceAbs), meta);
  const targetAbs = isActiveTicketStatus(status)
    ? join(ticketDir, "sub", fileName)
    : join(ticketDir, "archive", "sub", partition.yearMonth, fileName);
  moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced ticket cleanup");
}
780
+
781
/**
 * Enforce the canonical `.deuk-agent/docs/` layout: only the `archive` and
 * `plan` subdirectories are allowed to stay. Stray files are reclassified via
 * classifyDocTarget; stray directories have their contents reclassified
 * recursively and are then pruned bottom-up if emptied.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip mutations when true
 */
function canonicalizeAgentDocsLayout(cwd, dryRun) {
  const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
  if (!existsSync(docsRoot)) return;
  const allowedDirs = new Set(["archive", "plan"]);

  for (const entry of sortedDirEntries(docsRoot, { withFileTypes: true })) {
    const sourceAbs = join(docsRoot, entry.name);
    if (entry.isDirectory() && allowedDirs.has(entry.name)) continue;

    if (entry.isFile()) {
      const targetAbs = classifyDocTarget(cwd, sourceAbs);
      moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced docs cleanup");
      continue;
    }

    // Anything that is neither file nor directory (e.g. symlink entry types)
    // is left untouched.
    if (!entry.isDirectory()) continue;
    for (const fileAbs of collectFilesRecursively(sourceAbs)) {
      const targetAbs = classifyDocTarget(cwd, fileAbs);
      moveOrMergeFile(fileAbs, targetAbs, cwd, dryRun, `misplaced docs directory cleanup: ${entry.name}`);
    }
    removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
  }
}
804
+
805
/**
 * Enforce the canonical ticket directory layout. At the ticket root only the
 * `archive`/`sub` directories, the index file, and per-month archive index
 * shards (`INDEX.archive.YYYY-MM.json`) may remain; everything else is routed
 * through routeMisplacedTicketFile. A second pass does the same for entries
 * directly under `archive/` other than `archive/sub/`.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip mutations when true
 */
function canonicalizeAgentTicketsLayout(cwd, dryRun) {
  const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
  if (!existsSync(ticketDir)) return;
  const allowedDirs = new Set(["archive", "sub"]);
  const allowedFiles = new Set([TICKET_INDEX_FILENAME]);

  for (const entry of sortedDirEntries(ticketDir, { withFileTypes: true })) {
    const sourceAbs = join(ticketDir, entry.name);
    if (entry.isDirectory() && allowedDirs.has(entry.name)) continue;
    // Monthly archive index shards are part of the canonical layout.
    if (entry.isFile() && (allowedFiles.has(entry.name) || /^INDEX\.archive\.\d{4}-\d{2}\.json$/.test(entry.name))) continue;

    if (entry.isFile()) {
      routeMisplacedTicketFile(cwd, sourceAbs, dryRun);
      continue;
    }

    if (!entry.isDirectory()) continue;
    for (const fileAbs of collectFilesRecursively(sourceAbs)) {
      routeMisplacedTicketFile(cwd, fileAbs, dryRun);
    }
    removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
  }

  // Second pass: inside archive/ only the `sub` directory is canonical.
  const archiveRoot = join(ticketDir, "archive");
  if (existsSync(archiveRoot)) {
    for (const entry of sortedDirEntries(archiveRoot, { withFileTypes: true })) {
      if (entry.name === "sub") continue;

      const sourceAbs = join(archiveRoot, entry.name);
      if (entry.isDirectory()) {
        for (const fileAbs of collectFilesRecursively(sourceAbs)) {
          routeMisplacedTicketFile(cwd, fileAbs, dryRun);
        }
        removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
        continue;
      }

      if (entry.isFile()) {
        routeMisplacedTicketFile(cwd, sourceAbs, dryRun);
        continue;
      }
    }
  }
}
849
+
850
/**
 * Enforce the canonical `.deuk-agent/` root layout. Only the listed
 * directories and files are allowed to remain at the root; stray files and
 * directory trees are reclassified via classifyAgentFileTarget and emptied
 * directories are pruned bottom-up.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip mutations when true
 */
function canonicalizeAgentRootLayout(cwd, dryRun) {
  const agentRoot = join(cwd, AGENT_ROOT_DIR);
  if (!existsSync(agentRoot)) return;
  const allowedDirs = new Set(["docs", "knowledge", "tickets", "templates", "skill-templates", "skills"]);
  const allowedFiles = new Set(["config.json", "telemetry.jsonl", "skills.json", "usage.json"]);

  for (const entry of sortedDirEntries(agentRoot, { withFileTypes: true })) {
    const sourceAbs = join(agentRoot, entry.name);
    if (entry.isDirectory() && allowedDirs.has(entry.name)) continue;
    if (entry.isFile() && allowedFiles.has(entry.name)) continue;

    if (entry.isFile()) {
      const targetAbs = classifyAgentFileTarget(cwd, sourceAbs);
      moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "misplaced agent root cleanup");
      continue;
    }

    if (!entry.isDirectory()) continue;
    for (const fileAbs of collectFilesRecursively(sourceAbs)) {
      const targetAbs = classifyAgentFileTarget(cwd, fileAbs);
      moveOrMergeFile(fileAbs, targetAbs, cwd, dryRun, `misplaced agent root directory cleanup: ${entry.name}`);
    }
    removeEmptyDirsBottomUp(sourceAbs, cwd, dryRun);
  }
}
875
+
876
/**
 * Run the full canonical-layout pipeline in its required order:
 * docs layout, legacy archive docs buckets, tickets layout, then the final
 * agent-root sweep.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun forwarded to every pass
 */
export function enforceCanonicalAgentLayout(cwd, dryRun) {
  const passes = [
    canonicalizeAgentDocsLayout,
    canonicalizeLegacyArchiveDocsBuckets,
    canonicalizeAgentTicketsLayout,
    canonicalizeAgentRootLayout,
  ];
  for (const pass of passes) {
    pass(cwd, dryRun);
  }
}
882
+
883
/**
 * Depth-first collection of every `.md` file under `dir`.
 * The accumulator array is mutated in place and also returned, so callers may
 * either pass their own array or use the return value.
 *
 * @param {string} dir       directory to scan (missing dirs yield `out` as-is)
 * @param {string[]} [out]   accumulator of absolute paths
 * @returns {string[]} the accumulator
 */
function collectMarkdownFilesRecursively(dir, out = []) {
  if (!existsSync(dir)) return out;
  for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      collectMarkdownFilesRecursively(entryPath, out);
    } else if (entry.isFile() && entry.name.endsWith(".md")) {
      out.push(entryPath);
    }
  }
  return out;
}
897
+
898
/**
 * Non-recursive listing of the `.md` files directly inside `dir`.
 *
 * @param {string} dir directory to list; missing dirs yield []
 * @returns {string[]} absolute paths of the markdown files
 */
function listFlatMarkdownFiles(dir) {
  if (!existsSync(dir)) return [];
  const markdownNames = sortedDirEntries(dir).filter(
    (name) => typeof name === "string" && name.endsWith(".md")
  );
  return markdownNames.map((name) => join(dir, name));
}
904
+
905
/**
 * Normalize every markdown file under the ticket archive to its canonical
 * path. Files are matched against the ticket index (by file name, by
 * front-matter id, then by base name); active tickets are promoted back out
 * of the archive, archived ones are placed under
 * `archive/<group>/<year-month>/`. `*-legacy-docs*.md` variants are merged
 * into their canonical sibling instead of being moved.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip mutations when true
 */
function canonicalizeTicketArchivePath(cwd, dryRun) {
  const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
  const archiveRoot = join(ticketDir, "archive");
  if (!existsSync(archiveRoot)) return;

  const { byFileName, byId } = mapTicketIndexByFileName(cwd);
  const archiveFiles = collectMarkdownFilesRecursively(archiveRoot);

  for (const sourceAbs of archiveFiles) {
    // Files directly under archive/ (no group segment) are skipped here;
    // canonicalizeAgentTicketsLayout handles those.
    const relParts = toPosixPath(relative(archiveRoot, sourceAbs)).split("/");
    if (relParts.length < 2) continue;

    const fileName = basename(sourceAbs);
    // Legacy-docs variants ("-legacy-docs.md" / "-legacy-docs-NN.md") get
    // merged into the canonical base file in place.
    if (/-legacy-docs(?:-\d{2})?\.md$/i.test(fileName)) {
      const targetAbs = join(dirname(sourceAbs), archiveLegacyVariantBaseName(fileName));
      const variantResult = mergeArchiveVariantIntoCanonical(sourceAbs, targetAbs, cwd, dryRun, "ticket archive cleanup");
      if (variantResult.moved && !dryRun) {
        console.log(`[CLEANUP] ticket archive normalized: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, variantResult.finalTarget)}`);
      }
      continue;
    }
    const sourceBase = basename(sourceAbs, ".md");
    const sourceMeta = parseFrontMatter(safeReadText(sourceAbs)).meta || {};
    // Index lookup precedence: exact file name, then front-matter id, then
    // base-name fallback.
    const matchedEntry = byFileName.get(fileName)
      || byId.get(sourceMeta.id)
      || byFileName.get(`${sourceBase}.md`);

    const status = String(matchedEntry?.status || sourceMeta.status || "archived").toLowerCase();
    const group = normalizeTicketGroup(matchedEntry?.group || relParts[0], "sub");
    const partition = inferPartitionFromFile(statSync(sourceAbs), matchedEntry);
    const shouldBeOpen = isActiveTicketStatus(status);
    const targetAbs = shouldBeOpen
      ? join(ticketDir, group, fileName)
      : join(archiveRoot, group, partition.yearMonth, fileName);
    if (sourceAbs === targetAbs) continue;

    let moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, "ticket archive cleanup");
    let finalTarget = targetAbs;
    // Conflicted moves out of the legacy-docs bucket fall back to the
    // variant-merge strategy.
    if (!moved && sourceAbs.includes(`${AGENT_ROOT_DIR}/${TICKET_SUBDIR}/archive/sub/legacy-docs/`)) {
      const conflictResult = mergeArchiveVariantIntoCanonical(sourceAbs, targetAbs, cwd, dryRun, "ticket archive cleanup");
      moved = conflictResult.moved;
      finalTarget = conflictResult.finalTarget;
    }
    if (moved && sourceAbs !== targetAbs && !dryRun) {
      console.log(`[CLEANUP] ticket archive normalized: ${toRepoRelativePath(cwd, sourceAbs)} -> ${toRepoRelativePath(cwd, finalTarget)}`);
    }
  }

  removeEmptyDirsBottomUp(archiveRoot, cwd, dryRun);
}
955
+
956
/**
 * After a plan document moves, update every ticket that referenced its old
 * repo-relative path: the `planLink` front-matter field is retargeted when it
 * matches exactly, and all occurrences in the ticket body are replaced.
 * Tickets whose front matter fails to parse are skipped.
 *
 * @param {string} cwd       repository root
 * @param {string} sourceAbs old absolute path of the plan doc
 * @param {string} targetAbs new absolute path of the plan doc
 * @param {boolean} dryRun   skip writes when true
 */
function rewritePlanLinkReferences(cwd, sourceAbs, targetAbs, dryRun) {
  const sourceRel = toRepoRelativePath(cwd, sourceAbs);
  const targetRel = toRepoRelativePath(cwd, targetAbs);
  if (sourceRel === targetRel) return;

  const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
  for (const ticketAbs of collectMarkdownFilesRecursively(ticketDir)) {
    const raw = safeReadText(ticketAbs);
    // Cheap substring pre-filter before parsing front matter.
    if (!raw.includes(sourceRel)) continue;

    let parsed;
    try {
      parsed = parseFrontMatter(raw);
    } catch {
      // Unparseable ticket: leave it untouched rather than risk corrupting it.
      continue;
    }

    const nextMeta = { ...parsed.meta };
    if (nextMeta.planLink === sourceRel) nextMeta.planLink = targetRel;
    const nextContent = String(parsed.content || "").replaceAll(sourceRel, targetRel);
    const nextRaw = stringifyFrontMatter(nextMeta, nextContent);
    if (nextRaw === raw) continue;

    if (!dryRun) writeFileSync(ticketAbs, nextRaw, "utf8");
    console.log(`[MIGRATE] Updated planLink reference: ${toRepoRelativePath(cwd, ticketAbs)} -> ${targetRel}`);
  }
}
983
+
984
/**
 * Drop a leading `---` ... `---` YAML front-matter fence (if present) and trim
 * the remaining text. Nullish input yields "".
 *
 * @param {string|null|undefined} raw markdown source
 * @returns {string} body without front matter, trimmed
 */
function stripFrontMatterBlock(raw) {
  const text = String(raw || "");
  const frontMatterFence = /^---\s*\n[\s\S]*?\n---\s*\n?/;
  return text.replace(frontMatterFence, "").trim();
}
987
+
988
/**
 * Derive the 3-digit ticket id from a file name such as `123-title.md` or
 * `01-123-title.md` (an optional numeric prefix is skipped).
 *
 * Fix: removed the dead `^(\d{3})-` fallback match — any name it accepted is
 * already matched by the primary pattern with the optional prefix skipped, so
 * the second regex could never change the result.
 *
 * @param {string} fileAbs absolute or relative file path
 * @returns {string|null} the 3-digit id, or null when the name has none
 */
function docTicketIdFromFile(fileAbs) {
  const name = basename(fileAbs).replace(/\.[^.]+$/, "");
  const match = name.match(/^(?:\d+-)?(\d{3})(?:-|$)/);
  return match?.[1] || null;
}
993
+
994
/**
 * Build a slug from a legacy doc's file name: extension stripped, lowercased,
 * with runs of characters outside [a-z0-9] and Hangul syllables collapsed to
 * "-". Falls back to "legacy-doc" when nothing survives.
 *
 * @param {string} fileAbs file path
 * @returns {string} slug, never empty
 */
function legacyDocSlug(fileAbs) {
  const stem = basename(fileAbs).replace(/\.[^.]+$/, "");
  const slug = stem
    .toLowerCase()
    .replace(/[^a-z0-9가-힣]+/gu, "-")
    .replace(/^-+|-+$/g, "");
  return slug || "legacy-doc";
}
1001
+
1002
/**
 * Produce a human-readable title from a legacy doc's file name: extension
 * stripped, dash/underscore runs turned into single spaces, whitespace
 * collapsed and trimmed.
 *
 * @param {string} fileAbs file path
 * @returns {string} display title
 */
function legacyDocTitle(fileAbs) {
  const stem = basename(fileAbs).replace(/\.[^.]+$/, "");
  const spaced = stem.replace(/[-_]+/g, " ");
  return spaced.replace(/\s+/g, " ").trim();
}
1009
+
1010
/**
 * Read a legacy doc and produce the body to merge into a ticket: markdown
 * files contribute their content with front matter stripped; any other file
 * type is embedded verbatim inside a fenced ```text block.
 *
 * @param {string} fileAbs file to read (via safeReadText)
 * @returns {string} merge-ready body text
 */
function buildLegacyDocBody(fileAbs) {
  const raw = safeReadText(fileAbs);
  if (fileAbs.endsWith(".md")) {
    return stripFrontMatterBlock(raw);
  }
  return "```text\n" + raw.trim() + "\n```";
}
1015
+
1016
/**
 * Create an archived placeholder ticket that will receive merged legacy-doc
 * content. Returns false without writing when the ticket already exists.
 * `summary` and `createdAt` are lifted from the source doc's front matter when
 * available; newlines and colons in the summary are replaced to keep the YAML
 * front matter single-line and parseable.
 *
 * @param {string} ticketAbs target ticket path
 * @param {string} id        ticket id (numeric id or slug fallback)
 * @param {string} title     ticket title (typically the doc slug)
 * @param {string} sourceAbs legacy doc being migrated
 * @param {boolean} dryRun   skip writes when true (still returns true)
 * @returns {boolean} whether a new ticket was (or would be) created
 */
function createLegacyDocTicket(ticketAbs, id, title, sourceAbs, dryRun) {
  if (existsSync(ticketAbs)) return false;
  const sourceMeta = parseFrontMatter(safeReadText(sourceAbs)).meta || {};
  // Sanitize for single-line YAML: no newlines, no ":" (would break parsing).
  const summary = String(sourceMeta.summary || title).replace(/\n/g, " ").replace(/:/g, "-");
  const createdAt = sourceMeta.createdAt || new Date().toISOString().slice(0, 19).replace("T", " ");
  // Fixed ticket template; the merged legacy content is appended later by
  // mergeDocIntoTicket.
  const body = [
    "---",
    `summary: ${summary}`,
    "status: archived",
    "priority: P3",
    "tags: migrated",
    `id: ${id}`,
    `title: ${title}`,
    `createdAt: ${createdAt}`,
    "---",
    "",
    `# ${title}`,
    "",
    "> Legacy separated docs are merged here so this ticket is the single source of truth.",
    "",
    "## Scope & Constraints",
    "",
    "- **Target:** migrated legacy work record.",
    "- **Context Files:** merged legacy content below.",
    "- **Constraints:** preserve historical content without keeping separate docs files.",
    "- **Lifecycle Guard:** this ticket is the canonical record.",
    "",
    "## Agent Permission Contract (APC)",
    "",
    "### [BOUNDARY]",
    "- Editable modules: historical ticket record only.",
    "- Forbidden modules: product/source changes from this migration.",
    "- Rule citation: local project rules if present.",
    "",
    "### [CONTRACT]",
    "- Input: separated legacy docs files.",
    "- Output: one canonical ticket containing merged legacy content.",
    "- Side effects: legacy docs files removed after merge.",
    "",
    "### [PATCH PLAN]",
    "- Merge separated docs into this ticket.",
    "- Remove source docs after merge.",
    "",
    "## Compact Plan",
    "",
    "- **Problem:** this work item existed as separated docs outside the ticket.",
    "- **Approach:** merge the legacy content below and keep this ticket as canonical.",
    "- **Verification:** confirm the source docs files are removed and this ticket remains.",
    "- **Ticket Numbering:** infer the master/sub ticket from the numbered ticket ID; do not add inline child-ticket links.",
    "",
    "## Tasks",
    "",
    "- [x] Merge separated docs content into this ticket.",
    "- [x] Remove separated docs files.",
    "",
    "## Done When",
    "",
    "- This ticket contains the merged content.",
    "- Separate docs files are removed.",
    ""
  ].join("\n");
  if (!dryRun) {
    mkdirSync(dirname(ticketAbs), { recursive: true });
    writeFileSync(ticketAbs, body, "utf8");
  }
  return true;
}
1083
+
1084
/**
 * Append a legacy doc's content to a ticket under a
 * "## Merged Legacy Document" section and delete the source doc.
 * Returns false (no write, no delete) when the doc yields an empty body.
 *
 * @param {string} ticketAbs ticket receiving the content
 * @param {string} docAbs    legacy doc to merge and remove
 * @param {boolean} dryRun   skip the write/unlink when true (still returns true)
 * @returns {boolean} whether a merge happened (or would have)
 */
function mergeDocIntoTicket(ticketAbs, docAbs, dryRun) {
  const body = buildLegacyDocBody(docAbs);
  if (!body) return false;

  const title = legacyDocTitle(docAbs);
  const section = `\n## Merged Legacy Document\n\n### ${title}\n\n${body}\n`;
  const ticketRaw = safeReadText(ticketAbs).trimEnd();
  if (!dryRun) {
    writeFileSync(ticketAbs, ticketRaw + section, "utf8");
    unlinkSync(docAbs);
  }
  return true;
}
1105
+
1106
/**
 * Fold every file under `.deuk-agent/docs/` into a ticket so tickets become
 * the single source of truth. Docs carrying a recognizable 3-digit id are
 * merged into the matching existing ticket (preferring one under
 * `tickets/sub/`); all others get a generated archived ticket under
 * `archive/sub/legacy-docs/`. The docs root is removed afterwards.
 *
 * NOTE(review): the final `rmSync(docsRoot)` deletes the entire docs tree,
 * including files that were skipped because their body was empty — presumably
 * intentional for this one-way migration, but verify before reuse.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip all mutations when true
 * @returns {number} how many docs were merged
 */
function mergeSeparatedDocsIntoTickets(cwd, dryRun) {
  const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
  const ticketDir = join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR);
  if (!existsSync(docsRoot)) return 0;

  // Index existing tickets by their 3-digit id for doc matching.
  const ticketFiles = collectMarkdownFilesRecursively(ticketDir)
    .filter((p) => basename(p) !== TICKET_LIST_FILENAME);
  const ticketsById = new Map();
  for (const ticketAbs of ticketFiles) {
    const id = docTicketIdFromFile(ticketAbs);
    if (!id) continue;
    if (!ticketsById.has(id)) ticketsById.set(id, []);
    ticketsById.get(id).push(ticketAbs);
  }

  let merged = 0;
  let created = 0;
  for (const docAbs of collectFilesRecursively(docsRoot)) {
    const body = buildLegacyDocBody(docAbs).trim();
    if (!body) continue;

    const id = docTicketIdFromFile(docAbs);
    let ticketAbs = null;
    if (id && ticketsById.has(id)) {
      // Prefer an active ticket under sub/ over archived candidates.
      const candidates = ticketsById.get(id);
      ticketAbs = candidates.find((p) => p.includes(`${TICKET_SUBDIR}/sub/`)) || candidates[0];
    }

    if (!ticketAbs) {
      // No matching ticket: synthesize an archived legacy-docs ticket.
      const slug = legacyDocSlug(docAbs);
      const ticketId = id || slug;
      ticketAbs = join(ticketDir, "archive", "sub", "legacy-docs", `${ticketId}.md`);
      if (createLegacyDocTicket(ticketAbs, ticketId, slug, docAbs, dryRun)) created++;
      if (id) {
        if (!ticketsById.has(id)) ticketsById.set(id, []);
        ticketsById.get(id).push(ticketAbs);
      }
    }

    if (mergeDocIntoTicket(ticketAbs, docAbs, dryRun)) merged++;
  }

  if (!dryRun) rmSync(docsRoot, { recursive: true, force: true });
  if (merged > 0 || created > 0) {
    console.log(`[MIGRATE] separated docs merged into tickets: merged=${merged}, created=${created}`);
  }
  return merged;
}
1154
+
1155
/**
 * Move plan docs between `docs/plan/` and `docs/archive/<year-month>/`
 * according to the lifecycle status of the ticket they belong to (resolved
 * via the ticket index, falling back to the doc's own front matter).
 * When a doc moves, all tickets referencing it get their planLink rewritten.
 *
 * Fix: removed the unused local `shouldArchive` (it was computed but never
 * read; `isActive` alone drives the target choice).
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip mutations when true
 */
export function canonicalizeDocsArchiveBuckets(cwd, dryRun) {
  const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
  const archiveDir = join(docsRoot, "archive");
  const buckets = [
    { name: "plan", source: join(docsRoot, "plan"), archiveBase: archiveDir },
  ];

  const { byFileName, byId } = mapTicketIndexByFileName(cwd);
  for (const bucket of buckets) {
    if (!existsSync(bucket.source)) continue;

    const docFiles = collectFilesRecursively(bucket.source).filter((p) => p.endsWith(".md"));
    for (const sourceAbs of docFiles) {
      const fileName = basename(sourceAbs);
      const sourceMeta = parseFrontMatter(safeReadText(sourceAbs)).meta || {};
      const matchedEntry = resolveDocTicketEntry(fileName, sourceMeta, byFileName, byId);
      const status = String(matchedEntry?.status || sourceMeta.status || "active").toLowerCase();
      const isActive = isActiveTicketStatus(status);

      // Partition precedence: explicit archive month, then created/updated
      // stamps, then the file's mtime, then "now".
      const yearMonth = parseYearMonth(matchedEntry?.archiveYearMonth)
        || parseYearMonth(matchedEntry?.createdAt)
        || parseYearMonth(matchedEntry?.updatedAt)
        || parseYearMonth(statSync(sourceAbs).mtime.toISOString())
        || parseYearMonth(new Date().toISOString());
      if (!yearMonth) continue;

      const targetAbs = isActive ? join(docsRoot, "plan", fileName) : join(bucket.archiveBase, yearMonth, fileName);

      const moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, `docs lifecycle cleanup: ${bucket.name}`);
      if (moved) {
        rewritePlanLinkReferences(cwd, sourceAbs, targetAbs, dryRun);
      }
    }
  }
}
1191
+
1192
/**
 * Flatten the pre-v4 `docs/archive/plans/` and `docs/archive/walkthroughs/`
 * buckets into `docs/archive/<year-month>/...`. The year-month comes from the
 * first path segment when it already is one, otherwise from the file's mtime,
 * otherwise from the current date. Moved reports get their legacy links
 * rewritten; emptied buckets are pruned (best effort).
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun skip mutations when true
 */
function canonicalizeLegacyArchiveDocsBuckets(cwd, dryRun) {
  const docsRoot = join(cwd, AGENT_ROOT_DIR, "docs");
  const archiveRoot = join(docsRoot, "archive");
  if (!existsSync(archiveRoot)) return;

  const legacyBuckets = ["plans", "walkthroughs"];
  const now = new Date();
  const fallbackYearMonth = `${now.getFullYear()}-${String(now.getMonth() + 1).padStart(2, "0")}`;

  for (const bucket of legacyBuckets) {
    const sourceRoot = join(archiveRoot, bucket);
    if (!existsSync(sourceRoot)) continue;

    for (const sourceAbs of collectFilesRecursively(sourceRoot)) {
      if (!sourceAbs.endsWith(".md")) continue;

      const relParts = toPosixPath(relative(sourceRoot, sourceAbs)).split("/");
      const yearMonth = parseYearMonth(relParts[0]) || parseYearMonth(statSync(sourceAbs).mtime.toISOString()) || fallbackYearMonth;
      // When the first segment already was a year-month, drop it so it is not
      // duplicated in the target path.
      const targetAbs = join(archiveRoot, yearMonth, ...relParts.slice(relParts[0] && parseYearMonth(relParts[0]) ? 1 : 0));
      if (sourceAbs === targetAbs) continue;

      const moved = moveOrMergeFile(sourceAbs, targetAbs, cwd, dryRun, `legacy archive docs cleanup: ${bucket}`);
      if (moved) {
        rewriteLegacyReportLinks(cwd, targetAbs, basename(sourceAbs), dryRun);
      }
    }

    // Best-effort prune: remove the bucket outright when empty, otherwise
    // clean up any emptied subdirectories.
    try {
      if (!dryRun && sortedDirEntries(sourceRoot).length === 0) {
        rmSync(sourceRoot, { recursive: true, force: true });
      } else if (!dryRun) {
        removeEmptyDirsBottomUp(sourceRoot, cwd, dryRun);
      }
    } catch (err) {
      if (process.env.DEBUG) console.warn(`[DEBUG] Failed to remove legacy docs archive bucket ${sourceRoot}:`, err);
    }
  }
}
1230
+
1231
/**
 * Recursively invoke `callback(absPath)` for every `.md` file under `dir`.
 * A missing directory is a no-op.
 *
 * @param {string} dir root directory
 * @param {(absPath: string) => void} callback visitor for each markdown file
 */
function walkMdFiles(dir, callback) {
  if (!existsSync(dir)) return;
  for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
    const entryPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      walkMdFiles(entryPath, callback);
      continue;
    }
    if (entry.isFile() && entry.name.endsWith(".md")) {
      callback(entryPath);
    }
  }
}
1242
+
1243
/**
 * Backslash-escape every regex metacharacter so `value` can be embedded in a
 * RegExp and match literally. Nullish/falsy input yields "".
 *
 * @param {string} value text to escape
 * @returns {string} regex-safe literal
 */
function escapeRegExp(value) {
  const text = String(value || "");
  return text.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
1246
+
1247
/**
 * Convert a v1-style AGENTS.md — whose managed block is delimited by
 * `<!-- deuk-agent-rule:begin/end -->` HTML comments — to the v2
 * heading-based format (`## DeukAgentFlow`). User content before/after the
 * markers is preserved ahead of the managed section. A `.pre-v2.bak` backup
 * is written before the in-place rewrite.
 *
 * @param {string} cwd     repository root
 * @param {boolean} dryRun announce only, no backup/write
 */
function migrateHtmlMarkersToHeadings(cwd, dryRun) {
  const agentsPath = join(cwd, "AGENTS.md");
  if (!existsSync(agentsPath)) return;

  const content = readFileSync(agentsPath, "utf8");
  const oldBegin = "<!-- deuk-agent-rule:begin -->";
  const oldEnd = "<!-- deuk-agent-rule:end -->";

  if (!content.includes(oldBegin)) return;

  // Widest span: first begin marker to last end marker.
  const beginIdx = content.indexOf(oldBegin);
  const endIdx = content.lastIndexOf(oldEnd);
  if (endIdx <= beginIdx) return;

  const managedContent = content.slice(beginIdx + oldBegin.length, endIdx).trim();
  const userContent = content.slice(0, beginIdx).trim();
  const afterContent = content.slice(endIdx + oldEnd.length).trim();

  // Rebuild: user text first, then the heading-based managed section.
  let newContent = "";
  if (userContent) newContent += userContent + "\n\n";
  if (afterContent) newContent += afterContent + "\n\n";
  newContent += "---\n\n";
  newContent += "## DeukAgentFlow\n\n";
  newContent += "> Managed by DeukAgentFlow. Remove this section if not installed.\n\n";
  newContent += managedContent + "\n";

  if (!dryRun) {
    copyFileSync(agentsPath, agentsPath + ".pre-v2.bak");
    writeFileSync(agentsPath, newContent, "utf8");
    console.log("[MIGRATE] Converted HTML markers to heading-based format in AGENTS.md");
    console.log("[MIGRATE] Backup saved as AGENTS.md.pre-v2.bak");
  } else {
    console.log("[DRY-RUN] Would convert HTML markers to heading-based format in AGENTS.md");
  }
}
1282
+
1283
// Directory names skipped while scanning for init surfaces (AGENTS.md /
// PROJECT_RULE.md): VCS metadata, the agent state directory itself,
// dependency trees, and common build outputs.
const INIT_SURFACE_IGNORE_DIRS = new Set([
  ".git",
  ".hg",
  ".svn",
  AGENT_ROOT_DIR,
  "node_modules",
  "dist",
  "build",
  ".next",
  ".nuxt",
  "coverage"
]);
1295
+
1296
/**
 * Recursively visit every `AGENTS.md` and `PROJECT_RULE.md` under `dir`,
 * skipping the directories listed in INIT_SURFACE_IGNORE_DIRS.
 *
 * @param {string} dir root directory
 * @param {(absPath: string) => void} callback visitor for each surface file
 */
function walkInitTextSurfaces(dir, callback) {
  if (!existsSync(dir)) return;
  for (const entry of sortedDirEntries(dir, { withFileTypes: true })) {
    const absPath = join(dir, entry.name);
    if (entry.isDirectory()) {
      if (!INIT_SURFACE_IGNORE_DIRS.has(entry.name)) {
        walkInitTextSurfaces(absPath, callback);
      }
    } else if (entry.isFile() && (entry.name === "AGENTS.md" || entry.name === "PROJECT_RULE.md")) {
      callback(absPath);
    }
  }
}
1309
+
1310
/**
 * Strip a legacy `<!-- deuk-agent-rule:begin/end -->` managed span from
 * `content` and return the surviving user text (before + after the span,
 * trimmed and joined by a blank line). Returns null when no well-ordered
 * marker pair exists.
 *
 * @param {string} content file content to inspect
 * @returns {string|null} unmanaged text, "" if only the span existed, or null
 */
function removeLegacyHtmlManagedBlock(content) {
  const BEGIN_MARKER = "<!-- deuk-agent-rule:begin -->";
  const END_MARKER = "<!-- deuk-agent-rule:end -->";
  const text = String(content || "");
  const start = text.indexOf(BEGIN_MARKER);
  const stop = text.lastIndexOf(END_MARKER);
  if (start === -1 || stop <= start) return null;
  const head = text.slice(0, start).trim();
  const tail = text.slice(stop + END_MARKER.length).trim();
  return [head, tail].filter(Boolean).join("\n\n");
}
1322
+
1323
/**
 * Normalize one init surface file (AGENTS.md or PROJECT_RULE.md) that still
 * carries legacy DeukAgent references. For AGENTS.md:
 * - a legacy HTML-marker managed block is replaced by a fresh generated
 *   pointer appended after the user's unmanaged content;
 * - an already-generated (but stale) pointer is regenerated, preserving the
 *   project-doc section.
 * All other cases fall through to plain text canonicalization.
 *
 * @param {string} absPath    surface file to inspect
 * @param {string} cwd        repository root (for relative logging)
 * @param {string} bundleRoot installed package root
 * @param {boolean} dryRun    skip writes when true
 * @returns {boolean} whether the file changed (or would change)
 */
function canonicalizeAgentSurfaceFile(absPath, cwd, bundleRoot, dryRun) {
  if (!existsSync(absPath)) return false;
  const fileName = basename(absPath);
  const before = readFileSync(absPath, "utf8");
  const isAgents = fileName === "AGENTS.md";

  // Fast exit: nothing legacy- or flow-branded in the file.
  if (!/DeukAgentRules|DeukAgentFlow|deuk-agent-rule/.test(before)) return false;

  if (isAgents) {
    const unmanagedContent = removeLegacyHtmlManagedBlock(before);
    if (unmanagedContent !== null) {
      // v1 HTML-marker block found: drop it, append a fresh pointer.
      const pointer = generateSpokeContent({ format: "markdown" }, bundleRoot).trimEnd();
      const next = unmanagedContent
        ? `${unmanagedContent.trimEnd()}\n\n${pointer}\n`
        : `${pointer}\n`;
      if (next !== before) {
        if (!dryRun) writeFileSync(absPath, next, "utf8");
        console.log(`[MIGRATE] ${dryRun ? "Would replace" : "Replaced"} legacy HTML AGENTS.md block: ${toRepoRelativePath(cwd, absPath)}`);
        return true;
      }
      return false;
    }

    // Generated pointer present: regenerate it, keep the project-doc tail.
    const { pointer, projectDoc } = splitProjectDoc(before);
    if (isGeneratedDeukPointer(pointer)) {
      const nextPointer = generateSpokeContent({ format: "markdown" }, bundleRoot).trimEnd();
      const next = projectDoc ? `${nextPointer}\n\n${projectDoc.trimEnd()}\n` : `${nextPointer}\n`;
      if (next !== before) {
        if (!dryRun) writeFileSync(absPath, next, "utf8");
        console.log(`[MIGRATE] ${dryRun ? "Would replace" : "Replaced"} legacy AGENTS.md pointer: ${toRepoRelativePath(cwd, absPath)}`);
        return true;
      }
      return false;
    }
  }

  // Fallback: textual rebranding only.
  return canonicalizeTextFile(absPath, cwd, bundleRoot, dryRun, "legacy init surface reference");
}
1361
+
1362
/**
 * Sweep every AGENTS.md / PROJECT_RULE.md under `cwd` through
 * canonicalizeAgentSurfaceFile and report how many legacy surfaces changed.
 *
 * @param {string} cwd        root to scan
 * @param {string} bundleRoot installed package root
 * @param {boolean} dryRun    forwarded; also switches the summary wording
 * @returns {number} count of updated surfaces
 */
function canonicalizeRecursiveInitSurfaces(cwd, bundleRoot, dryRun) {
  let updated = 0;
  walkInitTextSurfaces(cwd, (absPath) => {
    const changed = canonicalizeAgentSurfaceFile(absPath, cwd, bundleRoot, dryRun);
    if (changed) updated += 1;
  });
  if (updated > 0) {
    console.log(`[SUMMARY] ${dryRun ? "Would update" : "Updated"} ${updated} legacy init surface(s) under ${basename(cwd)}.`);
  }
  return updated;
}
1372
+
1373
/**
 * Copy the bundle's top-level template files into the repo's template
 * directory. Only plain files at the top level are copied (subdirectories
 * such as templates/skills are handled by syncSkillTemplates).
 *
 * @param {string} cwd        repository root
 * @param {string} bundleRoot installed package root
 * @param {boolean} dryRun    log only, no filesystem changes
 */
function syncTemplates(cwd, bundleRoot, dryRun) {
  const destDir = join(cwd, AGENT_ROOT_DIR, TEMPLATE_SUBDIR);
  const sourceDir = join(bundleRoot, "templates");
  if (!existsSync(sourceDir)) return;

  if (dryRun) {
    console.log(`[SYNC] templates synced to ${toRepoRelativePath(cwd, destDir)} (dry-run mode)`);
    return;
  }

  if (!existsSync(destDir)) {
    mkdirSync(destDir, { recursive: true });
  }
  const fileEntries = readdirSync(sourceDir, { withFileTypes: true }).filter((entry) => entry.isFile());
  for (const entry of fileEntries) {
    cpSync(join(sourceDir, entry.name), join(destDir, entry.name));
  }
  console.log(`[SYNC] templates synced to ${toRepoRelativePath(cwd, destDir)}`);
}
1394
+
1395
/**
 * Mirror the bundle's `templates/skills` tree into
 * `.deuk-agent/skill-templates` (recursive copy).
 *
 * @param {string} cwd        repository root
 * @param {string} bundleRoot installed package root
 * @param {boolean} dryRun    log only, no filesystem changes
 */
function syncSkillTemplates(cwd, bundleRoot, dryRun) {
  const destDir = join(cwd, AGENT_ROOT_DIR, "skill-templates");
  const sourceDir = join(bundleRoot, "templates", "skills");
  if (!existsSync(sourceDir)) return;

  if (dryRun) {
    console.log(`[SYNC] skill templates synced to ${toRepoRelativePath(cwd, destDir)} (dry-run mode)`);
    return;
  }
  mkdirSync(destDir, { recursive: true });
  cpSync(sourceDir, destDir, { recursive: true });
  console.log(`[SYNC] skill templates synced to ${toRepoRelativePath(cwd, destDir)}`);
}
1408
+
1409
/**
 * Rewrite legacy "DeukAgentRules" branding and stale absolute core-rules
 * paths to the current DeukAgentFlow equivalents.
 *
 * Fix: the two path rewrites now run BEFORE the generic name rewrites.
 * Previously `DeukAgentRules` was globally renamed first, so the
 * `.../DeukAgentRules/core-rules/AGENTS.md` patterns could never match and
 * stale paths survived (merely re-branded instead of retargeted).
 *
 * @param {string} content    text to canonicalize (nullish becomes "")
 * @param {string} bundleRoot installed package root used to build the
 *                            canonical core-rules path
 * @returns {string} canonicalized text
 */
function canonicalizeLegacyDeukAgentText(content, bundleRoot) {
  const coreRulesPath = join(bundleRoot, "core-rules", "AGENTS.md");
  return String(content || "")
    // Path rewrites first, while the legacy name is still present.
    .replace(/file:\/\/[^)\s]+\/DeukAgentRules\/core-rules\/AGENTS\.md/g, `file://${coreRulesPath}`)
    .replace(/\/home\/joy\/workspace\/DeukAgentRules\/core-rules\/AGENTS\.md/g, coreRulesPath)
    // Then the generic rebranding passes.
    .replace(/DeukAgentRules/g, "DeukAgentFlow")
    .replace(/deuk-agent-rule/g, "deuk-agent-flow")
    .replace(/Deuk Agent Rules/g, "Deuk Agent Flow");
}
1418
+
1419
/**
 * Decide what survives of an existing spoke file when a new managed block is
 * about to be written. Returns "" when the existing content is redundant
 * (a generated pointer, or textually equivalent to the managed content after
 * legacy-name canonicalization); otherwise returns either the content
 * unchanged (no managed block found) or a rebuilt version with the fresh
 * managed block spliced between any non-redundant before/after text.
 *
 * @param {string} existingContent current file content
 * @param {string} managedContent  new managed block body
 * @param {string} bundleRoot      installed package root (for canonicalization)
 * @returns {string} content to keep ("" means drop entirely)
 */
function normalizeExistingSpokeContent(existingContent, managedContent, bundleRoot) {
  const current = String(existingContent || "");
  if (!current.trim()) return current;
  // A generated pointer carries no user content worth preserving.
  if (isGeneratedDeukPointer(current)) return "";

  const normalizedManaged = String(managedContent || "").trim();
  const canonicalize = (value) => canonicalizeLegacyDeukAgentText(value, bundleRoot).trim();

  // Whole file equals the managed content (modulo legacy branding): drop it.
  if (canonicalize(current) === normalizedManaged) return "";

  const currentBlock = splitManagedBlock(current);
  if (!currentBlock) return current;

  // Keep before/after segments only when they differ from the managed body.
  const before = canonicalize(currentBlock.before) === normalizedManaged ? "" : currentBlock.before;
  const after = canonicalize(currentBlock.after) === normalizedManaged ? "" : currentBlock.after;
  return [before, wrapManagedBlock(managedContent), after].filter(Boolean).join("\n\n").trimEnd() + "\n";
}
1436
+
1437
/**
 * Run one file through canonicalizeLegacyDeukAgentText and write the result
 * back when it changed. The log line is emitted in dry-run mode too.
 *
 * @param {string} absPath    file to rewrite
 * @param {string} cwd        repository root (for relative logging)
 * @param {string} bundleRoot installed package root
 * @param {boolean} dryRun    skip the write when true
 * @param {string} label      human-readable tag for the log line
 * @returns {boolean} whether the content changed (or would change)
 */
function canonicalizeTextFile(absPath, cwd, bundleRoot, dryRun, label) {
  if (!existsSync(absPath)) return false;
  const originalText = readFileSync(absPath, "utf8");
  const canonicalText = canonicalizeLegacyDeukAgentText(originalText, bundleRoot);
  if (canonicalText === originalText) return false;
  if (!dryRun) writeFileSync(absPath, canonicalText, "utf8");
  console.log(`[MIGRATE] ${dryRun ? "Would canonicalize" : "Canonicalized"} ${label}: ${toRepoRelativePath(cwd, absPath)}`);
  return true;
}
1446
+
1447
/**
 * Split a surface file into the generated pointer section and the user's
 * project-doc section at the `--- project-doc ---` marker. The marker line
 * itself stays at the start of `projectDoc`.
 *
 * Fix: the input is now coerced to a string once, so nullish `content` yields
 * `{ pointer: "", projectDoc: "" }` instead of leaking `null`/`undefined`
 * through the `pointer` field in the no-marker branch.
 *
 * @param {string|null|undefined} content file content
 * @returns {{pointer: string, projectDoc: string}}
 */
function splitProjectDoc(content) {
  const marker = "--- project-doc ---";
  const src = String(content || "");
  const idx = src.indexOf(marker);
  if (idx === -1) return { pointer: src, projectDoc: "" };
  return {
    pointer: src.slice(0, idx),
    projectDoc: src.slice(idx).trimStart()
  };
}
1456
+
1457
/**
 * Detect whether `content` is a pointer file generated by this tool: it must
 * carry the managed-by banner (or a legacy heading) AND both boilerplate
 * phrases emitted into generated pointers.
 *
 * @param {string} content candidate text
 * @returns {boolean} true when all three signals are present
 */
function isGeneratedDeukPointer(content) {
  const text = String(content || "");
  const hasBanner =
    /Managed by DeukAgent(?:Rules|Flow)/.test(text) ||
    /# Deuk Agent Rules\b|# Deuk Agent Flow\b/.test(text);
  if (!hasBanner) return false;
  if (!/Core rules are at:/i.test(text)) return false;
  return /thin bootstrap, not a second workflow contract/i.test(text);
}
1463
+
1464
/**
 * Refresh the root AGENTS.md. If its pointer section was generated by this
 * tool, regenerate it while preserving the `--- project-doc ---` tail;
 * otherwise fall back to plain textual rebranding of legacy references.
 * Files with no DeukAgent references at all are left untouched.
 *
 * @param {string} cwd        repository root
 * @param {string} bundleRoot installed package root
 * @param {boolean} dryRun    skip writes when true
 */
function canonicalizeAgentPointer(cwd, bundleRoot, dryRun) {
  const agentsPath = join(cwd, "AGENTS.md");
  if (!existsSync(agentsPath)) return;

  const current = readFileSync(agentsPath, "utf8");
  if (!/DeukAgentRules|DeukAgentFlow|deuk-agent-rule/.test(current)) return;

  const { pointer, projectDoc } = splitProjectDoc(current);
  if (!isGeneratedDeukPointer(pointer)) {
    // Hand-written file: only rebrand legacy names, never restructure.
    canonicalizeTextFile(agentsPath, cwd, bundleRoot, dryRun, "AGENTS.md legacy references");
    return;
  }

  const nextPointer = generateSpokeContent({ format: "markdown" }, bundleRoot).trimEnd();
  const next = projectDoc ? `${nextPointer}\n\n${projectDoc.trimEnd()}\n` : `${nextPointer}\n`;
  if (current === next) return;
  if (!dryRun) writeFileSync(agentsPath, next, "utf8");
  console.log(`[MIGRATE] ${dryRun ? "Would replace" : "Replaced"} legacy AGENTS.md pointer with DeukAgentFlow canonical pointer`);
}
1483
+
1484
/**
 * Rewrites legacy command references in the fixed set of generated rule files
 * and in every markdown template under the agent template directory.
 *
 * @param {string} cwd - Workspace root.
 * @param {string} bundleRoot - Installed package root.
 * @param {boolean} dryRun - Forwarded to canonicalizeTextFile; no writes when true.
 */
function canonicalizeGeneratedCommandReferences(cwd, bundleRoot, dryRun) {
  // Well-known generated files that may still mention legacy command names.
  const wellKnownTargets = [
    ["PROJECT_RULE.md"],
    ["docs", "project", "AGENTS.md"],
    [".github", "copilot-instructions.md"],
    [".codex", "AGENTS.md"],
  ];
  for (const segments of wellKnownTargets) {
    canonicalizeTextFile(join(cwd, ...segments), cwd, bundleRoot, dryRun, "legacy command reference");
  }

  // Every markdown template under the agent template dir gets the same pass.
  walkMdFiles(join(cwd, AGENT_ROOT_DIR, TEMPLATE_SUBDIR), (templateFile) => {
    canonicalizeTextFile(templateFile, cwd, bundleRoot, dryRun, "legacy template command reference");
  });
}
1501
+
1502
/**
 * Scans .deuk-agent/tickets/ and .deuk-agent/docs/ for markdown files
 * missing YAML frontmatter or missing required frontmatter keys,
 * and injects/supplements them. Also strips trailing whitespace.
 * This ensures lint:md passes and RAG indexing works correctly.
 *
 * @param {string} cwd - Workspace root containing the .deuk-agent directory.
 * @param {boolean} dryRun - When true, only logs what would change; no writes.
 */
function migrateMissingFrontmatter(cwd, dryRun) {
  const dirs = [
    join(cwd, AGENT_ROOT_DIR, TICKET_SUBDIR),
    join(cwd, AGENT_ROOT_DIR, "docs"),
  ];
  // Keys that lint:md and the RAG indexer both require.
  const requiredKeys = ["summary", "status", "priority", "tags"];

  let count = 0;
  for (const dir of dirs) {
    if (!existsSync(dir)) continue;
    walkMdFiles(dir, (absPath) => {
      // FIX: the previous check was absPath.includes("archive/"), which never
      // matched on Windows (backslash separators) and also matched unrelated
      // "*-archive" directories. Normalize to forward slashes and anchor on a
      // whole "archive" path segment (absolute paths always start with a
      // separator, so a top-level archive dir still matches).
      const posixPath = absPath.split("\\").join("/");
      if (posixPath.includes("/archive/")) return; // archived files are left untouched

      const raw = readFileSync(absPath, "utf8");
      const relPath = toRepoRelativePath(cwd, absPath);
      const slug = basename(absPath, ".md");
      // Same normalization for the ticket-dir check — toRepoRelativePath may
      // emit native separators on Windows (TODO confirm it normalizes).
      const isTicket = relPath.split("\\").join("/").includes(`/${TICKET_SUBDIR}/`);
      const hasFrontmatter = raw.startsWith("---\n") || raw.startsWith("---\r\n");

      if (hasFrontmatter) {
        // Frontmatter exists — supplement any missing required keys.
        const parsed = parseFrontMatter(raw);
        const missing = requiredKeys.filter((k) => !parsed.meta[k]);
        if (missing.length === 0) return; // all keys present, skip

        const defaults = {
          summary: parsed.meta.title || parsed.meta.id || slug,
          status: isTicket ? "open" : "active",
          priority: "P3",
          tags: isTicket ? "migrated" : "docs, migrated",
        };
        for (const key of missing) {
          parsed.meta[key] = defaults[key];
        }

        if (!dryRun) {
          // Strip trailing whitespace so lint:md passes after the rewrite.
          const cleanedContent = parsed.content.split("\n").map((l) => l.trimEnd()).join("\n");
          writeFileSync(absPath, stringifyFrontMatter(parsed.meta, cleanedContent), "utf8");
        }
        console.log(`[MIGRATE] ${dryRun ? "Would supplement" : "Supplemented"} frontmatter (${missing.join(", ")}): ${relPath}`);
      } else {
        // No frontmatter at all — inject a minimal block.
        const meta = {
          summary: slug,
          status: isTicket ? "open" : "active",
          priority: "P3",
          tags: isTicket ? "migrated" : "docs, migrated",
        };
        if (isTicket) {
          meta.id = slug;
          meta.title = slug;
          // "YYYY-MM-DD HH:MM:SS" (UTC, since toISOString is UTC-based).
          meta.createdAt = new Date().toISOString().replace("T", " ").split(".")[0];
        }

        if (!dryRun) {
          const cleanedRaw = raw.split("\n").map((l) => l.trimEnd()).join("\n");
          const newContent = stringifyFrontMatter(meta, cleanedRaw);
          writeFileSync(absPath, newContent, "utf8");
        }
        console.log(`[MIGRATE] ${dryRun ? "Would add" : "Added"} frontmatter: ${relPath}`);
      }
      count++;
    });
  }
  if (count > 0) {
    console.log(`[MIGRATE] Frontmatter migration: ${count} file(s) ${dryRun ? "would be " : ""}updated.`);
  }
}
1576
+
1577
/**
 * Builds the text written to ~/.codex/AGENTS.md by syncGlobalCodexInstructions.
 * The returned block is a locator only: it tells an agent to load the local
 * repo pointer and core rules, and explicitly disclaims being a rule file.
 *
 * @returns {string} Markdown block starting with a `---` separator and a
 *   "Managed by DeukAgentFlow" banner so users know it can be removed.
 */
export function buildGlobalCodexInstructions() {
  return `---

## DeukAgentFlow

> Managed by DeukAgentFlow. Remove this section if not installed.

# Global DeukAgentFlow Locator

This file is a locator, not a behavior contract.

- In a repository, load the local \`AGENTS.md\` or \`.deuk-agent/\` pointer first.
- After the local pointer loads \`core-rules/AGENTS.md\`, the core hub owns TDW, RAG, silence, scope, and verification policy.
- Do not summarize this locator or treat it as a second rule file.
`;
}
1593
+
1594
/**
 * Installs/refreshes the global Codex locator at ~/.codex/AGENTS.md.
 * Runs only when the user already has a ~/.codex directory, and skips the
 * write when the file already holds the current locator content so repeated
 * `init` runs do not churn the file's mtime.
 *
 * NOTE(review): this overwrites ~/.codex/AGENTS.md wholesale instead of
 * merging a managed block — confirm users are not expected to keep personal
 * content in that file.
 *
 * @param {boolean} dryRun - When true, nothing is written.
 */
function syncGlobalCodexInstructions(dryRun) {
  const codexDir = join(homedir(), ".codex");
  if (!existsSync(codexDir)) return;

  const target = join(codexDir, "AGENTS.md");
  const content = buildGlobalCodexInstructions();

  // Idempotence guard: nothing to do when the locator is already current.
  if (existsSync(target) && readFileSync(target, "utf8") === content) return;

  if (!dryRun) {
    writeFileSync(target, content, "utf8");
    console.log(`global codex instructions synced: ${target}`);
  }
}
1606
+
1607
/**
 * Renders the managed pointer ("spoke") text for one agent-tool surface.
 * The body is identical for all tools; only the wrapper differs: `mdc`
 * format gets Cursor-style frontmatter, everything else gets a markdown
 * section with the managed-by banner.
 *
 * @param {{format: string}} spoke - Spoke descriptor; only `format` is read.
 * @param {string} bundleRoot - Installed package root holding core-rules/.
 * @returns {string} Complete pointer file content.
 */
export function generateSpokeContent(spoke, bundleRoot) {
  const globalRulesPath = join(bundleRoot, "core-rules", "AGENTS.md");

  // NOTE(review): `file://${globalRulesPath}` yields a two-slash URL and a
  // backslash path on Windows — confirm consumers treat it as plain text.
  const body = `# Deuk Agent Flow

**[MANDATORY — TOOL CALL REQUIRED]** Core rules are at: [AGENTS.md](file://${globalRulesPath})

This pointer is a thin bootstrap, not a second workflow contract.

1. FIRST tool call: read the core rules file above and internally note its frontmatter version.
2. Then read local \`PROJECT_RULE.md\` and internally identify applicable DC-* rules.
3. After the core hub is loaded, \`core-rules/AGENTS.md\` is the DeukAgentFlow SSoT for TDW, RAG, silence, scope, and verification.

Do not print pointer/core metadata, version, DC-* lists, progress commentary, or interim summaries. Before the final answer, only the single required ticket-start line, blockers, explicit user-requested output, or explicit command results may appear. During approved_execution, command_running, or search_running, stay silent unless the user explicitly asks for live narration or a blocker/user decision must be surfaced.
`;

  if (spoke.format !== "mdc") {
    return `---\n\n## DeukAgentFlow\n\n> Managed by DeukAgentFlow. Remove this section if not installed.\n\n${body}\n`;
  }

  return `---
description: "Deuk Agent Flow - Project conventions and ticket workflow"
globs: ["**/*"]
alwaysApply: true
---
${body}`;
}
1633
+
1634
/**
 * Public façade over the internal managed-block merge: splices the
 * DeukAgentFlow-managed section into `existingContent`. Delegates entirely to
 * mergeManagedBlock (defined elsewhere in this file); exported so other
 * modules can reuse the same merge behavior as the spoke deployer.
 *
 * @param {string} existingContent - Current file text (may be empty).
 * @param {string} managedContent - Freshly generated managed section.
 * @returns {string} Merged file content.
 */
export function mergeManagedRuleContent(existingContent, managedContent) {
  return mergeManagedBlock(existingContent, managedContent);
}
1637
+
1638
/**
 * Installs or refreshes every per-tool pointer ("spoke") file listed in
 * SPOKE_REGISTRY: removes legacy root files, decides per spoke whether it
 * applies to this workspace, then merges the managed pointer content into the
 * spoke's target file.
 *
 * @param {string} cwd - Workspace root.
 * @param {string} bundleRoot - Installed package root.
 * @param {boolean} dryRun - When true, deletions/writes are skipped but the
 *   same log lines are printed.
 * @param {string[]} selectedTools - Explicit tool ids chosen by the user.
 */
function deploySpokePointers(cwd, bundleRoot, dryRun, selectedTools = []) {
  for (const spoke of SPOKE_REGISTRY) {
    const legacyPath = spoke.legacy ? join(cwd, spoke.legacy) : null;
    // Captured BEFORE deletion: a previously-installed legacy file also forces
    // the spoke to be (re)installed below.
    const legacyExisted = Boolean(legacyPath && existsSync(legacyPath));

    // Legacy root files are replaced by canonical spoke targets.
    if (legacyPath) {
      if (legacyExisted) {
        if (!dryRun) unlinkSync(legacyPath);
        console.log(`[CLEANUP] removed legacy: ${spoke.legacy}`);
      }
    }

    // The antigravity spoke is installed by default whenever the agent hub
    // directory exists, even without explicit selection/detection.
    const shouldInstallDefaultHub = spoke.id === "antigravity" && existsSync(join(cwd, AGENT_ROOT_DIR));
    // Skip spokes that were not selected, not detected, had no legacy file,
    // and are not the default hub.
    if (!isSelectedTool(selectedTools, spoke.id) && !spoke.detect(cwd, selectedTools) && !legacyExisted && !shouldInstallDefaultHub) continue;

    const targetPath = join(cwd, spoke.target);
    const targetDir = dirname(targetPath);
    const managedContent = generateSpokeContent(spoke, bundleRoot);
    const existingContent = existsSync(targetPath) ? safeReadText(targetPath) : "";
    // Normalize stale generated content first, then merge the managed block in.
    const normalizedContent = normalizeExistingSpokeContent(existingContent, managedContent, bundleRoot);
    const nextContent = mergeManagedBlock(normalizedContent, managedContent);
    if (existingContent === nextContent) {
      // Already up to date — log and move on without touching the file.
      console.log(`spoke synced: ${spoke.target} (${spoke.id})`);
      continue;
    }

    if (!dryRun) {
      ensureWritableDirectory(targetDir, cwd, dryRun, `spoke target conflict resolved for ${spoke.id}`);
      mkdirSync(targetDir, { recursive: true });
      writeFileSync(targetPath, nextContent, "utf8");
    }
    console.log(`spoke synced: ${spoke.target} (${spoke.id})`);
  }
}
1673
+
1674
/**
 * Deletes stale duplicate copies of the rule files from earlier layouts.
 *
 * Note: AGENTS.md is now the Antigravity spoke target — do NOT delete it here.
 * CLAUDE.md/GEMINI.md legacy cleanup is handled by deploySpokePointers
 * (spoke.legacy field). .gemini is the Antigravity platform directory —
 * preserve it.
 *
 * @param {string} cwd - Workspace root.
 * @param {boolean} dryRun - When true, logs what would be removed only.
 */
function removeDuplicateRuleCopies(cwd, dryRun) {
  const staleCopies = [
    join(cwd, AGENT_ROOT_DIR, "rules"),
    join(cwd, ".cursor", "rules", "deuk-agent-rule-multi-ai-workflow.mdc"),
  ].filter((candidate) => existsSync(candidate));

  for (const stale of staleCopies) {
    if (!dryRun) rmSync(stale, { recursive: true, force: true });
    console.log(`[CLEANUP] removed legacy/duplicate: ${toRepoRelativePath(cwd, stale)}`);
  }
}
1689
+
1690
/**
 * Entry point for the init command: enforces the workflow-mode gate, installs
 * optional source-mode shims, syncs the global Codex locator, reports MCP
 * status, then initializes every discovered workspace (submodules plus the
 * root), persisting the init config on first run.
 *
 * @param {object} opts - CLI options (cwd, dryRun, agentTools, sourceShims,
 *   workflow/approval flags, …).
 * @param {string} bundleRoot - Installed package root.
 * @throws {Error} When not a dry run and the saved/derived workflow mode does
 *   not permit execution ("[WORKFLOW BLOCKED] …").
 */
export async function runInit(opts, bundleRoot) {
  const savedConfig = loadInitConfig(opts.cwd) || {};
  const workflowMode = resolveWorkflowMode(opts, savedConfig);
  const executionEnabled = isWorkflowExecute({ ...opts, workflowMode }, savedConfig);
  const ignoreDirs = savedConfig.ignoreDirs;
  // CLI selection wins over the previously saved selection.
  const selectedTools = opts.agentTools || savedConfig.agentTools || [];

  // Mutation gate: only --dry-run is allowed while plan mode is active.
  if (!opts.dryRun && !executionEnabled) {
    throw new Error(
      `[WORKFLOW BLOCKED] plan mode is active for ${opts.cwd}. Re-run with --workflow execute or --approval approved to apply file mutations. Use --dry-run for preparation only.`
    );
  }

  // Source-mode command shims (opt-out via opts.sourceShims === false).
  if (opts.sourceShims !== false) {
    const sourceShimResult = ensureSourceModeCommandShims(bundleRoot, { dryRun: opts.dryRun });
    if (sourceShimResult.created.length > 0) {
      console.log(`[SOURCE MODE] Installed command shims in ${sourceShimResult.binDir}: ${sourceShimResult.created.join(", ")}`);
      if (!sourceShimResult.onPath) {
        console.warn(`[SOURCE MODE] ${sourceShimResult.binDir} is not on PATH; add it before using deuk-agent-flow by name.`);
      }
    }
  }

  // 0. Sync Global Codex Instructions (~/.codex/AGENTS.md locator).
  syncGlobalCodexInstructions(opts.dryRun);

  // 0.1 MCP / Phase 0 Status Check (informational; colored via ANSI escapes).
  const mcpActive = await isMcpActive(opts.cwd);
  console.log(`\n[POLICY] MCP Status: ${mcpActive ? "\x1b[32mACTIVE\x1b[0m" : "\x1b[33mINACTIVE\x1b[0m"}`);
  if (mcpActive) {
    console.log(`[POLICY] Phase 0 RAG validation is \x1b[32mENFORCED\x1b[0m for ticket creation.\n`);
  } else {
    console.log(`[POLICY] Running in offline/disconnected mode.\n`);
  }

  // Initialize every discovered workspace; ensure the root itself is included.
  const submodules = discoverAllWorkspaces(opts.cwd, ignoreDirs);
  if (!submodules.includes(opts.cwd)) submodules.push(opts.cwd);

  for (const subCwd of submodules) {
    try {
      await initSingleWorkspace(subCwd, opts, bundleRoot, selectedTools);
    } catch (err) {
      // One failing workspace must not abort the others.
      console.error(`[ERROR] Failed to initialize workspace ${basename(subCwd)}: ${err.message}`);
    }
  }

  // Persist the init config on first run only (never overwrite an existing one).
  if (!loadInitConfig(opts.cwd)) {
    writeInitConfig(opts.cwd, opts);
  }
}
1740
+
1741
/**
 * Runs the full init pipeline for one workspace, in a fixed order: legacy
 * migrations, index normalization, frontmatter migration, archive/docs
 * canonicalization, spoke pointer deployment, PROJECT_RULE.md bootstrap, and
 * template sync. Order matters — later steps assume the earlier
 * normalizations have run.
 *
 * @param {string} subCwd - Workspace root being initialized.
 * @param {object} opts - CLI options (dryRun and ticket/gitignore settings used here).
 * @param {string} bundleRoot - Installed package root.
 * @param {string[]} selectedTools - Tool ids forwarded to deploySpokePointers.
 */
async function initSingleWorkspace(subCwd, opts, bundleRoot, selectedTools) {
  console.log(`\nInitializing ${basename(subCwd)}...`);

  // 1. Migration & Directory Setup
  migrateLegacyStructure(subCwd, opts.dryRun);
  migrateHtmlMarkersToHeadings(subCwd, opts.dryRun);
  canonicalizeAgentPointer(subCwd, bundleRoot, opts.dryRun);
  ensureTicketDirAndGitignore({ ...opts, cwd: subCwd });

  // 2. Normalize INDEX.json paths (fix stale paths)
  normalizeTicketPaths(subCwd, { silent: false });

  // 2.5. Frontmatter migration (add missing frontmatter to deuk-agent docs/tickets)
  migrateMissingFrontmatter(subCwd, opts.dryRun);

  // 2.6. Deterministic archive/docs normalization
  canonicalizeTicketArchivePath(subCwd, opts.dryRun);
  canonicalizeDocsArchiveBuckets(subCwd, opts.dryRun);
  enforceCanonicalAgentLayout(subCwd, opts.dryRun);
  mergeSeparatedDocsIntoTickets(subCwd, opts.dryRun);
  // Force-rebuild the ticket index after the normalizations above moved files.
  if (!opts.dryRun) {
    const rebuiltIndex = rebuildTicketIndexFromTopicFilesIfNeeded(subCwd, { force: true });
    writeTicketIndexJson(subCwd, rebuiltIndex, { force: true });
  }

  // 3. Spoke Pointers (e.g. .cursor/rules/deuk-agent.mdc)
  removeDuplicateRuleCopies(subCwd, opts.dryRun);
  deploySpokePointers(subCwd, bundleRoot, opts.dryRun, selectedTools);

  // 4. Project Rule Setup (PROJECT_RULE.md) — created only if absent, and only
  // when the bundle actually ships the template.
  const projectRulePath = join(subCwd, "PROJECT_RULE.md");
  if (!existsSync(projectRulePath)) {
    const templatePath = join(bundleRoot, "templates", "PROJECT_RULE.md");
    if (existsSync(templatePath)) {
      if (!opts.dryRun) copyFileSync(templatePath, projectRulePath);
      console.log(`PROJECT_RULE.md: created from template`);
    }
  }

  // 5. Templates Sync (.deuk-agent/templates/)
  syncTemplates(subCwd, bundleRoot, opts.dryRun);
  syncSkillTemplates(subCwd, bundleRoot, opts.dryRun);
  canonicalizeGeneratedCommandReferences(subCwd, bundleRoot, opts.dryRun);
  canonicalizeRecursiveInitSurfaces(subCwd, bundleRoot, opts.dryRun);
}
1786
+
1787
+ export function runMerge(opts, bundleRoot) {
1788
+ const savedConfig = loadInitConfig(opts.cwd) || {};
1789
+ const workflowMode = resolveWorkflowMode(opts, savedConfig);
1790
+ const executionEnabled = isWorkflowExecute({ ...opts, workflowMode }, savedConfig);
1791
+ if (!opts.dryRun && !executionEnabled) {
1792
+ throw new Error(
1793
+ `[WORKFLOW BLOCKED] plan mode is active for ${opts.cwd}. Re-run with --workflow execute or --approval approved to apply file mutations. Use --dry-run for preparation only.`
1794
+ );
1795
+ }
1796
+
1797
+ syncTemplates(opts.cwd, bundleRoot, opts.dryRun);
1798
+ syncSkillTemplates(opts.cwd, bundleRoot, opts.dryRun);
1799
+ }