clawstrap 1.4.1 → 1.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (2)
  1. package/dist/index.cjs +861 -213
  2. package/package.json +3 -1
package/dist/index.cjs CHANGED
@@ -88,6 +88,7 @@ __export(writers_exports, {
88
88
  appendToFutureConsiderations: () => appendToFutureConsiderations,
89
89
  appendToGotchaLog: () => appendToGotchaLog,
90
90
  appendToMemory: () => appendToMemory,
91
+ appendToOpenThreads: () => appendToOpenThreads,
91
92
  writeConventions: () => writeConventions
92
93
  });
93
94
  function formatEntry(source, text) {
@@ -97,11 +98,11 @@ function formatEntry(source, text) {
97
98
  ${text}`;
98
99
  }
99
100
  function appendToMemory(rootDir, entries, source) {
100
- const memoryPath = import_node_path15.default.join(rootDir, ".claude", "memory", "MEMORY.md");
101
- import_node_fs14.default.mkdirSync(import_node_path15.default.dirname(memoryPath), { recursive: true });
101
+ const memoryPath = import_node_path9.default.join(rootDir, ".claude", "memory", "MEMORY.md");
102
+ import_node_fs8.default.mkdirSync(import_node_path9.default.dirname(memoryPath), { recursive: true });
102
103
  let existingContent = "";
103
- if (import_node_fs14.default.existsSync(memoryPath)) {
104
- existingContent = import_node_fs14.default.readFileSync(memoryPath, "utf-8");
104
+ if (import_node_fs8.default.existsSync(memoryPath)) {
105
+ existingContent = import_node_fs8.default.readFileSync(memoryPath, "utf-8");
105
106
  }
106
107
  const existingEntries = parseMemoryEntries(existingContent);
107
108
  const toAppend = [];
@@ -112,34 +113,48 @@ function appendToMemory(rootDir, entries, source) {
112
113
  }
113
114
  if (toAppend.length > 0) {
114
115
  const appendText = "\n" + toAppend.join("\n") + "\n";
115
- import_node_fs14.default.appendFileSync(memoryPath, appendText, "utf-8");
116
+ import_node_fs8.default.appendFileSync(memoryPath, appendText, "utf-8");
116
117
  }
118
+ return toAppend.length;
117
119
  }
118
120
  function appendToGotchaLog(rootDir, entries) {
119
- const logPath = import_node_path15.default.join(rootDir, ".claude", "gotcha-log.md");
120
- import_node_fs14.default.mkdirSync(import_node_path15.default.dirname(logPath), { recursive: true });
121
- if (!import_node_fs14.default.existsSync(logPath)) {
122
- import_node_fs14.default.writeFileSync(
121
+ const logPath = import_node_path9.default.join(rootDir, ".claude", "gotcha-log.md");
122
+ import_node_fs8.default.mkdirSync(import_node_path9.default.dirname(logPath), { recursive: true });
123
+ if (!import_node_fs8.default.existsSync(logPath)) {
124
+ import_node_fs8.default.writeFileSync(
123
125
  logPath,
124
126
  "# Gotcha Log\n\nIncident log \u2014 why rules exist.\n\n",
125
127
  "utf-8"
126
128
  );
127
129
  }
128
130
  const toAppend = entries.map((e) => formatEntry("session", e)).join("\n");
129
- import_node_fs14.default.appendFileSync(logPath, "\n" + toAppend + "\n", "utf-8");
131
+ import_node_fs8.default.appendFileSync(logPath, "\n" + toAppend + "\n", "utf-8");
130
132
  }
131
133
  function appendToFutureConsiderations(rootDir, entries) {
132
- const fcPath = import_node_path15.default.join(rootDir, ".claude", "future-considerations.md");
133
- import_node_fs14.default.mkdirSync(import_node_path15.default.dirname(fcPath), { recursive: true });
134
- if (!import_node_fs14.default.existsSync(fcPath)) {
135
- import_node_fs14.default.writeFileSync(
134
+ const fcPath = import_node_path9.default.join(rootDir, ".claude", "future-considerations.md");
135
+ import_node_fs8.default.mkdirSync(import_node_path9.default.dirname(fcPath), { recursive: true });
136
+ if (!import_node_fs8.default.existsSync(fcPath)) {
137
+ import_node_fs8.default.writeFileSync(
136
138
  fcPath,
137
139
  "# Future Considerations\n\nDeferred ideas and potential improvements.\n\n",
138
140
  "utf-8"
139
141
  );
140
142
  }
141
143
  const toAppend = entries.map((e) => formatEntry("session", e)).join("\n");
142
- import_node_fs14.default.appendFileSync(fcPath, "\n" + toAppend + "\n", "utf-8");
144
+ import_node_fs8.default.appendFileSync(fcPath, "\n" + toAppend + "\n", "utf-8");
145
+ }
146
+ function appendToOpenThreads(rootDir, entries) {
147
+ const otPath = import_node_path9.default.join(rootDir, ".claude", "memory", "open-threads.md");
148
+ import_node_fs8.default.mkdirSync(import_node_path9.default.dirname(otPath), { recursive: true });
149
+ if (!import_node_fs8.default.existsSync(otPath)) {
150
+ import_node_fs8.default.writeFileSync(
151
+ otPath,
152
+ "# Open Threads\n\nUnresolved questions and next steps.\n\n",
153
+ "utf-8"
154
+ );
155
+ }
156
+ const toAppend = entries.map((e) => formatEntry("session", e)).join("\n");
157
+ import_node_fs8.default.appendFileSync(otPath, "\n" + toAppend + "\n", "utf-8");
143
158
  }
144
159
  function buildAutoBlock(sections) {
145
160
  const lines = [AUTO_START];
@@ -177,14 +192,19 @@ function buildAutoBlock(sections) {
177
192
  } else {
178
193
  lines.push("- No comment patterns detected.");
179
194
  }
195
+ if (sections.architecture && sections.architecture.length > 0) {
196
+ lines.push("");
197
+ lines.push("## Architecture & Design Patterns");
198
+ for (const item of sections.architecture) lines.push(`- ${item}`);
199
+ }
180
200
  lines.push(AUTO_END);
181
201
  return lines.join("\n");
182
202
  }
183
203
  function writeConventions(rootDir, sections) {
184
- const conventionsPath = import_node_path15.default.join(rootDir, ".claude", "rules", "conventions.md");
185
- import_node_fs14.default.mkdirSync(import_node_path15.default.dirname(conventionsPath), { recursive: true });
204
+ const conventionsPath = import_node_path9.default.join(rootDir, ".claude", "rules", "conventions.md");
205
+ import_node_fs8.default.mkdirSync(import_node_path9.default.dirname(conventionsPath), { recursive: true });
186
206
  const autoBlock = buildAutoBlock(sections);
187
- if (!import_node_fs14.default.existsSync(conventionsPath)) {
207
+ if (!import_node_fs8.default.existsSync(conventionsPath)) {
188
208
  const content = [
189
209
  "# Conventions",
190
210
  "",
@@ -195,28 +215,28 @@ function writeConventions(rootDir, sections) {
195
215
  "<!-- Add manual conventions below this line -->",
196
216
  ""
197
217
  ].join("\n");
198
- import_node_fs14.default.writeFileSync(conventionsPath, content, "utf-8");
218
+ import_node_fs8.default.writeFileSync(conventionsPath, content, "utf-8");
199
219
  return;
200
220
  }
201
- const existing = import_node_fs14.default.readFileSync(conventionsPath, "utf-8");
221
+ const existing = import_node_fs8.default.readFileSync(conventionsPath, "utf-8");
202
222
  const startIdx = existing.indexOf(AUTO_START);
203
223
  const endIdx = existing.indexOf(AUTO_END);
204
224
  if (startIdx === -1 || endIdx === -1) {
205
225
  const updated = existing.trimEnd() + "\n\n" + autoBlock + "\n";
206
- import_node_fs14.default.writeFileSync(conventionsPath, updated, "utf-8");
226
+ import_node_fs8.default.writeFileSync(conventionsPath, updated, "utf-8");
207
227
  } else {
208
228
  const before = existing.slice(0, startIdx);
209
229
  const after = existing.slice(endIdx + AUTO_END.length);
210
230
  const updated = before + autoBlock + after;
211
- import_node_fs14.default.writeFileSync(conventionsPath, updated, "utf-8");
231
+ import_node_fs8.default.writeFileSync(conventionsPath, updated, "utf-8");
212
232
  }
213
233
  }
214
- var import_node_fs14, import_node_path15, AUTO_START, AUTO_END;
234
+ var import_node_fs8, import_node_path9, AUTO_START, AUTO_END;
215
235
  var init_writers = __esm({
216
236
  "src/watch/writers.ts"() {
217
237
  "use strict";
218
- import_node_fs14 = __toESM(require("fs"), 1);
219
- import_node_path15 = __toESM(require("path"), 1);
238
+ import_node_fs8 = __toESM(require("fs"), 1);
239
+ import_node_path9 = __toESM(require("path"), 1);
220
240
  init_dedup();
221
241
  AUTO_START = "<!-- CLAWSTRAP:AUTO -->";
222
242
  AUTO_END = "<!-- CLAWSTRAP:END -->";
@@ -263,12 +283,20 @@ var ClawstrapConfigSchema = import_zod.z.object({
263
283
  scan: import_zod.z.object({
264
284
  intervalDays: import_zod.z.number().default(7)
265
285
  }).default({}),
286
+ git: import_zod.z.object({
287
+ pollIntervalMinutes: import_zod.z.number().default(5)
288
+ }).default({}),
289
+ synthesis: import_zod.z.object({
290
+ enabled: import_zod.z.boolean().default(false),
291
+ triggerEveryN: import_zod.z.number().default(10)
292
+ }).default({}),
266
293
  silent: import_zod.z.boolean().default(false)
267
294
  }).optional(),
268
295
  watchState: import_zod.z.object({
269
296
  lastGitCommit: import_zod.z.string().optional(),
270
297
  lastScanAt: import_zod.z.string().optional(),
271
- lastTranscriptAt: import_zod.z.string().optional()
298
+ lastTranscriptAt: import_zod.z.string().optional(),
299
+ entriesSinceLastSynthesis: import_zod.z.coerce.number().optional()
272
300
  }).optional(),
273
301
  lastExport: LastExportSchema
274
302
  });
@@ -445,7 +473,7 @@ var OUTPUT_MANIFEST = [
445
473
  var EMPTY_DIRS = ["tmp", "research", "context", "artifacts"];
446
474
 
447
475
  // src/templates/governance-file.md.tmpl
448
- var governance_file_md_default = "# {%governanceFile%} \u2014 Master Governance Rules\n> **Workspace**: {%workspaceName%} | **Generated**: {%generatedDate%} | **Status**: active\n> Loaded every session. Keep lean. Move details to skills or rules files.\n\n---\n\n## Workflow Rules\n\n**Approval-first, always.**\nPlan work and get explicit approval before executing. No speculative actions.\nIf scope changes mid-task, pause and re-confirm.\n\n**If it's not on disk, it didn't happen.**\nSave findings immediately, not at session end. Flush {%flushCadence%}.\nWrite corrections to durable locations before applying them.\n{%#if hasSubagents%}Subagents return file paths, not raw data.{%/if%}\n\n**Quality > context cleanliness > speed > token cost.**\nQuality failures require full rework (100% waste). Never trade quality for tokens.\n{%#if isResearch%}\n**Research-specific:**\n- Cite sources for every claim \u2014 no unsourced assertions\n- Write findings to file immediately, not at session end\n- Separate facts from interpretation in all output\n{%/if%}\n{%#if isContent%}\n**Content-specific:**\n- Every piece of content follows the draft \u2192 review \u2192 approve cycle\n- Never publish or finalize without explicit approval\n- Track all revision feedback in a dedicated file\n{%/if%}\n{%#if isDataProcessing%}\n**Data processing-specific:**\n- Define batch size before starting \u2014 never process unbounded sets\n- Validate schema on every write\n- Log every transformation step for auditability\n{%/if%}\n\n---\n\n## Persistence Hierarchy\n\nFrom most ephemeral to most durable:\n\n| Layer | Location | Loaded When |\n|-------|----------|-------------|\n| Conversation | (in-context) | Always \u2014 volatile |\n| Temp files | `tmp/` | Per-task, gitignored |\n{%#if sessionHandoff%}| Memory | `{%systemDir%}/memory/` | On demand |\n{%/if%}| Skills | `{%systemDir%}/skills/*/SKILL.md` | When triggered |\n| Rules | `{%systemDir%}/rules/*.md` | Every session |\n| 
**{%governanceFile%}** | `./{%governanceFile%}` | **Every session** |\n\n---\n\n## Context Discipline\n\n- Flush working state to file {%flushCadence%}\n{%#if hasSubagents%}- Subagents write output to `tmp/{task}/{name}.json`; return one-line receipt\n- Main session reads subagent files ONLY if needed for the next step\n- Never hold raw batch results in conversation \u2014 write to disk first\n{%/if%}- Before batch work: write execution plan to file (survives context loss)\n\n---\n{%#if sessionHandoff%}\n## Session Handoff Checklist\n\nRun this at every session end (mandatory, not optional):\n\n1. Save all work to SSOT files\n2. Sync derived files (rebuild from SSOTs)\n3. SSOT integrity check (no duplicates, no stale data)\n4. Update progress tracker\n5. Write next-session plan (with pre-execution hooks listed)\n6. Launch QC on work done this session\n\n---\n{%/if%}\n\n## Security Rules\n\n- Never read `.env` files or echo credentials\n- Never install third-party MCP servers/plugins without explicit approval\n- Never write outside this workspace root \u2014 use project-local `tmp/`, not system `/tmp/`\n- Approved tools only \u2014 ask before using new tools/APIs\n\n---\n\n## Quality Rules\n{%#if isProductionQuality%}\n- QC is a structural gate, not an optional post-step\n- Run spot-checks after every 5 items in batch work (Ralph Loop)\n- Combined agents (extract + classify) beat split agents on quality\n- Disagreements between agents reveal ambiguity \u2014 escalate, don't suppress\n{%/if%}\n{%#if hasQualityGates%}\n{%#unless isProductionQuality%}- QC is a structural gate, not an optional post-step\n- Run QC checkpoints at regular intervals during batch work\n- All results must be reviewed before being marked complete\n{%/unless%}\n{%/if%}\n{%#unless hasQualityGates%}- Run a quick check before finishing any task\n- Review outputs before marking work complete\n{%/unless%}\n\n---\n\n## Pointers to Other Layers\n\n- Rules: `{%systemDir%}/rules/` \u2014 domain-specific 
rules loaded every session\n- Skills: `{%systemDir%}/skills/SKILL_REGISTRY.md` \u2014 index of all skills\n{%#if hasSubagents%}- Agents: `{%systemDir%}/agents/` \u2014 subagent definitions with governance baked in\n{%/if%}- Gotchas: `{%systemDir%}/gotcha-log.md` \u2014 incident log (why rules exist)\n- Future: `{%systemDir%}/future-considerations.md` \u2014 deferred ideas\n{%#if sddEnabled%}\n---\n\n## Spec-Driven Development\n\nThis workspace uses SDD. Before implementing any feature:\n\n1. Write a spec \u2192 `specs/{name}.md` (use `/spec` or copy `specs/_template.md`)\n2. Get explicit user approval\n3. Implement from the approved spec \u2014 not from the conversation\n\nRule details: `{%systemDir%}/rules/sdd.md`\n{%/if%}\n";
476
+ var governance_file_md_default = "# {%governanceFile%} \u2014 Master Governance Rules\n> **Workspace**: {%workspaceName%} | **Generated**: {%generatedDate%} | **Status**: active\n> Loaded every session. Keep lean. Move details to skills or rules files.\n\n---\n\n## Workflow Rules\n\n**Approval-first, always.**\nPlan work and get explicit approval before executing. No speculative actions.\nIf scope changes mid-task, pause and re-confirm.\n\n**If it's not on disk, it didn't happen.**\nSave findings immediately, not at session end. Flush {%flushCadence%}.\nWrite corrections to durable locations before applying them.\n{%#if hasSubagents%}Subagents return file paths, not raw data.{%/if%}\n\n**Quality > context cleanliness > speed > token cost.**\nQuality failures require full rework (100% waste). Never trade quality for tokens.\n{%#if isResearch%}\n**Research-specific:**\n- Cite sources for every claim \u2014 no unsourced assertions\n- Write findings to file immediately, not at session end\n- Separate facts from interpretation in all output\n{%/if%}\n{%#if isContent%}\n**Content-specific:**\n- Every piece of content follows the draft \u2192 review \u2192 approve cycle\n- Never publish or finalize without explicit approval\n- Track all revision feedback in a dedicated file\n{%/if%}\n{%#if isDataProcessing%}\n**Data processing-specific:**\n- Define batch size before starting \u2014 never process unbounded sets\n- Validate schema on every write\n- Log every transformation step for auditability\n{%/if%}\n\n---\n\n## Persistence Hierarchy\n\nFrom most ephemeral to most durable:\n\n| Layer | Location | Loaded When |\n|-------|----------|-------------|\n| Conversation | (in-context) | Always \u2014 volatile |\n| Temp files | `tmp/` | Per-task, gitignored |\n{%#if sessionHandoff%}| Memory | `{%systemDir%}/memory/` | On demand |\n{%/if%}| Skills | `{%systemDir%}/skills/*/SKILL.md` | When triggered |\n| Rules | `{%systemDir%}/rules/*.md` | Every session |\n| 
**{%governanceFile%}** | `./{%governanceFile%}` | **Every session** |\n\n---\n\n## Directory Map\n\n| Directory | Purpose | When Claude writes here |\n|-----------|---------|------------------------|\n| `artifacts/` | Architecture docs, ADRs, system overviews | After major design decisions; `artifacts/architecture.md` is the living system doc |\n| `context/` | Execution plans and session checkpoints | Before batch work \u2192 `context/plan-{date}-{task}.md`; every 5 ops \u2192 `context/checkpoint-{date}-{task}.md`; wrap-up \u2192 `context/next-session.md` |\n{%#if sessionHandoff%}| `projects/` | Active sub-projects (copy `projects/_template/`) | When a feature track needs its own `process.md` |\n{%/if%}| `research/` | Reference material from external sources | When reading specs, docs, or papers worth keeping |\n| `tmp/` | Subagent output, session summaries (gitignored) | Summaries \u2192 `tmp/sessions/YYYY-MM-DD-HHmm.md`; subagent output \u2192 `tmp/{task}/` |\n| `{%systemDir%}/memory/` | LLM-processed governance (fed by watch daemon) | Do not write directly \u2014 watch daemon only |\n\n---\n\n## Context Discipline\n\n- Flush working state to file {%flushCadence%}\n{%#if hasSubagents%}- Subagents write output to `tmp/{task}/{name}.json`; return one-line receipt\n- Main session reads subagent files ONLY if needed for the next step\n- Never hold raw batch results in conversation \u2014 write to disk first\n{%/if%}- Before batch work: write execution plan to `context/plan-{date}-{task}.md` (survives context loss)\n\n---\n{%#if sessionHandoff%}\n## Session Handoff Checklist\n\nRun this at every session end (mandatory, not optional):\n\n1. Save all work to SSOT files\n2. Sync derived files (rebuild from SSOTs)\n3. SSOT integrity check (no duplicates, no stale data)\n4. Update progress tracker \u2192 `context/progress-{date}.md`\n5. Write next-session plan \u2192 `context/next-session.md`\n6. 
Launch QC on work done this session \u2192 write results to `context/qc-{date}.md`\n\n---\n{%/if%}\n\n## Security Rules\n\n- Never read `.env` files or echo credentials\n- Never install third-party MCP servers/plugins without explicit approval\n- Never write outside this workspace root \u2014 use project-local `tmp/`, not system `/tmp/`\n- Approved tools only \u2014 ask before using new tools/APIs\n\n---\n\n## Quality Rules\n{%#if isProductionQuality%}\n- QC is a structural gate, not an optional post-step\n- Run spot-checks after every 5 items in batch work (Ralph Loop)\n- Combined agents (extract + classify) beat split agents on quality\n- Disagreements between agents reveal ambiguity \u2014 escalate, don't suppress\n{%/if%}\n{%#if hasQualityGates%}\n{%#unless isProductionQuality%}- QC is a structural gate, not an optional post-step\n- Run QC checkpoints at regular intervals during batch work\n- All results must be reviewed before being marked complete\n{%/unless%}\n{%/if%}\n{%#unless hasQualityGates%}- Run a quick check before finishing any task\n- Review outputs before marking work complete\n{%/unless%}\n\n---\n\n## Pointers to Other Layers\n\n- Rules: `{%systemDir%}/rules/` \u2014 domain-specific rules loaded every session\n- Skills: `{%systemDir%}/skills/SKILL_REGISTRY.md` \u2014 index of all skills\n{%#if hasSubagents%}- Agents: `{%systemDir%}/agents/` \u2014 subagent definitions with governance baked in\n{%/if%}- Gotchas: `{%systemDir%}/gotcha-log.md` \u2014 incident log (why rules exist)\n- Future: `{%systemDir%}/future-considerations.md` \u2014 deferred ideas\n{%#if sddEnabled%}\n---\n\n## Spec-Driven Development\n\nThis workspace uses SDD. Before implementing any feature:\n\n1. Write a spec \u2192 `specs/{name}.md` (use `/spec` or copy `specs/_template.md`)\n2. Get explicit user approval\n3. Implement from the approved spec \u2014 not from the conversation\n\nRule details: `{%systemDir%}/rules/sdd.md`\n{%/if%}\n";
449
477
 
450
478
  // src/templates/getting-started.md.tmpl
451
479
  var getting_started_md_default = "# Getting Started \u2014 {%workspaceName%}\n> Generated by Clawstrap v{%clawstrapVersion%} on {%generatedDate%}\n\nWelcome to your AI agent workspace. This workspace is configured for\n**{%workloadLabel%}** workflows.\n\n---\n\n## Quick Start\n\n1. **Read `{%governanceFile%}`** \u2014 this is your master governance file. It loads\n automatically every session and defines the rules your AI assistant follows.\n\n2. **Check `{%systemDir%}/rules/`** \u2014 these rule files also load every session.\n They contain detailed procedures for context discipline, approval workflows,\n and quality gates.\n\n3. **Start working** \u2014 open a Claude Code session in this directory. The\n governance files will load automatically.\n\n---\n\n## What's in This Workspace\n\n```\n{%governanceFile%} \u2190 Master governance (always loaded)\nGETTING_STARTED.md \u2190 You are here\n.clawstrap.json \u2190 Your workspace configuration\n\n{%systemDir%}/\n rules/ \u2190 Auto-loaded rules (every session)\n skills/ \u2190 Skill definitions (loaded when triggered)\n{%#if hasSubagents%} agents/ \u2190 Agent definitions\n{%/if%} gotcha-log.md \u2190 Incident log\n future-considerations.md \u2190 Deferred ideas\n{%#if sessionHandoff%} memory/ \u2190 Persistent memory across sessions\n{%/if%}\nprojects/\n _template/ \u2190 Template for new projects\n\ntmp/ \u2190 Temporary files (gitignored)\nresearch/ \u2190 Reference material\ncontext/ \u2190 Session checkpoints\nartifacts/ \u2190 Durable output\n{%#if sddEnabled%}specs/\n _template.md \u2190 Spec template (copy for each feature)\n{%/if%}```\n\n---\n\n## Key Principles\n\nThis workspace enforces five principles:\n\n1. **If it's not on disk, it didn't happen.** Context windows compress. Sessions\n end. The only thing that persists is files. Flush work {%flushCadence%}.\n\n2. **Approval-first, always.** Plan work, get approval, then execute. No\n speculative actions.\n\n3. 
**Quality is a structural gate.** Quality checks happen during work, not after.\n{%#if hasQualityGates%} See `{%systemDir%}/rules/quality-gates.md` for the full procedure.{%/if%}\n\n4. **Disagreements reveal ambiguity.** When agents disagree, the rules are\n ambiguous. Escalate and clarify \u2014 don't suppress.\n\n5. **One decision at a time.** Binary decisions made sequentially compound into\n reliable outcomes.\n{%#if hasSubagents%}\n\n---\n\n## Working with Multiple Agents\n\nYour workspace is configured for multi-agent workflows. Key rules:\n\n- **Subagents write to `tmp/`** \u2014 they return file paths and one-line summaries,\n never raw data in conversation.\n- **The main session is an orchestrator** \u2014 it tracks progress, launches agents,\n and checks output files. It does not hold subagent data in memory.\n- **Agent definitions live in `{%systemDir%}/agents/`** \u2014 each file becomes the\n system prompt for a subagent. Governance is baked into the definition.\n{%/if%}\n{%#if sessionHandoff%}\n\n---\n\n## Session Handoff\n\nThis workspace uses session handoff checklists. At the end of every session:\n\n1. Save all work to SSOT files\n2. Sync derived files\n3. Check SSOT integrity\n4. Update progress tracker\n5. Write next-session plan\n6. 
Run QC on session work\n\nThis ensures no context is lost between sessions.\n{%/if%}\n{%#if isResearch%}\n\n---\n\n## Research Tips\n\n- Write findings to disk immediately \u2014 don't accumulate in conversation\n- Cite sources for every claim\n- Separate facts from interpretation\n- Use `research/` for reference material, `artifacts/` for synthesis output\n{%/if%}\n{%#if isContent%}\n\n---\n\n## Content Workflow Tips\n\n- Every piece follows: draft \u2192 review \u2192 approve\n- Track revision feedback in dedicated files\n- Never finalize without explicit approval\n- Use `artifacts/` for final deliverables\n{%/if%}\n{%#if isDataProcessing%}\n\n---\n\n## Data Processing Tips\n\n- Define batch size before starting any processing run\n- Validate schema on every write\n- Checkpoint every 5 batch items\n- Log all transformations for auditability\n{%/if%}\n\n---\n\n{%#if sddEnabled%}\n\n---\n\n## Spec-Driven Development\n\nThis workspace enforces a spec-first workflow. Before any implementation:\n\n1. **Type `/spec`** to start a new spec interactively \u2014 Claude will guide you\n2. Or copy `specs/_template.md` manually, fill it in, and get approval\n3. Only implement after the spec is approved\n\nYour specs live in `specs/`. Each approved spec is the contract between you and\nClaude \u2014 implementation follows the spec, not the conversation.\n\nSee `{%systemDir%}/rules/sdd.md` for the full rules and exemptions.\n{%/if%}\n\n---\n\n*This workspace was generated by [Clawstrap](https://github.com/clawstrap/clawstrap).\nEdit any file to fit your needs \u2014 there's no lock-in.*\n";
@@ -454,7 +482,7 @@ var getting_started_md_default = "# Getting Started \u2014 {%workspaceName%}\n>
454
482
  var gitignore_default = "# Dependencies\nnode_modules/\n\n# Temporary files (subagent output, session data)\ntmp/\n\n# Secrets\n.env\n.env.*\ncredentials.json\n*.pem\n*.key\n\n# OS\n.DS_Store\nThumbs.db\n\n# Editor\n*.swp\n*.swo\n*~\n.clawstrap.watch.pid\ntmp/sessions/\n";
455
483
 
456
484
  // src/templates/rules/context-discipline.md.tmpl
457
- var context_discipline_md_default = "# Rule: Context Discipline\n> **Scope**: All sessions | **Generated**: {%generatedDate%}\n\n## Flush Cadence\n\nFlush working state to file {%flushCadence%}:\n- Write current state to a context checkpoint file\n- Include: what's done, what's next, accumulated results\n- Path: `context/checkpoint-{date}-{task}.md`\n{%#if hasSubagents%}\n\n## Subagent Output Rules\n\nSubagents MUST:\n1. Write all output to `tmp/{task}/{name}.json` (or `.md`)\n2. Return a one-line receipt: `\"Done. N items. File: {path}. Summary: {1 line}.\"`\n3. NOT dump raw results into the conversation\n\nMain session MUST:\n- Only read the subagent file if needed for the NEXT step\n- Never hold subagent output in conversation memory\n\n## Thin Orchestrator Principle\n\nDuring batch work, the main session is an orchestrator only:\n\n**DO**:\n- Track queue / done / next\n- Launch agents with correct prompts\n- Check output files for completeness\n- Update SSOT data files\n- Run QC gates\n\n**DO NOT**:\n- Read raw extraction data into main context\n- Classify results (agents do that)\n- Hold full subagent output in conversation\n{%/if%}\n\n## Before Batch Work\n\nAlways write an execution plan to `tmp/{task}/plan.md` before starting.\nThis file must survive context loss and be readable by any future session.\n\n## On User Correction\n\n1. FIRST write the correction to its durable home (memory/rule/skill file)\n2. THEN apply the correction to current work\n\nThis ensures the learning persists even if the session ends unexpectedly.\n";
485
+ var context_discipline_md_default = "# Rule: Context Discipline\n> **Scope**: All sessions | **Generated**: {%generatedDate%}\n\n## Flush Cadence\n\nFlush working state to file {%flushCadence%}:\n- Write current state to a context checkpoint file\n- Include: what's done, what's next, accumulated results\n- Path: `context/checkpoint-{date}-{task}.md`\n{%#if hasSubagents%}\n\n## Subagent Output Rules\n\nSubagents MUST:\n1. Write all output to `tmp/{task}/{name}.json` (or `.md`)\n2. Return a one-line receipt: `\"Done. N items. File: {path}. Summary: {1 line}.\"`\n3. NOT dump raw results into the conversation\n\nMain session MUST:\n- Only read the subagent file if needed for the NEXT step\n- Never hold subagent output in conversation memory\n\n## Thin Orchestrator Principle\n\nDuring batch work, the main session is an orchestrator only:\n\n**DO**:\n- Track queue / done / next\n- Launch agents with correct prompts\n- Check output files for completeness\n- Update SSOT data files\n- Run QC gates\n\n**DO NOT**:\n- Read raw extraction data into main context\n- Classify results (agents do that)\n- Hold full subagent output in conversation\n{%/if%}\n\n## Before Batch Work\n\nAlways write an execution plan before starting batch work:\n- Path: `context/plan-{YYYY-MM-DD}-{task-slug}.md`\n- This file must survive context loss and be readable by any future session.\n\nSubagent output goes to `tmp/{task}/` \u2014 not `context/`.\n\n## Session Handoff\n\nNext-session plan goes to `context/next-session.md` (overwrite each time).\nQC results go to `context/qc-{YYYY-MM-DD}.md`.\n\n## On User Correction\n\n1. FIRST write the correction to its durable home (memory/rule/skill file)\n2. THEN apply the correction to current work\n\nThis ensures the learning persists even if the session ends unexpectedly.\n";
458
486
 
459
487
  // src/templates/rules/approval-first.md.tmpl
460
488
  var approval_first_md_default = `# Rule: Approval-First Workflow
@@ -777,7 +805,7 @@ async function init(directory, options) {
777
805
  import_node_fs2.default.mkdirSync(targetDir, { recursive: true });
778
806
  }
779
807
  const config = ClawstrapConfigSchema.parse({
780
- version: "1.4.1",
808
+ version: "1.5.1",
781
809
  createdAt: (/* @__PURE__ */ new Date()).toISOString(),
782
810
  workspaceName: answers.workspaceName,
783
811
  targetDirectory: directory,
@@ -990,8 +1018,8 @@ Error: project "${name}" already exists at projects/${name}/
990
1018
  }
991
1019
 
992
1020
  // src/status.ts
993
- var import_node_fs8 = __toESM(require("fs"), 1);
994
- var import_node_path9 = __toESM(require("path"), 1);
1021
+ var import_node_fs10 = __toESM(require("fs"), 1);
1022
+ var import_node_path11 = __toESM(require("path"), 1);
995
1023
 
996
1024
  // src/watch/pid.ts
997
1025
  var import_node_fs7 = __toESM(require("fs"), 1);
@@ -1026,6 +1054,169 @@ function isDaemonRunning(rootDir) {
1026
1054
  }
1027
1055
  }
1028
1056
 
1057
+ // src/watch/promote.ts
1058
+ var import_node_fs9 = __toESM(require("fs"), 1);
1059
+ var import_node_path10 = __toESM(require("path"), 1);
1060
+ init_dedup();
1061
+ init_writers();
1062
+
1063
+ // src/watch/stopwords.ts
1064
+ var STOPWORDS = new Set(
1065
+ "a an the is are was were be been being have has had do does did will would could should may might must can this that these those i we you he she it they of in on at to for with from by about".split(" ")
1066
+ );
1067
+
1068
+ // src/watch/promote.ts
1069
+ var SIMILARITY_THRESHOLD = 0.65;
1070
+ var MIN_GROUP_SIZE = 3;
1071
+ function tokenize2(text) {
1072
+ return new Set(
1073
+ text.split(/\s+/).map((w) => w.replace(/[^a-z0-9]/gi, "").toLowerCase()).filter((w) => w.length > 1 && !STOPWORDS.has(w))
1074
+ );
1075
+ }
1076
+ function jaccard2(a, b) {
1077
+ const setA = tokenize2(a);
1078
+ const setB = tokenize2(b);
1079
+ if (setA.size === 0 && setB.size === 0) return 1;
1080
+ if (setA.size === 0 || setB.size === 0) return 0;
1081
+ let intersection = 0;
1082
+ for (const t of setA) {
1083
+ if (setB.has(t)) intersection++;
1084
+ }
1085
+ return intersection / (setA.size + setB.size - intersection);
1086
+ }
1087
+ function groupSimilar(entries) {
1088
+ const groups = [];
1089
+ for (const entry of entries) {
1090
+ let placed = false;
1091
+ for (const group of groups) {
1092
+ if (group.some((member) => jaccard2(entry, member) >= SIMILARITY_THRESHOLD)) {
1093
+ group.push(entry);
1094
+ placed = true;
1095
+ break;
1096
+ }
1097
+ }
1098
+ if (!placed) groups.push([entry]);
1099
+ }
1100
+ return groups.filter((g) => g.length >= MIN_GROUP_SIZE);
1101
+ }
1102
+ function deriveSlug(entries) {
1103
+ const freq = /* @__PURE__ */ new Map();
1104
+ for (const entry of entries) {
1105
+ for (const token of tokenize2(entry)) {
1106
+ freq.set(token, (freq.get(token) ?? 0) + 1);
1107
+ }
1108
+ }
1109
+ const top = [...freq.entries()].sort((a, b) => b[1] - a[1]).slice(0, 3).map(([t]) => t);
1110
+ return top.join("-") || "correction";
1111
+ }
1112
+ function parseRuleResponse(response) {
1113
+ const titleMatch = response.match(/^TITLE:\s*(.+)$/m);
1114
+ const principleMatch = response.match(/^PRINCIPLE:\s*(.+)$/m);
1115
+ const imperativesMatch = response.match(/^IMPERATIVES:\s*\n((?:\s*-\s*.+\n?)+)/m);
1116
+ if (!titleMatch || !principleMatch || !imperativesMatch) return null;
1117
+ const imperatives = imperativesMatch[1].split("\n").map((l) => l.replace(/^\s*-\s*/, "").trim()).filter(Boolean);
1118
+ if (imperatives.length === 0) return null;
1119
+ return {
1120
+ title: titleMatch[1].trim(),
1121
+ principle: principleMatch[1].trim(),
1122
+ imperatives
1123
+ };
1124
+ }
1125
+ function writeRuleFile(rulesDir, slug, data) {
1126
+ import_node_fs9.default.mkdirSync(rulesDir, { recursive: true });
1127
+ const imperativeLines = data.imperatives.map((i) => `- ${i}`).join("\n");
1128
+ const content = `---
1129
+ status: pending-review
1130
+ generated: ${(/* @__PURE__ */ new Date()).toISOString()}
1131
+ source: auto-promoted from gotcha-log
1132
+ ---
1133
+
1134
+ # ${data.title}
1135
+
1136
+ ${data.principle}
1137
+
1138
+ ## Imperatives
1139
+
1140
+ ${imperativeLines}
1141
+ `;
1142
+ import_node_fs9.default.writeFileSync(import_node_path10.default.join(rulesDir, `${slug}-auto.md`), content, "utf-8");
1143
+ }
1144
+ async function checkAndPromoteCorrections(rootDir, adapter, ui) {
1145
+ const logPath = import_node_path10.default.join(rootDir, ".claude", "gotcha-log.md");
1146
+ if (!import_node_fs9.default.existsSync(logPath)) return;
1147
+ let content;
1148
+ try {
1149
+ content = import_node_fs9.default.readFileSync(logPath, "utf-8");
1150
+ } catch {
1151
+ return;
1152
+ }
1153
+ const entries = parseMemoryEntries(content);
1154
+ if (entries.length < MIN_GROUP_SIZE) return;
1155
+ const promotableGroups = groupSimilar(entries);
1156
+ if (promotableGroups.length === 0) return;
1157
+ const rulesDir = import_node_path10.default.join(rootDir, ".claude", "rules");
1158
+ let written = 0;
1159
+ for (const group of promotableGroups) {
1160
+ const slug = deriveSlug(group);
1161
+ const ruleFile = import_node_path10.default.join(rulesDir, `${slug}-auto.md`);
1162
+ if (import_node_fs9.default.existsSync(ruleFile)) continue;
1163
+ ui.promoteStart();
1164
+ const prompt = `You are analysing a set of recurring corrections from an AI coding session log.
1165
+
1166
+ Corrections:
1167
+ ${group.map((e, i) => `${i + 1}. ${e}`).join("\n")}
1168
+
1169
+ Synthesise these into a governance rule. Respond in this exact format:
1170
+
1171
+ TITLE: (short rule name, 2\u20135 words)
1172
+ PRINCIPLE: (one sentence \u2014 the core principle this rule enforces)
1173
+ IMPERATIVES:
1174
+ - (specific imperative 1)
1175
+ - (specific imperative 2)
1176
+ - (specific imperative 3)
1177
+
1178
+ Output only the structured response \u2014 no explanation, no markdown fences.`;
1179
+ let response;
1180
+ try {
1181
+ response = await adapter.complete(prompt);
1182
+ } catch {
1183
+ ui.promoteDone(0);
1184
+ continue;
1185
+ }
1186
+ const data = parseRuleResponse(response);
1187
+ if (!data) {
1188
+ ui.promoteDone(0);
1189
+ continue;
1190
+ }
1191
+ try {
1192
+ writeRuleFile(rulesDir, slug, data);
1193
+ appendToMemory(rootDir, [`Auto-promoted correction group to rule: ${slug}-auto.md \u2014 "${data.title}"`], "promote");
1194
+ written++;
1195
+ ui.promoteDone(1);
1196
+ } catch {
1197
+ ui.promoteDone(0);
1198
+ }
1199
+ }
1200
+ void written;
1201
+ }
1202
+ function listPendingRules(rootDir) {
1203
+ const rulesDir = import_node_path10.default.join(rootDir, ".claude", "rules");
1204
+ if (!import_node_fs9.default.existsSync(rulesDir)) return [];
1205
+ const results = [];
1206
+ for (const entry of import_node_fs9.default.readdirSync(rulesDir)) {
1207
+ if (!entry.endsWith("-auto.md")) continue;
1208
+ try {
1209
+ const content = import_node_fs9.default.readFileSync(import_node_path10.default.join(rulesDir, entry), "utf-8");
1210
+ if (!content.includes("status: pending-review")) continue;
1211
+ const headingMatch = content.match(/^#\s+(.+)$/m);
1212
+ const title = headingMatch ? headingMatch[1].trim() : "(no title)";
1213
+ results.push({ file: entry, title });
1214
+ } catch {
1215
+ }
1216
+ }
1217
+ return results;
1218
+ }
1219
+
1029
1220
  // src/status.ts
1030
1221
  var WORKLOAD_LABELS3 = {
1031
1222
  research: "Research & Analysis",
@@ -1034,27 +1225,29 @@ var WORKLOAD_LABELS3 = {
1034
1225
  custom: "General Purpose"
1035
1226
  };
1036
1227
  function countEntries(dir, exclude = []) {
1037
- if (!import_node_fs8.default.existsSync(dir)) return 0;
1038
- return import_node_fs8.default.readdirSync(dir, { withFileTypes: true }).filter((e) => !exclude.includes(e.name) && !e.name.startsWith(".")).length;
1228
+ if (!import_node_fs10.default.existsSync(dir)) return 0;
1229
+ return import_node_fs10.default.readdirSync(dir, { withFileTypes: true }).filter((e) => !exclude.includes(e.name) && !e.name.startsWith(".")).length;
1039
1230
  }
1040
1231
  function countSkills(skillsDir) {
1041
- if (!import_node_fs8.default.existsSync(skillsDir)) return 0;
1042
- return import_node_fs8.default.readdirSync(skillsDir, { withFileTypes: true }).filter(
1043
- (e) => e.isDirectory() && import_node_fs8.default.existsSync(import_node_path9.default.join(skillsDir, e.name, "SKILL.md"))
1232
+ if (!import_node_fs10.default.existsSync(skillsDir)) return 0;
1233
+ return import_node_fs10.default.readdirSync(skillsDir, { withFileTypes: true }).filter(
1234
+ (e) => e.isDirectory() && import_node_fs10.default.existsSync(import_node_path11.default.join(skillsDir, e.name, "SKILL.md"))
1044
1235
  ).length;
1045
1236
  }
1046
1237
  async function showStatus() {
1047
1238
  const { config, vars, rootDir } = loadWorkspace();
1048
1239
  const systemDir = String(vars.systemDir);
1049
- const agentsDir = import_node_path9.default.join(rootDir, systemDir, "agents");
1050
- const skillsDir = import_node_path9.default.join(rootDir, systemDir, "skills");
1051
- const rulesDir = import_node_path9.default.join(rootDir, systemDir, "rules");
1052
- const projectsDir = import_node_path9.default.join(rootDir, "projects");
1240
+ const agentsDir = import_node_path11.default.join(rootDir, systemDir, "agents");
1241
+ const skillsDir = import_node_path11.default.join(rootDir, systemDir, "skills");
1242
+ const rulesDir = import_node_path11.default.join(rootDir, systemDir, "rules");
1243
+ const projectsDir = import_node_path11.default.join(rootDir, "projects");
1053
1244
  const agentCount = countEntries(agentsDir, ["_template.md"]);
1054
1245
  const skillCount = countSkills(skillsDir);
1055
1246
  const projectCount = countEntries(projectsDir, ["_template"]);
1056
1247
  const ruleCount = countEntries(rulesDir);
1057
1248
  const date = config.createdAt.split("T")[0];
1249
+ const pendingRulesList = listPendingRules(rootDir);
1250
+ const pendingRules = pendingRulesList.length;
1058
1251
  console.log(`
1059
1252
  Clawstrap Workspace: ${config.workspaceName}`);
1060
1253
  console.log(`Created: ${date} | Version: ${config.version}`);
@@ -1075,6 +1268,12 @@ Structure:`);
1075
1268
  console.log(` Skills: ${skillCount} (${systemDir}/skills/)`);
1076
1269
  console.log(` Projects: ${projectCount} (projects/)`);
1077
1270
  console.log(` Rules: ${ruleCount} (${systemDir}/rules/)`);
1271
+ if (pendingRules > 0) {
1272
+ console.log(` Pending rules: ${pendingRules} (.claude/rules/ \u2014 review *-auto.md files)`);
1273
+ for (const rule of pendingRulesList) {
1274
+ console.log(` \xB7 ${rule.file.padEnd(30)} "${rule.title}"`);
1275
+ }
1276
+ }
1078
1277
  if (config.lastExport) {
1079
1278
  const exportDate = config.lastExport.exportedAt.split("T")[0];
1080
1279
  console.log(`
@@ -1103,28 +1302,28 @@ Watch:`);
1103
1302
  }
1104
1303
 
1105
1304
  // src/export-paperclip.ts
1106
- var import_node_fs13 = __toESM(require("fs"), 1);
1107
- var import_node_path14 = __toESM(require("path"), 1);
1305
+ var import_node_fs15 = __toESM(require("fs"), 1);
1306
+ var import_node_path16 = __toESM(require("path"), 1);
1108
1307
  var import_prompts7 = require("@inquirer/prompts");
1109
1308
 
1110
1309
  // src/export-paperclip/translate-agents.ts
1111
- var import_node_fs9 = __toESM(require("fs"), 1);
1112
- var import_node_path10 = __toESM(require("path"), 1);
1310
+ var import_node_fs11 = __toESM(require("fs"), 1);
1311
+ var import_node_path12 = __toESM(require("path"), 1);
1113
1312
  function slugToName(slug) {
1114
1313
  return slug.split("-").map((w) => w.charAt(0).toUpperCase() + w.slice(1)).join(" ");
1115
1314
  }
1116
1315
  function translateAgents(rootDir, systemDir, workspaceName, skillSlugs) {
1117
- const agentsDir = import_node_path10.default.join(rootDir, systemDir, "agents");
1316
+ const agentsDir = import_node_path12.default.join(rootDir, systemDir, "agents");
1118
1317
  const agents = [];
1119
1318
  const workerNames = [];
1120
1319
  const workerAgents = [];
1121
- if (import_node_fs9.default.existsSync(agentsDir)) {
1122
- for (const entry of import_node_fs9.default.readdirSync(agentsDir)) {
1320
+ if (import_node_fs11.default.existsSync(agentsDir)) {
1321
+ for (const entry of import_node_fs11.default.readdirSync(agentsDir)) {
1123
1322
  if (entry === "primary-agent.md" || entry === "_template.md" || entry.startsWith(".") || !entry.endsWith(".md")) {
1124
1323
  continue;
1125
1324
  }
1126
1325
  const slug = entry.replace(/\.md$/, "");
1127
- const rawBody = import_node_fs9.default.readFileSync(import_node_path10.default.join(agentsDir, entry), "utf-8");
1326
+ const rawBody = import_node_fs11.default.readFileSync(import_node_path12.default.join(agentsDir, entry), "utf-8");
1128
1327
  const roleMatch = rawBody.match(/^>\s*\*\*Purpose\*\*:\s*(.+)/m);
1129
1328
  const description = roleMatch ? roleMatch[1].trim() : "";
1130
1329
  const name = slugToName(slug);
@@ -1234,8 +1433,8 @@ When your work is complete, hand off to the **CEO** for review routing. If a rev
1234
1433
  }
1235
1434
 
1236
1435
  // src/export-paperclip/translate-governance.ts
1237
- var import_node_fs10 = __toESM(require("fs"), 1);
1238
- var import_node_path11 = __toESM(require("path"), 1);
1436
+ var import_node_fs12 = __toESM(require("fs"), 1);
1437
+ var import_node_path13 = __toESM(require("path"), 1);
1239
1438
  var GOVERNANCE_TIERS = {
1240
1439
  solo: {
1241
1440
  tier: "light",
@@ -1269,39 +1468,39 @@ function getGovernanceConfig(qualityLevel) {
1269
1468
  }
1270
1469
 
1271
1470
  // src/export-paperclip/translate-skills.ts
1272
- var import_node_fs11 = __toESM(require("fs"), 1);
1273
- var import_node_path12 = __toESM(require("path"), 1);
1471
+ var import_node_fs13 = __toESM(require("fs"), 1);
1472
+ var import_node_path14 = __toESM(require("path"), 1);
1274
1473
  function translateSkills(rootDir, systemDir) {
1275
- const skillsDir = import_node_path12.default.join(rootDir, systemDir, "skills");
1474
+ const skillsDir = import_node_path14.default.join(rootDir, systemDir, "skills");
1276
1475
  const skills = [];
1277
- if (!import_node_fs11.default.existsSync(skillsDir)) return skills;
1278
- for (const entry of import_node_fs11.default.readdirSync(skillsDir, { withFileTypes: true })) {
1476
+ if (!import_node_fs13.default.existsSync(skillsDir)) return skills;
1477
+ for (const entry of import_node_fs13.default.readdirSync(skillsDir, { withFileTypes: true })) {
1279
1478
  if (!entry.isDirectory()) continue;
1280
- const skillMdPath = import_node_path12.default.join(skillsDir, entry.name, "SKILL.md");
1281
- if (!import_node_fs11.default.existsSync(skillMdPath)) continue;
1479
+ const skillMdPath = import_node_path14.default.join(skillsDir, entry.name, "SKILL.md");
1480
+ if (!import_node_fs13.default.existsSync(skillMdPath)) continue;
1282
1481
  skills.push({
1283
1482
  name: entry.name,
1284
1483
  sourcePath: `${systemDir}/skills/${entry.name}/SKILL.md`,
1285
- content: import_node_fs11.default.readFileSync(skillMdPath, "utf-8")
1484
+ content: import_node_fs13.default.readFileSync(skillMdPath, "utf-8")
1286
1485
  });
1287
1486
  }
1288
1487
  return skills;
1289
1488
  }
1290
1489
 
1291
1490
  // src/export-paperclip/translate-goals.ts
1292
- var import_node_fs12 = __toESM(require("fs"), 1);
1293
- var import_node_path13 = __toESM(require("path"), 1);
1491
+ var import_node_fs14 = __toESM(require("fs"), 1);
1492
+ var import_node_path15 = __toESM(require("path"), 1);
1294
1493
  function translateGoals(rootDir) {
1295
- const projectsDir = import_node_path13.default.join(rootDir, "projects");
1494
+ const projectsDir = import_node_path15.default.join(rootDir, "projects");
1296
1495
  const goals = [];
1297
- if (!import_node_fs12.default.existsSync(projectsDir)) return goals;
1298
- for (const entry of import_node_fs12.default.readdirSync(projectsDir, { withFileTypes: true })) {
1496
+ if (!import_node_fs14.default.existsSync(projectsDir)) return goals;
1497
+ for (const entry of import_node_fs14.default.readdirSync(projectsDir, { withFileTypes: true })) {
1299
1498
  if (!entry.isDirectory() || entry.name === "_template" || entry.name.startsWith(".")) {
1300
1499
  continue;
1301
1500
  }
1302
- const readmePath = import_node_path13.default.join(projectsDir, entry.name, "README.md");
1303
- if (!import_node_fs12.default.existsSync(readmePath)) continue;
1304
- const content = import_node_fs12.default.readFileSync(readmePath, "utf-8");
1501
+ const readmePath = import_node_path15.default.join(projectsDir, entry.name, "README.md");
1502
+ if (!import_node_fs14.default.existsSync(readmePath)) continue;
1503
+ const content = import_node_fs14.default.readFileSync(readmePath, "utf-8");
1305
1504
  const descMatch = content.match(
1306
1505
  /## What This Project Is\s*\n+([\s\S]*?)(?=\n---|\n##|$)/
1307
1506
  );
@@ -1329,7 +1528,7 @@ async function exportPaperclip(options) {
1329
1528
  default: `Governed AI workspace for ${String(vars.workloadLabel).toLowerCase()}`
1330
1529
  });
1331
1530
  const companySlug = toSlug(companyName);
1332
- const outDir = import_node_path14.default.resolve(
1531
+ const outDir = import_node_path16.default.resolve(
1333
1532
  options.out ?? `${config.workspaceName}-paperclip`
1334
1533
  );
1335
1534
  const skills = translateSkills(rootDir, systemDir);
@@ -1359,7 +1558,7 @@ ${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
1359
1558
  console.log("\nValidation passed. Run without --validate to export.\n");
1360
1559
  return;
1361
1560
  }
1362
- if (import_node_fs13.default.existsSync(outDir)) {
1561
+ if (import_node_fs15.default.existsSync(outDir)) {
1363
1562
  const proceed = await (0, import_prompts7.confirm)({
1364
1563
  message: `Output directory already exists at ${outDir}. Overwrite?`,
1365
1564
  default: false
@@ -1368,10 +1567,10 @@ ${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
1368
1567
  console.log("Aborted.\n");
1369
1568
  return;
1370
1569
  }
1371
- import_node_fs13.default.rmSync(outDir, { recursive: true, force: true });
1570
+ import_node_fs15.default.rmSync(outDir, { recursive: true, force: true });
1372
1571
  }
1373
1572
  console.log("\nExporting to Paperclip format (agentcompanies/v1)...\n");
1374
- import_node_fs13.default.mkdirSync(outDir, { recursive: true });
1573
+ import_node_fs15.default.mkdirSync(outDir, { recursive: true });
1375
1574
  const goalsYaml = goals.length > 0 ? goals.map((g) => ` - ${g.description.split("\n")[0]}`).join("\n") : ` - ${mission}`;
1376
1575
  const pipelineLines = agents.map((a, i) => {
1377
1576
  return `${i + 1}. **${a.name}** ${a.title.toLowerCase()}`;
@@ -1401,17 +1600,17 @@ ${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
1401
1600
  `Generated with [Clawstrap](https://github.com/peppinho89/clawstrap) v${CLI_VERSION}`,
1402
1601
  ""
1403
1602
  ].join("\n");
1404
- import_node_fs13.default.writeFileSync(import_node_path14.default.join(outDir, "COMPANY.md"), companyMd, "utf-8");
1603
+ import_node_fs15.default.writeFileSync(import_node_path16.default.join(outDir, "COMPANY.md"), companyMd, "utf-8");
1405
1604
  console.log(" \u2713 COMPANY.md");
1406
- import_node_fs13.default.writeFileSync(
1407
- import_node_path14.default.join(outDir, ".paperclip.yaml"),
1605
+ import_node_fs15.default.writeFileSync(
1606
+ import_node_path16.default.join(outDir, ".paperclip.yaml"),
1408
1607
  "schema: paperclip/v1\n",
1409
1608
  "utf-8"
1410
1609
  );
1411
1610
  console.log(" \u2713 .paperclip.yaml");
1412
1611
  for (const agent of agents) {
1413
- const agentDir = import_node_path14.default.join(outDir, "agents", agent.slug);
1414
- import_node_fs13.default.mkdirSync(agentDir, { recursive: true });
1612
+ const agentDir = import_node_path16.default.join(outDir, "agents", agent.slug);
1613
+ import_node_fs15.default.mkdirSync(agentDir, { recursive: true });
1415
1614
  const frontmatterLines = [
1416
1615
  "---",
1417
1616
  `name: ${agent.name}`,
@@ -1426,12 +1625,12 @@ ${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
1426
1625
  }
1427
1626
  frontmatterLines.push("---");
1428
1627
  const agentMd = frontmatterLines.join("\n") + "\n\n" + agent.body + "\n";
1429
- import_node_fs13.default.writeFileSync(import_node_path14.default.join(agentDir, "AGENTS.md"), agentMd, "utf-8");
1628
+ import_node_fs15.default.writeFileSync(import_node_path16.default.join(agentDir, "AGENTS.md"), agentMd, "utf-8");
1430
1629
  console.log(` \u2713 agents/${agent.slug}/AGENTS.md`);
1431
1630
  }
1432
1631
  if (nonCeoAgents.length > 0) {
1433
- const teamDir = import_node_path14.default.join(outDir, "teams", "engineering");
1434
- import_node_fs13.default.mkdirSync(teamDir, { recursive: true });
1632
+ const teamDir = import_node_path16.default.join(outDir, "teams", "engineering");
1633
+ import_node_fs15.default.mkdirSync(teamDir, { recursive: true });
1435
1634
  const includesList = nonCeoAgents.map((a) => ` - ../../agents/${a.slug}/AGENTS.md`).join("\n");
1436
1635
  const teamMd = [
1437
1636
  "---",
@@ -1448,13 +1647,13 @@ ${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
1448
1647
  `The engineering team at ${companyName}. Led by the CEO, who scopes and delegates work to specialists.`,
1449
1648
  ""
1450
1649
  ].join("\n");
1451
- import_node_fs13.default.writeFileSync(import_node_path14.default.join(teamDir, "TEAM.md"), teamMd, "utf-8");
1650
+ import_node_fs15.default.writeFileSync(import_node_path16.default.join(teamDir, "TEAM.md"), teamMd, "utf-8");
1452
1651
  console.log(" \u2713 teams/engineering/TEAM.md");
1453
1652
  }
1454
1653
  for (const skill of skills) {
1455
- const skillDir = import_node_path14.default.join(outDir, "skills", skill.name);
1456
- import_node_fs13.default.mkdirSync(skillDir, { recursive: true });
1457
- import_node_fs13.default.writeFileSync(import_node_path14.default.join(skillDir, "SKILL.md"), skill.content, "utf-8");
1654
+ const skillDir = import_node_path16.default.join(outDir, "skills", skill.name);
1655
+ import_node_fs15.default.mkdirSync(skillDir, { recursive: true });
1656
+ import_node_fs15.default.writeFileSync(import_node_path16.default.join(skillDir, "SKILL.md"), skill.content, "utf-8");
1458
1657
  console.log(` \u2713 skills/${skill.name}/SKILL.md`);
1459
1658
  }
1460
1659
  const importScript = [
@@ -1470,26 +1669,26 @@ ${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
1470
1669
  'echo "Done. Open your Paperclip dashboard to review."',
1471
1670
  ""
1472
1671
  ].join("\n");
1473
- const importPath = import_node_path14.default.join(outDir, "import.sh");
1474
- import_node_fs13.default.writeFileSync(importPath, importScript, "utf-8");
1475
- import_node_fs13.default.chmodSync(importPath, 493);
1672
+ const importPath = import_node_path16.default.join(outDir, "import.sh");
1673
+ import_node_fs15.default.writeFileSync(importPath, importScript, "utf-8");
1674
+ import_node_fs15.default.chmodSync(importPath, 493);
1476
1675
  console.log(" \u2713 import.sh");
1477
1676
  const updatedConfig = {
1478
1677
  ...config,
1479
1678
  lastExport: {
1480
1679
  format: "paperclip",
1481
1680
  exportedAt: (/* @__PURE__ */ new Date()).toISOString(),
1482
- outputDir: import_node_path14.default.relative(rootDir, outDir) || outDir
1681
+ outputDir: import_node_path16.default.relative(rootDir, outDir) || outDir
1483
1682
  }
1484
1683
  };
1485
- import_node_fs13.default.writeFileSync(
1486
- import_node_path14.default.join(rootDir, ".clawstrap.json"),
1684
+ import_node_fs15.default.writeFileSync(
1685
+ import_node_path16.default.join(rootDir, ".clawstrap.json"),
1487
1686
  JSON.stringify(updatedConfig, null, 2) + "\n",
1488
1687
  "utf-8"
1489
1688
  );
1490
1689
  console.log(
1491
1690
  `
1492
- Exported to ${import_node_path14.default.relative(process.cwd(), outDir) || outDir}`
1691
+ Exported to ${import_node_path16.default.relative(process.cwd(), outDir) || outDir}`
1493
1692
  );
1494
1693
  console.log(
1495
1694
  `${agents.length} agent(s), ${skills.length} skill(s), ${goals.length} goal(s)`
@@ -1499,43 +1698,14 @@ Exported to ${import_node_path14.default.relative(process.cwd(), outDir) || outD
1499
1698
  }
1500
1699
 
1501
1700
  // src/watch.ts
1502
- var import_node_path20 = __toESM(require("path"), 1);
1503
- var import_node_child_process4 = require("child_process");
1504
- var import_node_fs19 = __toESM(require("fs"), 1);
1701
+ var import_node_path23 = __toESM(require("path"), 1);
1702
+ var import_node_fs22 = __toESM(require("fs"), 1);
1505
1703
 
1506
1704
  // src/watch/git.ts
1507
1705
  var import_node_child_process = require("child_process");
1508
- var import_node_fs15 = __toESM(require("fs"), 1);
1509
- var import_node_path16 = __toESM(require("path"), 1);
1706
+ var import_node_fs16 = __toESM(require("fs"), 1);
1707
+ var import_node_path17 = __toESM(require("path"), 1);
1510
1708
  init_writers();
1511
- var STOPWORDS = /* @__PURE__ */ new Set([
1512
- "fix",
1513
- "add",
1514
- "update",
1515
- "remove",
1516
- "feat",
1517
- "chore",
1518
- "the",
1519
- "a",
1520
- "an",
1521
- "and",
1522
- "or",
1523
- "in",
1524
- "on",
1525
- "at",
1526
- "to",
1527
- "for",
1528
- "of",
1529
- "is",
1530
- "was",
1531
- "be",
1532
- "it",
1533
- "as",
1534
- "with",
1535
- "by",
1536
- "this",
1537
- "that"
1538
- ]);
1539
1709
  function parseGitLog(output) {
1540
1710
  const entries = [];
1541
1711
  const lines = output.split("\n");
@@ -1563,7 +1733,7 @@ function getTopDirs(entries) {
1563
1733
  for (const entry of entries) {
1564
1734
  const seenDirs = /* @__PURE__ */ new Set();
1565
1735
  for (const file of entry.files) {
1566
- const dir = import_node_path16.default.dirname(file);
1736
+ const dir = import_node_path17.default.dirname(file);
1567
1737
  if (dir !== "." && !seenDirs.has(dir)) {
1568
1738
  seenDirs.add(dir);
1569
1739
  dirCount.set(dir, (dirCount.get(dir) ?? 0) + 1);
@@ -1596,7 +1766,7 @@ function getRecurringWords(entries) {
1596
1766
  return Array.from(wordCount.entries()).filter(([, count]) => count >= 2).sort((a, b) => b[1] - a[1]).slice(0, 10).map(([word]) => word);
1597
1767
  }
1598
1768
  async function runGitObserver(rootDir, sinceCommit) {
1599
- if (!import_node_fs15.default.existsSync(import_node_path16.default.join(rootDir, ".git"))) {
1769
+ if (!import_node_fs16.default.existsSync(import_node_path17.default.join(rootDir, ".git"))) {
1600
1770
  return null;
1601
1771
  }
1602
1772
  let headSha;
@@ -1651,8 +1821,8 @@ async function runGitObserver(rootDir, sinceCommit) {
1651
1821
  }
1652
1822
 
1653
1823
  // src/watch/scan.ts
1654
- var import_node_fs16 = __toESM(require("fs"), 1);
1655
- var import_node_path17 = __toESM(require("path"), 1);
1824
+ var import_node_fs17 = __toESM(require("fs"), 1);
1825
+ var import_node_path18 = __toESM(require("path"), 1);
1656
1826
  var SKIP_DIRS = /* @__PURE__ */ new Set([".git", "node_modules", "tmp", "dist", ".claude"]);
1657
1827
  var CODE_EXTS = /* @__PURE__ */ new Set([".ts", ".js", ".tsx", ".jsx"]);
1658
1828
  function walkDir(dir, maxDepth = 10, depth = 0) {
@@ -1660,13 +1830,13 @@ function walkDir(dir, maxDepth = 10, depth = 0) {
1660
1830
  let results = [];
1661
1831
  let entries;
1662
1832
  try {
1663
- entries = import_node_fs16.default.readdirSync(dir, { withFileTypes: true });
1833
+ entries = import_node_fs17.default.readdirSync(dir, { withFileTypes: true });
1664
1834
  } catch {
1665
1835
  return [];
1666
1836
  }
1667
1837
  for (const entry of entries) {
1668
1838
  if (SKIP_DIRS.has(entry.name)) continue;
1669
- const fullPath = import_node_path17.default.join(dir, entry.name);
1839
+ const fullPath = import_node_path18.default.join(dir, entry.name);
1670
1840
  if (entry.isDirectory()) {
1671
1841
  results = results.concat(walkDir(fullPath, maxDepth, depth + 1));
1672
1842
  } else if (entry.isFile()) {
@@ -1699,7 +1869,7 @@ function analyzeNaming(files) {
1699
1869
  other: []
1700
1870
  };
1701
1871
  for (const file of files) {
1702
- const base = import_node_path17.default.basename(file, import_node_path17.default.extname(file));
1872
+ const base = import_node_path18.default.basename(file, import_node_path18.default.extname(file));
1703
1873
  const style2 = detectNamingCase(base);
1704
1874
  counts[style2]++;
1705
1875
  if (examples[style2].length < 3) examples[style2].push(base);
@@ -1719,14 +1889,14 @@ function analyzeNaming(files) {
1719
1889
  return result;
1720
1890
  }
1721
1891
  function analyzeImports(files) {
1722
- const sample = files.filter((f) => CODE_EXTS.has(import_node_path17.default.extname(f))).slice(0, 20);
1892
+ const sample = files.filter((f) => CODE_EXTS.has(import_node_path18.default.extname(f))).slice(0, 20);
1723
1893
  let relativeCount = 0;
1724
1894
  let absoluteCount = 0;
1725
1895
  let barrelCount = 0;
1726
1896
  for (const file of sample) {
1727
1897
  let content;
1728
1898
  try {
1729
- content = import_node_fs16.default.readFileSync(file, "utf-8");
1899
+ content = import_node_fs17.default.readFileSync(file, "utf-8");
1730
1900
  } catch {
1731
1901
  continue;
1732
1902
  }
@@ -1735,7 +1905,7 @@ function analyzeImports(files) {
1735
1905
  if (/from\s+['"]\.\.?\//.test(line)) relativeCount++;
1736
1906
  else if (/from\s+['"]/.test(line)) absoluteCount++;
1737
1907
  }
1738
- const base = import_node_path17.default.basename(file, import_node_path17.default.extname(file));
1908
+ const base = import_node_path18.default.basename(file, import_node_path18.default.extname(file));
1739
1909
  if (base === "index") barrelCount++;
1740
1910
  }
1741
1911
  const results = [];
@@ -1759,7 +1929,7 @@ function analyzeTesting(files) {
1759
1929
  let hasSpecExt = false;
1760
1930
  let hasTestsDir = false;
1761
1931
  for (const file of files) {
1762
- const base = import_node_path17.default.basename(file);
1932
+ const base = import_node_path18.default.basename(file);
1763
1933
  if (/\.test\.(ts|js|tsx|jsx)$/.test(base)) hasTestExt = true;
1764
1934
  if (/\.spec\.(ts|js|tsx|jsx)$/.test(base)) hasSpecExt = true;
1765
1935
  if (file.includes("/__tests__/") || file.includes("\\__tests__\\")) hasTestsDir = true;
@@ -1773,13 +1943,13 @@ function analyzeTesting(files) {
1773
1943
  return [`Test patterns found: ${testPatterns.join(", ")}`];
1774
1944
  }
1775
1945
  function analyzeErrorHandling(files) {
1776
- const sample = files.filter((f) => CODE_EXTS.has(import_node_path17.default.extname(f))).slice(0, 20);
1946
+ const sample = files.filter((f) => CODE_EXTS.has(import_node_path18.default.extname(f))).slice(0, 20);
1777
1947
  let tryCatchCount = 0;
1778
1948
  let resultTypeCount = 0;
1779
1949
  for (const file of sample) {
1780
1950
  let content;
1781
1951
  try {
1782
- content = import_node_fs16.default.readFileSync(file, "utf-8");
1952
+ content = import_node_fs17.default.readFileSync(file, "utf-8");
1783
1953
  } catch {
1784
1954
  continue;
1785
1955
  }
@@ -1804,14 +1974,14 @@ function analyzeErrorHandling(files) {
1804
1974
  return results;
1805
1975
  }
1806
1976
  function analyzeComments(files) {
1807
- const sample = files.filter((f) => CODE_EXTS.has(import_node_path17.default.extname(f))).slice(0, 20);
1977
+ const sample = files.filter((f) => CODE_EXTS.has(import_node_path18.default.extname(f))).slice(0, 20);
1808
1978
  let jsdocCount = 0;
1809
1979
  let inlineCount = 0;
1810
1980
  let totalLines = 0;
1811
1981
  for (const file of sample) {
1812
1982
  let content;
1813
1983
  try {
1814
- content = import_node_fs16.default.readFileSync(file, "utf-8");
1984
+ content = import_node_fs17.default.readFileSync(file, "utf-8");
1815
1985
  } catch {
1816
1986
  continue;
1817
1987
  }
@@ -1844,17 +2014,220 @@ async function runScan(rootDir) {
1844
2014
  init_writers();
1845
2015
 
1846
2016
  // src/watch/daemon.ts
2017
+ var import_node_fs21 = __toESM(require("fs"), 1);
2018
+ var import_node_path22 = __toESM(require("path"), 1);
2019
+ init_writers();
2020
+
2021
+ // src/watch/synthesize.ts
1847
2022
  var import_node_fs18 = __toESM(require("fs"), 1);
1848
2023
  var import_node_path19 = __toESM(require("path"), 1);
1849
- init_writers();
2024
+ init_dedup();
2025
+ var SYNTH_START = "<!-- CLAWSTRAP:SYNTHESIS:START -->";
2026
+ var SYNTH_END = "<!-- CLAWSTRAP:SYNTHESIS:END -->";
2027
+ var MAX_ENTRIES_TO_SEND = 20;
2028
+ function extractExistingSummary(content) {
2029
+ const startIdx = content.indexOf(SYNTH_START);
2030
+ const endIdx = content.indexOf(SYNTH_END);
2031
+ if (startIdx === -1 || endIdx === -1) return null;
2032
+ const block = content.slice(startIdx + SYNTH_START.length, endIdx).trim();
2033
+ const lines = block.split("\n");
2034
+ let start = 0;
2035
+ if (/^##\s+Living Summary/.test(lines[start] ?? "")) start++;
2036
+ if (/^>\s+Updated:/.test(lines[start] ?? "")) start++;
2037
+ return lines.slice(start).join("\n").trim() || null;
2038
+ }
2039
+ function buildSynthBlock(summary) {
2040
+ const ts = (/* @__PURE__ */ new Date()).toISOString();
2041
+ return [
2042
+ SYNTH_START,
2043
+ "## Living Summary",
2044
+ `> Updated: ${ts}`,
2045
+ "",
2046
+ summary,
2047
+ SYNTH_END
2048
+ ].join("\n");
2049
+ }
2050
+ function writeSynthBlock(memoryPath, summary) {
2051
+ const content = import_node_fs18.default.existsSync(memoryPath) ? import_node_fs18.default.readFileSync(memoryPath, "utf-8") : "";
2052
+ const block = buildSynthBlock(summary);
2053
+ const startIdx = content.indexOf(SYNTH_START);
2054
+ const endIdx = content.indexOf(SYNTH_END);
2055
+ if (startIdx !== -1 && endIdx !== -1) {
2056
+ const before = content.slice(0, startIdx);
2057
+ const after = content.slice(endIdx + SYNTH_END.length);
2058
+ import_node_fs18.default.writeFileSync(memoryPath, before + block + after, "utf-8");
2059
+ return;
2060
+ }
2061
+ const headingMatch = /^#[^\n]*\n?/m.exec(content);
2062
+ if (headingMatch) {
2063
+ const insertAt = headingMatch.index + headingMatch[0].length;
2064
+ const updated = content.slice(0, insertAt) + "\n" + block + "\n" + content.slice(insertAt);
2065
+ import_node_fs18.default.writeFileSync(memoryPath, updated, "utf-8");
2066
+ } else {
2067
+ import_node_fs18.default.writeFileSync(memoryPath, block + "\n\n" + content, "utf-8");
2068
+ }
2069
+ }
2070
+ async function synthesizeMemory(rootDir, adapter) {
2071
+ const memoryPath = import_node_path19.default.join(rootDir, ".claude", "memory", "MEMORY.md");
2072
+ if (!import_node_fs18.default.existsSync(memoryPath)) return null;
2073
+ const content = import_node_fs18.default.readFileSync(memoryPath, "utf-8");
2074
+ const contentWithoutSynthBlock = content.replace(
2075
+ /<!-- CLAWSTRAP:SYNTHESIS:START -->[\s\S]*?<!-- CLAWSTRAP:SYNTHESIS:END -->/,
2076
+ ""
2077
+ );
2078
+ const allEntries = parseMemoryEntries(contentWithoutSynthBlock);
2079
+ if (allEntries.length === 0) return null;
2080
+ const recentEntries = allEntries.slice(-MAX_ENTRIES_TO_SEND);
2081
+ const existingSummary = extractExistingSummary(content);
2082
+ let prompt;
2083
+ if (existingSummary) {
2084
+ prompt = `You are maintaining a living summary of an AI agent workspace.
2085
+
2086
+ Current summary:
2087
+ ${existingSummary}
2088
+
2089
+ Recent new memory entries:
2090
+ ${recentEntries.join("\n---\n")}
2091
+
2092
+ Update the summary to incorporate the new information. Write 3\u20135 sentences of persistent truths about how this workspace operates. Output only the updated paragraph \u2014 no heading, no markdown, no explanation.`;
2093
+ } else {
2094
+ prompt = `You are summarising an AI agent workspace from its memory log.
2095
+
2096
+ Recent memory entries:
2097
+ ${recentEntries.join("\n---\n")}
2098
+
2099
+ Write a concise 3\u20135 sentence summary of the persistent truths about how this workspace operates. Output only the paragraph \u2014 no heading, no markdown, no explanation.`;
2100
+ }
2101
+ let response;
2102
+ try {
2103
+ response = await adapter.complete(prompt);
2104
+ } catch {
2105
+ return null;
2106
+ }
2107
+ const summary = response.replace(/^```(?:markdown)?\s*/m, "").replace(/\s*```\s*$/m, "").trim();
2108
+ if (!summary) return null;
2109
+ try {
2110
+ writeSynthBlock(memoryPath, summary);
2111
+ } catch {
2112
+ return null;
2113
+ }
2114
+ return summary;
2115
+ }
2116
+
2117
+ // src/watch/infer.ts
2118
+ var import_node_child_process2 = require("child_process");
2119
+ var import_node_fs19 = __toESM(require("fs"), 1);
2120
+ var import_node_path20 = __toESM(require("path"), 1);
2121
+ var CODE_EXTS2 = /* @__PURE__ */ new Set([".ts", ".js", ".tsx", ".jsx"]);
2122
+ var SKIP_DIRS2 = /* @__PURE__ */ new Set([".git", "node_modules", "tmp", "dist", ".claude"]);
2123
+ var MAX_FILES = 10;
2124
+ var MAX_LINES_PER_FILE = 150;
2125
+ var MIN_FILES = 3;
2126
+ function walkCodeFiles(rootDir) {
2127
+ const results = [];
2128
+ function walk(dir, depth = 0) {
2129
+ if (depth > 8) return;
2130
+ let entries;
2131
+ try {
2132
+ entries = import_node_fs19.default.readdirSync(dir, { withFileTypes: true });
2133
+ } catch {
2134
+ return;
2135
+ }
2136
+ for (const entry of entries) {
2137
+ if (SKIP_DIRS2.has(entry.name)) continue;
2138
+ const fullPath = import_node_path20.default.join(dir, entry.name);
2139
+ if (entry.isDirectory()) {
2140
+ walk(fullPath, depth + 1);
2141
+ } else if (entry.isFile() && CODE_EXTS2.has(import_node_path20.default.extname(entry.name))) {
2142
+ if (!/\.(test|spec)\.(ts|js|tsx|jsx)$/.test(entry.name)) {
2143
+ results.push(fullPath);
2144
+ }
2145
+ }
2146
+ }
2147
+ }
2148
+ walk(rootDir);
2149
+ return results;
2150
+ }
2151
+ function getRecentlyChangedFiles(rootDir) {
2152
+ try {
2153
+ const output = (0, import_node_child_process2.execSync)(
2154
+ `git -C "${rootDir}" log --format='' --name-only -n 100`,
2155
+ { encoding: "utf-8" }
2156
+ ).trim();
2157
+ if (!output) return [];
2158
+ const seen = /* @__PURE__ */ new Set();
2159
+ const files = [];
2160
+ for (const line of output.split("\n")) {
2161
+ const trimmed = line.trim();
2162
+ if (!trimmed || seen.has(trimmed)) continue;
2163
+ seen.add(trimmed);
2164
+ const ext = import_node_path20.default.extname(trimmed);
2165
+ if (!CODE_EXTS2.has(ext)) continue;
2166
+ if (/\.(test|spec)\.(ts|js|tsx|jsx)$/.test(trimmed)) continue;
2167
+ const abs = import_node_path20.default.join(rootDir, trimmed);
2168
+ if (import_node_fs19.default.existsSync(abs)) files.push(abs);
2169
+ }
2170
+ return files;
2171
+ } catch {
2172
+ return [];
2173
+ }
2174
+ }
2175
+ function readTruncated(filePath, rootDir) {
2176
+ let content;
2177
+ try {
2178
+ content = import_node_fs19.default.readFileSync(filePath, "utf-8");
2179
+ } catch {
2180
+ return "";
2181
+ }
2182
+ const lines = content.split("\n");
2183
+ const relPath = import_node_path20.default.relative(rootDir, filePath);
2184
+ const truncated = lines.length > MAX_LINES_PER_FILE ? lines.slice(0, MAX_LINES_PER_FILE).join("\n") + "\n// ... truncated" : lines.join("\n");
2185
+ return `=== ${relPath} ===
2186
+ ${truncated}`;
2187
+ }
2188
+ async function inferArchitecturePatterns(rootDir, syntacticSections, adapter) {
2189
+ let candidates = getRecentlyChangedFiles(rootDir);
2190
+ if (candidates.length < MIN_FILES) {
2191
+ candidates = walkCodeFiles(rootDir);
2192
+ }
2193
+ if (candidates.length < MIN_FILES) return [];
2194
+ const sampled = candidates.slice(0, MAX_FILES);
2195
+ const fileSamples = sampled.map((f) => readTruncated(f, rootDir)).filter(Boolean).join("\n\n");
2196
+ if (!fileSamples) return [];
2197
+ const syntacticSummary = [
2198
+ `Naming: ${syntacticSections.naming.join("; ")}`,
2199
+ `Imports: ${syntacticSections.imports.join("; ")}`,
2200
+ `Error handling: ${syntacticSections.errorHandling.join("; ")}`
2201
+ ].join("\n");
2202
+ const prompt = `You are analysing a software project to infer its architectural and design conventions.
2203
+
2204
+ Syntactic analysis already found:
2205
+ ${syntacticSummary}
2206
+
2207
+ Source file samples:
2208
+ ${fileSamples}
2209
+
2210
+ Based on the code, identify 3\u20138 architectural or design patterns as imperative rules.
2211
+ Rules must be specific to this codebase, not generic best practices.
2212
+ Format: one rule per line, starting with "Always", "Never", or "When".
2213
+ Output only the rules \u2014 no explanation, no numbering, no markdown.`;
2214
+ let response;
2215
+ try {
2216
+ response = await adapter.complete(prompt);
2217
+ } catch {
2218
+ return [];
2219
+ }
2220
+ const rules = response.replace(/^```(?:markdown)?\s*/m, "").replace(/\s*```\s*$/m, "").split("\n").map((line) => line.replace(/^\s*[-*\d.]+\s*/, "").trim()).filter((line) => /^(Always|Never|When)\b/i.test(line));
2221
+ return rules;
2222
+ }
1850
2223
 
1851
2224
  // src/watch/transcripts.ts
1852
- var import_node_fs17 = __toESM(require("fs"), 1);
1853
- var import_node_path18 = __toESM(require("path"), 1);
2225
+ var import_node_fs20 = __toESM(require("fs"), 1);
2226
+ var import_node_path21 = __toESM(require("path"), 1);
1854
2227
  async function processTranscript(filePath, adapter) {
1855
2228
  let content;
1856
2229
  try {
1857
- content = import_node_fs17.default.readFileSync(filePath, "utf-8");
2230
+ content = import_node_fs20.default.readFileSync(filePath, "utf-8");
1858
2231
  } catch {
1859
2232
  return null;
1860
2233
  }
@@ -1897,13 +2270,13 @@ Each item must be a concise one-sentence string. Arrays may be empty.`;
1897
2270
  }
1898
2271
  }
1899
2272
  function watchTranscriptDir(rootDir, onNewFile) {
1900
- const sessionsDir = import_node_path18.default.join(rootDir, "tmp", "sessions");
1901
- import_node_fs17.default.mkdirSync(sessionsDir, { recursive: true });
1902
- const watcher = import_node_fs17.default.watch(sessionsDir, (event, filename) => {
2273
+ const sessionsDir = import_node_path21.default.join(rootDir, "tmp", "sessions");
2274
+ import_node_fs20.default.mkdirSync(sessionsDir, { recursive: true });
2275
+ const watcher = import_node_fs20.default.watch(sessionsDir, (event, filename) => {
1903
2276
  if (event !== "rename" || !filename) return;
1904
2277
  if (!filename.endsWith(".md")) return;
1905
- const filePath = import_node_path18.default.join(sessionsDir, filename);
1906
- if (!import_node_fs17.default.existsSync(filePath)) return;
2278
+ const filePath = import_node_path21.default.join(sessionsDir, filename);
2279
+ if (!import_node_fs20.default.existsSync(filePath)) return;
1907
2280
  onNewFile(filePath).catch(() => {
1908
2281
  });
1909
2282
  });
@@ -1913,12 +2286,12 @@ function watchTranscriptDir(rootDir, onNewFile) {
1913
2286
  }
1914
2287
 
1915
2288
  // src/watch/adapters/claude-local.ts
1916
- var import_node_child_process2 = require("child_process");
2289
+ var import_node_child_process3 = require("child_process");
1917
2290
  var ClaudeLocalAdapter = class {
1918
2291
  async complete(prompt) {
1919
2292
  const escaped = prompt.replace(/'/g, "'\\''");
1920
2293
  try {
1921
- const result = (0, import_node_child_process2.execSync)(`claude -p '${escaped}'`, {
2294
+ const result = (0, import_node_child_process3.execSync)(`claude -p '${escaped}'`, {
1922
2295
  encoding: "utf-8",
1923
2296
  timeout: 6e4,
1924
2297
  stdio: ["pipe", "pipe", "pipe"]
@@ -1999,12 +2372,12 @@ var OllamaAdapter = class {
1999
2372
  };
2000
2373
 
2001
2374
  // src/watch/adapters/codex-local.ts
2002
- var import_node_child_process3 = require("child_process");
2375
+ var import_node_child_process4 = require("child_process");
2003
2376
  var CodexLocalAdapter = class {
2004
2377
  async complete(prompt) {
2005
2378
  const escaped = prompt.replace(/'/g, "'\\''");
2006
2379
  try {
2007
- const result = (0, import_node_child_process3.execSync)(`codex '${escaped}'`, {
2380
+ const result = (0, import_node_child_process4.execSync)(`codex '${escaped}'`, {
2008
2381
  encoding: "utf-8",
2009
2382
  timeout: 6e4,
2010
2383
  stdio: ["pipe", "pipe", "pipe"]
@@ -2038,78 +2411,358 @@ function createAdapter(config) {
2038
2411
  }
2039
2412
 
2040
2413
  // src/watch/daemon.ts
2041
- async function runDaemon(rootDir, config) {
2042
- const silent = config.watch?.silent ?? false;
2043
- const log = silent ? () => {
2044
- } : (msg) => process.stdout.write(msg + "\n");
2414
+ function serializedAdapter(adapter) {
2415
+ let chain = Promise.resolve();
2416
+ return {
2417
+ complete(prompt) {
2418
+ const result = chain.then(() => adapter.complete(prompt));
2419
+ chain = result.then(() => {
2420
+ }, () => {
2421
+ });
2422
+ return result;
2423
+ }
2424
+ };
2425
+ }
2426
+ async function runDaemon(rootDir, config, ui) {
2045
2427
  const cleanup = [];
2046
2428
  const shutdown = () => {
2047
2429
  cleanup.forEach((fn) => fn());
2430
+ ui.clear();
2048
2431
  clearPid(rootDir);
2049
2432
  process.exit(0);
2050
2433
  };
2051
2434
  process.on("SIGTERM", shutdown);
2052
2435
  process.on("SIGINT", shutdown);
2053
- log("[clawstrap watch] daemon started");
2436
+ ui.daemonStarted();
2054
2437
  const sinceCommit = config.watchState?.lastGitCommit ?? null;
2438
+ ui.gitStart();
2055
2439
  const gitResult = await runGitObserver(rootDir, sinceCommit);
2440
+ ui.gitDone(gitResult ? { entriesWritten: gitResult.entriesWritten, lastCommit: gitResult.lastCommit } : null);
2441
+ let lastGitCommit = gitResult?.lastCommit ?? sinceCommit;
2056
2442
  if (gitResult) {
2057
2443
  updateWatchState(rootDir, { lastGitCommit: gitResult.lastCommit });
2058
- log(`[clawstrap watch] git: ${gitResult.entriesWritten} entries written`);
2059
2444
  }
2060
- const adapter = createAdapter(config);
2445
+ const adapter = serializedAdapter(createAdapter(config));
2446
+ let entriesSinceLastSynthesis = config.watchState?.entriesSinceLastSynthesis ?? 0;
2447
+ const synthEnabled = config.watch?.synthesis?.enabled ?? false;
2448
+ const triggerEveryN = config.watch?.synthesis?.triggerEveryN ?? 10;
2449
+ const maybeSynthesize = async () => {
2450
+ if (!synthEnabled || entriesSinceLastSynthesis < triggerEveryN) return;
2451
+ ui.synthStart();
2452
+ const summary = await synthesizeMemory(rootDir, adapter);
2453
+ ui.synthDone(summary);
2454
+ entriesSinceLastSynthesis = 0;
2455
+ updateWatchState(rootDir, { entriesSinceLastSynthesis: "0" });
2456
+ };
2061
2457
  const stopTranscripts = watchTranscriptDir(rootDir, async (filePath) => {
2062
- log(`[clawstrap watch] transcript: processing ${import_node_path19.default.basename(filePath)}`);
2458
+ ui.transcriptStart(import_node_path22.default.basename(filePath));
2459
+ ui.llmCallStart();
2063
2460
  const result = await processTranscript(filePath, adapter);
2461
+ ui.llmCallDone(result ? { decisions: result.decisions.length, corrections: result.corrections.length, openThreads: result.openThreads.length } : null);
2064
2462
  if (result) {
2065
- const { appendToMemory: appendToMemory2, appendToGotchaLog: appendToGotchaLog2, appendToFutureConsiderations: appendToFutureConsiderations2 } = await Promise.resolve().then(() => (init_writers(), writers_exports));
2066
- if (result.decisions.length) appendToMemory2(rootDir, result.decisions, "session");
2067
- if (result.corrections.length) appendToGotchaLog2(rootDir, result.corrections);
2463
+ const { appendToMemory: appendToMemory2, appendToGotchaLog: appendToGotchaLog2, appendToFutureConsiderations: appendToFutureConsiderations2, appendToOpenThreads: appendToOpenThreads2 } = await Promise.resolve().then(() => (init_writers(), writers_exports));
2464
+ let written = 0;
2465
+ if (result.decisions.length) written += appendToMemory2(rootDir, result.decisions, "session");
2466
+ if (result.corrections.length) {
2467
+ appendToGotchaLog2(rootDir, result.corrections);
2468
+ await checkAndPromoteCorrections(rootDir, adapter, ui);
2469
+ }
2068
2470
  if (result.deferredIdeas.length) appendToFutureConsiderations2(rootDir, result.deferredIdeas);
2069
- updateWatchState(rootDir, { lastTranscriptAt: (/* @__PURE__ */ new Date()).toISOString() });
2070
- log(
2071
- `[clawstrap watch] transcript: decisions=${result.decisions.length} corrections=${result.corrections.length}`
2072
- );
2471
+ if (result.openThreads.length) appendToOpenThreads2(rootDir, result.openThreads);
2472
+ entriesSinceLastSynthesis += written;
2473
+ updateWatchState(rootDir, {
2474
+ lastTranscriptAt: (/* @__PURE__ */ new Date()).toISOString(),
2475
+ entriesSinceLastSynthesis: String(entriesSinceLastSynthesis)
2476
+ });
2477
+ ui.transcriptWriteDone();
2478
+ await maybeSynthesize();
2073
2479
  }
2074
2480
  });
2075
2481
  cleanup.push(stopTranscripts);
2482
+ let gitRunning = false;
2483
+ const pollIntervalMinutes = config.watch?.git?.pollIntervalMinutes ?? 5;
2484
+ const gitPollTimer = setInterval(async () => {
2485
+ if (gitRunning) return;
2486
+ gitRunning = true;
2487
+ try {
2488
+ const result = await runGitObserver(rootDir, lastGitCommit);
2489
+ if (result && result.entriesWritten > 0) {
2490
+ ui.gitPollDone({ entriesWritten: result.entriesWritten, lastCommit: result.lastCommit });
2491
+ }
2492
+ if (result) {
2493
+ lastGitCommit = result.lastCommit;
2494
+ updateWatchState(rootDir, { lastGitCommit: result.lastCommit });
2495
+ }
2496
+ } finally {
2497
+ gitRunning = false;
2498
+ }
2499
+ }, pollIntervalMinutes * 60 * 1e3);
2500
+ cleanup.push(() => clearInterval(gitPollTimer));
2076
2501
  const intervalDays = config.watch?.scan?.intervalDays ?? 7;
2077
2502
  const lastScan = config.watchState?.lastScanAt ? new Date(config.watchState.lastScanAt) : null;
2078
2503
  const msSinceLastScan = lastScan ? Date.now() - lastScan.getTime() : Infinity;
2079
2504
  const scanIntervalMs = intervalDays * 24 * 60 * 60 * 1e3;
2080
2505
  const doScan = async () => {
2081
- log("[clawstrap watch] scan: running convention scan...");
2506
+ ui.scanStart(lastScan);
2507
+ ui.scanFilesStart();
2082
2508
  const sections = await runScan(rootDir);
2509
+ ui.scanFilesDone();
2510
+ if (config.watch?.adapter) {
2511
+ ui.inferStart();
2512
+ const rules = await inferArchitecturePatterns(rootDir, sections, adapter);
2513
+ ui.inferDone(rules.length > 0 ? rules.length : null);
2514
+ if (rules.length > 0) sections.architecture = rules;
2515
+ }
2083
2516
  writeConventions(rootDir, sections);
2084
2517
  updateWatchState(rootDir, { lastScanAt: (/* @__PURE__ */ new Date()).toISOString() });
2085
- log("[clawstrap watch] scan: conventions.md updated");
2518
+ ui.scanDone(sections.naming[0] ?? "");
2086
2519
  };
2087
2520
  if (msSinceLastScan >= scanIntervalMs) {
2088
2521
  await doScan();
2089
2522
  }
2090
2523
  const scanTimer = setInterval(doScan, scanIntervalMs);
2091
2524
  cleanup.push(() => clearInterval(scanTimer));
2092
- log("[clawstrap watch] watching for changes...");
2525
+ ui.showIdle(import_node_path22.default.join(rootDir, "tmp", "sessions"));
2093
2526
  await new Promise(() => {
2094
2527
  });
2095
2528
  }
2096
2529
  function updateWatchState(rootDir, updates) {
2097
- const configPath = import_node_path19.default.join(rootDir, ".clawstrap.json");
2530
+ const configPath = import_node_path22.default.join(rootDir, ".clawstrap.json");
2098
2531
  try {
2099
- const raw = JSON.parse(import_node_fs18.default.readFileSync(configPath, "utf-8"));
2532
+ const raw = JSON.parse(import_node_fs21.default.readFileSync(configPath, "utf-8"));
2100
2533
  raw["watchState"] = { ...raw["watchState"] ?? {}, ...updates };
2101
- import_node_fs18.default.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n", "utf-8");
2534
+ import_node_fs21.default.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n", "utf-8");
2102
2535
  } catch {
2103
2536
  }
2104
2537
  }
2105
2538
 
2539
+ // src/watch/ui.ts
2540
+ var import_picocolors = __toESM(require("picocolors"), 1);
2541
+ var import_ora = __toESM(require("ora"), 1);
2542
+ var ora = typeof import_ora.default === "function" ? import_ora.default : import_ora.default.default;
2543
+ function formatAgo(date) {
2544
+ if (!date) return "never";
2545
+ const diffMs = Date.now() - date.getTime();
2546
+ const diffMins = Math.floor(diffMs / 6e4);
2547
+ if (diffMins < 60) return `${diffMins} minute${diffMins === 1 ? "" : "s"} ago`;
2548
+ const diffHours = Math.floor(diffMins / 60);
2549
+ if (diffHours < 24) return `${diffHours} hour${diffHours === 1 ? "" : "s"} ago`;
2550
+ const diffDays = Math.floor(diffHours / 24);
2551
+ return `${diffDays} day${diffDays === 1 ? "" : "s"} ago`;
2552
+ }
2553
+ var T = {
2554
+ branch: import_picocolors.default.gray("\u251C\u2500"),
2555
+ last: import_picocolors.default.gray("\u2514\u2500"),
2556
+ check: import_picocolors.default.green("\u2713")
2557
+ };
2558
+ function header(label) {
2559
+ process.stdout.write(`
2560
+ ${import_picocolors.default.cyan("\u25C6")} ${import_picocolors.default.bold(label)}
2561
+ `);
2562
+ }
2563
+ function row(connector, label, value) {
2564
+ const val = value !== void 0 ? ` ${import_picocolors.default.bold(value)}` : "";
2565
+ process.stdout.write(`${connector} ${label}${val}
2566
+ `);
2567
+ }
2568
+ var SilentUI = class {
2569
+ daemonStarted() {
2570
+ }
2571
+ gitStart() {
2572
+ }
2573
+ gitDone(_result) {
2574
+ }
2575
+ gitPollDone(_result) {
2576
+ }
2577
+ transcriptStart(_filename) {
2578
+ }
2579
+ llmCallStart() {
2580
+ }
2581
+ llmCallDone(_counts) {
2582
+ }
2583
+ transcriptWriteDone() {
2584
+ }
2585
+ scanStart(_lastRunAt) {
2586
+ }
2587
+ scanFilesStart() {
2588
+ }
2589
+ scanFilesDone() {
2590
+ }
2591
+ scanDone(_namingStyle) {
2592
+ }
2593
+ synthStart() {
2594
+ }
2595
+ synthDone(_summary) {
2596
+ }
2597
+ inferStart() {
2598
+ }
2599
+ inferDone(_rulesCount) {
2600
+ }
2601
+ promoteStart() {
2602
+ }
2603
+ promoteDone(_rulesWritten) {
2604
+ }
2605
+ showIdle(_watchDir) {
2606
+ }
2607
+ clear() {
2608
+ }
2609
+ };
2610
+ var RichUI = class {
2611
+ spinner = null;
2612
+ daemonStarted() {
2613
+ process.stdout.write(`
2614
+ ${import_picocolors.default.cyan("clawstrap watch")} ${import_picocolors.default.dim("daemon started")}
2615
+ `);
2616
+ }
2617
+ // Git observer ──────────────────────────────────────────────────────────────
2618
+ gitStart() {
2619
+ header("Git observer");
2620
+ }
2621
+ gitDone(result) {
2622
+ if (!result) {
2623
+ row(T.last, import_picocolors.default.dim("No new commits found."));
2624
+ return;
2625
+ }
2626
+ row(T.branch, "Last processed commit", result.lastCommit.slice(0, 7));
2627
+ row(T.branch, "Entries written", String(result.entriesWritten));
2628
+ row(T.last, `Writing to MEMORY.md... ${T.check} done`);
2629
+ }
2630
+ gitPollDone(result) {
2631
+ const time = (/* @__PURE__ */ new Date()).toTimeString().slice(0, 5);
2632
+ process.stdout.write(
2633
+ `
2634
+ ${import_picocolors.default.cyan("\u25C6")} ${import_picocolors.default.bold("Git:")} +${result.entriesWritten} entr${result.entriesWritten === 1 ? "y" : "ies"} written ${import_picocolors.default.dim(result.lastCommit.slice(0, 7))} ${import_picocolors.default.dim(time)}
2635
+ `
2636
+ );
2637
+ }
2638
+ // Transcript ────────────────────────────────────────────────────────────────
2639
+ transcriptStart(filename) {
2640
+ header(`New session summary detected ${import_picocolors.default.cyan(filename)}`);
2641
+ }
2642
+ llmCallStart() {
2643
+ this.spinner = ora({
2644
+ text: `${T.branch} Sending to LLM adapter...`,
2645
+ prefixText: ""
2646
+ }).start();
2647
+ }
2648
+ llmCallDone(counts) {
2649
+ if (this.spinner) {
2650
+ if (counts) {
2651
+ this.spinner.succeed(`${T.branch} Sending to LLM adapter... ${T.check}`);
2652
+ } else {
2653
+ this.spinner.fail(`${T.branch} Sending to LLM adapter... failed`);
2654
+ }
2655
+ this.spinner = null;
2656
+ }
2657
+ if (counts) {
2658
+ row(T.branch, "Decisions found ", String(counts.decisions));
2659
+ row(T.branch, "Corrections found ", String(counts.corrections));
2660
+ row(T.branch, "Open threads found ", String(counts.openThreads));
2661
+ }
2662
+ }
2663
+ transcriptWriteDone() {
2664
+ row(T.last, `Writing to memory files... ${T.check} done`);
2665
+ }
2666
+ // Convention scan ───────────────────────────────────────────────────────────
2667
+ scanStart(lastRunAt) {
2668
+ header(`Convention scan ${import_picocolors.default.dim(`(last run: ${formatAgo(lastRunAt)})`)}`);
2669
+ }
2670
+ scanFilesStart() {
2671
+ this.spinner = ora({
2672
+ text: `${T.branch} Scanning files...`,
2673
+ prefixText: ""
2674
+ }).start();
2675
+ }
2676
+ scanFilesDone() {
2677
+ if (this.spinner) {
2678
+ this.spinner.succeed(`${T.branch} Scanning files... done`);
2679
+ this.spinner = null;
2680
+ }
2681
+ }
2682
+ scanDone(namingStyle) {
2683
+ if (namingStyle) {
2684
+ row(T.branch, "Naming convention ", namingStyle);
2685
+ }
2686
+ row(T.last, `Writing conventions.md... ${T.check} done`);
2687
+ }
2688
+ // Memory synthesis ──────────────────────────────────────────────────────────
2689
+ synthStart() {
2690
+ this.spinner?.stop();
2691
+ this.spinner = ora({
2692
+ text: `${T.branch} Synthesising memory...`,
2693
+ prefixText: ""
2694
+ }).start();
2695
+ }
2696
+ synthDone(summary) {
2697
+ if (this.spinner) {
2698
+ if (summary) {
2699
+ const preview = summary.length > 60 ? summary.slice(0, 60) + "\u2026" : summary;
2700
+ this.spinner.succeed(`${T.branch} Living summary updated ${import_picocolors.default.dim(preview)}`);
2701
+ } else {
2702
+ this.spinner.fail(`${T.branch} Memory synthesis failed`);
2703
+ }
2704
+ this.spinner = null;
2705
+ }
2706
+ }
2707
+ // Architecture inference ────────────────────────────────────────────────────
2708
+ inferStart() {
2709
+ this.spinner?.stop();
2710
+ this.spinner = ora({
2711
+ text: `${T.branch} Inferring architecture patterns...`,
2712
+ prefixText: ""
2713
+ }).start();
2714
+ }
2715
+ inferDone(rulesCount) {
2716
+ if (this.spinner) {
2717
+ if (rulesCount !== null && rulesCount > 0) {
2718
+ this.spinner.succeed(`${T.branch} Architecture patterns inferred ${import_picocolors.default.bold(String(rulesCount))} rules`);
2719
+ } else {
2720
+ this.spinner.fail(`${T.branch} Architecture inference failed`);
2721
+ }
2722
+ this.spinner = null;
2723
+ }
2724
+ }
2725
+ // Correction promotion ──────────────────────────────────────────────────────
2726
+ promoteStart() {
2727
+ this.spinner?.stop();
2728
+ this.spinner = ora({
2729
+ text: `${T.branch} Promoting corrections to rule...`,
2730
+ prefixText: ""
2731
+ }).start();
2732
+ }
2733
+ promoteDone(rulesWritten) {
2734
+ if (this.spinner) {
2735
+ if (rulesWritten > 0) {
2736
+ this.spinner.succeed(`${T.branch} Draft rule written to .claude/rules/ ${T.check}`);
2737
+ } else {
2738
+ this.spinner.fail(`${T.branch} Rule promotion failed`);
2739
+ }
2740
+ this.spinner = null;
2741
+ }
2742
+ }
2743
+ // Idle ──────────────────────────────────────────────────────────────────────
2744
+ showIdle(watchDir) {
2745
+ process.stdout.write(`
2746
+ ${import_picocolors.default.dim("\u25C7")} ${import_picocolors.default.dim("Watching for changes...")}
2747
+ `);
2748
+ row(T.last, import_picocolors.default.dim("Transcripts"), import_picocolors.default.dim(watchDir + " (listening)"));
2749
+ process.stdout.write("\n");
2750
+ }
2751
+ // Cleanup ───────────────────────────────────────────────────────────────────
2752
+ clear() {
2753
+ if (this.spinner) {
2754
+ this.spinner.stop();
2755
+ this.spinner = null;
2756
+ }
2757
+ }
2758
+ };
2759
+ function createUI(silent) {
2760
+ return silent ? new SilentUI() : new RichUI();
2761
+ }
2762
+
2106
2763
  // src/watch.ts
2107
2764
  async function watch(options) {
2108
2765
  const { config, rootDir } = loadWorkspace();
2109
- if (options._daemon) {
2110
- await runDaemon(rootDir, config);
2111
- return;
2112
- }
2113
2766
  if (options.stop) {
2114
2767
  const pid = readPid(rootDir);
2115
2768
  if (!pid || !isDaemonRunning(rootDir)) {
@@ -2123,61 +2776,56 @@ Daemon stopped (pid ${pid}).
2123
2776
  `);
2124
2777
  return;
2125
2778
  }
2779
+ const silent = options.silent ?? config.watch?.silent ?? false;
2780
+ const ui = createUI(silent);
2126
2781
  if (options.once) {
2127
- console.log("\nRunning all observers once...\n");
2782
+ ui.gitStart();
2128
2783
  const gitResult = await runGitObserver(rootDir, config.watchState?.lastGitCommit ?? null);
2784
+ ui.gitDone(gitResult ? { entriesWritten: gitResult.entriesWritten, lastCommit: gitResult.lastCommit } : null);
2129
2785
  if (gitResult) {
2130
2786
  persistWatchState(rootDir, { lastGitCommit: gitResult.lastCommit });
2131
- console.log(` \u2713 git: ${gitResult.entriesWritten} entries`);
2132
2787
  }
2788
+ const lastScanAt = config.watchState?.lastScanAt ? new Date(config.watchState.lastScanAt) : null;
2789
+ ui.scanStart(lastScanAt);
2790
+ ui.scanFilesStart();
2133
2791
  const sections = await runScan(rootDir);
2792
+ ui.scanFilesDone();
2793
+ if (config.watch?.adapter) {
2794
+ const adapter = createAdapter(config);
2795
+ ui.inferStart();
2796
+ const rules = await inferArchitecturePatterns(rootDir, sections, adapter);
2797
+ ui.inferDone(rules.length > 0 ? rules.length : null);
2798
+ if (rules.length > 0) sections.architecture = rules;
2799
+ }
2134
2800
  writeConventions(rootDir, sections);
2135
2801
  persistWatchState(rootDir, { lastScanAt: (/* @__PURE__ */ new Date()).toISOString() });
2136
- console.log(" \u2713 scan: conventions.md updated");
2137
- console.log("\nDone.\n");
2802
+ ui.scanDone(sections.naming[0] ?? "");
2138
2803
  return;
2139
2804
  }
2140
2805
  if (isDaemonRunning(rootDir)) {
2141
2806
  const pid = readPid(rootDir);
2142
2807
  console.log(`
2143
- Daemon already running (pid ${pid}). Use --stop to stop it.
2808
+ Watch is already running (pid ${pid}). Use --stop to stop it.
2144
2809
  `);
2145
2810
  return;
2146
2811
  }
2147
2812
  injectWatchHook(rootDir, config);
2148
- const self = process.argv[1];
2149
- const child = (0, import_node_child_process4.spawn)(process.execPath, [self, "watch", "--_daemon"], {
2150
- detached: true,
2151
- stdio: "ignore",
2152
- cwd: rootDir
2153
- });
2154
- child.unref();
2155
- if (child.pid) {
2156
- writePid(rootDir, child.pid);
2157
- if (!options.silent) {
2158
- console.log(`
2159
- Daemon started (pid ${child.pid}).`);
2160
- console.log(`Run 'clawstrap watch --stop' to stop it.
2161
- `);
2162
- }
2163
- } else {
2164
- console.error("\nFailed to start daemon.\n");
2165
- process.exit(1);
2166
- }
2813
+ writePid(rootDir, process.pid);
2814
+ await runDaemon(rootDir, config, ui);
2167
2815
  }
2168
2816
  function persistWatchState(rootDir, updates) {
2169
- const configPath = import_node_path20.default.join(rootDir, ".clawstrap.json");
2817
+ const configPath = import_node_path23.default.join(rootDir, ".clawstrap.json");
2170
2818
  try {
2171
- const raw = JSON.parse(import_node_fs19.default.readFileSync(configPath, "utf-8"));
2819
+ const raw = JSON.parse(import_node_fs22.default.readFileSync(configPath, "utf-8"));
2172
2820
  raw.watchState = { ...raw.watchState ?? {}, ...updates };
2173
- import_node_fs19.default.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n", "utf-8");
2821
+ import_node_fs22.default.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n", "utf-8");
2174
2822
  } catch {
2175
2823
  }
2176
2824
  }
2177
2825
  function injectWatchHook(rootDir, config) {
2178
- const governanceFile = import_node_path20.default.join(rootDir, "CLAUDE.md");
2179
- if (!import_node_fs19.default.existsSync(governanceFile)) return;
2180
- const content = import_node_fs19.default.readFileSync(governanceFile, "utf-8");
2826
+ const governanceFile = import_node_path23.default.join(rootDir, "CLAUDE.md");
2827
+ if (!import_node_fs22.default.existsSync(governanceFile)) return;
2828
+ const content = import_node_fs22.default.readFileSync(governanceFile, "utf-8");
2181
2829
  if (content.includes("<!-- CLAWSTRAP:WATCH -->")) return;
2182
2830
  const _config = config;
2183
2831
  void _config;
@@ -2204,23 +2852,23 @@ function injectWatchHook(rootDir, config) {
2204
2852
 
2205
2853
  The watch daemon picks this up automatically and updates MEMORY.md and gotcha-log.md.
2206
2854
  `;
2207
- import_node_fs19.default.appendFileSync(governanceFile, hook, "utf-8");
2855
+ import_node_fs22.default.appendFileSync(governanceFile, hook, "utf-8");
2208
2856
  }
2209
2857
 
2210
2858
  // src/analyze.ts
2211
- var import_node_fs20 = __toESM(require("fs"), 1);
2212
- var import_node_path21 = __toESM(require("path"), 1);
2859
+ var import_node_fs23 = __toESM(require("fs"), 1);
2860
+ var import_node_path24 = __toESM(require("path"), 1);
2213
2861
  init_writers();
2214
2862
  async function analyze() {
2215
2863
  const { rootDir } = loadWorkspace();
2216
2864
  console.log("\nScanning codebase conventions...\n");
2217
2865
  const sections = await runScan(rootDir);
2218
2866
  writeConventions(rootDir, sections);
2219
- const configPath = import_node_path21.default.join(rootDir, ".clawstrap.json");
2867
+ const configPath = import_node_path24.default.join(rootDir, ".clawstrap.json");
2220
2868
  try {
2221
- const raw = JSON.parse(import_node_fs20.default.readFileSync(configPath, "utf-8"));
2869
+ const raw = JSON.parse(import_node_fs23.default.readFileSync(configPath, "utf-8"));
2222
2870
  raw["watchState"] = { ...raw["watchState"] ?? {}, lastScanAt: (/* @__PURE__ */ new Date()).toISOString() };
2223
- import_node_fs20.default.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n", "utf-8");
2871
+ import_node_fs23.default.writeFileSync(configPath, JSON.stringify(raw, null, 2) + "\n", "utf-8");
2224
2872
  } catch {
2225
2873
  }
2226
2874
  console.log(" \u2713 .claude/rules/conventions.md updated\n");
@@ -2228,7 +2876,7 @@ async function analyze() {
2228
2876
 
2229
2877
  // src/index.ts
2230
2878
  var program = new import_commander.Command();
2231
- program.name("clawstrap").description("Scaffold a production-ready AI agent workspace").version("1.4.1");
2879
+ program.name("clawstrap").description("Scaffold a production-ready AI agent workspace").version("1.5.1");
2232
2880
  program.command("init").description("Create a new AI workspace in the current directory").argument("[directory]", "Target directory", ".").option("-y, --yes", "Use defaults, skip prompts").option("--sdd", "Enable Spec-Driven Development mode").action(async (directory, options) => {
2233
2881
  await init(directory, options);
2234
2882
  });
@@ -2256,7 +2904,7 @@ Unknown format: ${options.format}. Supported: paperclip
2256
2904
  }
2257
2905
  await exportPaperclip(options);
2258
2906
  });
2259
- program.command("watch").description("Start adaptive memory daemon for this workspace").option("--stop", "Stop the running daemon").option("--silent", "Run without output").option("--once", "Run all observers once and exit (no persistent daemon)").option("--_daemon", void 0).action(async (options) => {
2907
+ program.command("watch").description("Start adaptive memory daemon for this workspace").option("--stop", "Stop the running daemon").option("--silent", "Run without output").option("--once", "Run all observers once and exit (no persistent daemon)").action(async (options) => {
2260
2908
  await watch(options);
2261
2909
  });
2262
2910
  program.command("analyze").description("Run codebase convention scan immediately").action(async () => {