@ulpi/cli 0.1.0 → 0.1.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (26) hide show
  1. package/README.md +139 -20
  2. package/dist/{chunk-6JCMYYBT.js → chunk-FNPD3V2X.js} +194 -110
  3. package/dist/{chunk-PKD4ASEM.js → chunk-S253WCQJ.js} +1 -1
  4. package/dist/{chunk-Q4HIY43N.js → chunk-UA6EFK64.js} +7 -11
  5. package/dist/{chunk-DBMUNBNB.js → chunk-VGZLMUNO.js} +148 -18
  6. package/dist/{chunk-247GVVKK.js → chunk-ZLYRPD7I.js} +18 -16
  7. package/dist/{codemap-RRJIDBQ5.js → codemap-3BVYMMVM.js} +49 -17
  8. package/dist/{dist-7LHZ65GC.js → dist-3SNTTNM3.js} +4 -3
  9. package/dist/{dist-R5F4MX3I.js → dist-57UMTPGR.js} +4 -3
  10. package/dist/{dist-LZKZFPVX.js → dist-MFFX7TZW.js} +5 -2
  11. package/dist/{dist-W7K4WPAF.js → dist-QYFQYSXP.js} +41 -13
  12. package/dist/{dist-R5ZJ4LX5.js → dist-YA2BWZB2.js} +1 -1
  13. package/dist/{history-ATTUKOHO.js → history-UG65BCO6.js} +5 -2
  14. package/dist/index.js +32 -20
  15. package/dist/{init-AY5C2ZAS.js → init-22PO3EQB.js} +2 -2
  16. package/dist/{memory-J3G24QHS.js → memory-IT4H3WRD.js} +114 -24
  17. package/dist/{server-MOYPE4SM-N7SE2AN7.js → server-KYER5KX3-43RQQ4MY.js} +1 -1
  18. package/dist/skills/ulpi-generate-guardian/SKILL.md +246 -7
  19. package/dist/skills/ulpi-generate-guardian/references/framework-rules.md +161 -4
  20. package/dist/skills/ulpi-generate-guardian/references/language-rules.md +13 -18
  21. package/dist/{ui-L7UAWXDY.js → ui-3EFREFSY.js} +2 -2
  22. package/dist/ui.html +99 -99
  23. package/dist/{update-M2B4RLGH.js → update-364RHTAO.js} +1 -1
  24. package/dist/{version-checker-ANCS3IHR.js → version-checker-DTAS4ZYK.js} +1 -1
  25. package/package.json +29 -28
  26. package/LICENSE +0 -21
package/README.md CHANGED
@@ -40,20 +40,19 @@ cd your-project
40
40
  ulpi init
41
41
  ```
42
42
 
43
- That's it. ULPI detects your stack — language, framework, package manager, test runner, linter — and generates a tailored rules configuration. Hooks are installed into Claude Code automatically.
43
+ ULPI detects your stack — language, framework, package manager, test runner, linter — and generates a tailored rules configuration. Hooks are installed into Claude Code automatically.
44
44
 
45
45
  Next time you start a Claude Code session, ULPI is active.
46
46
 
47
47
  ### Try it without installing
48
48
 
49
49
  ```bash
50
- cd your-project
51
50
  npx @ulpi/cli init
52
51
  ```
53
52
 
54
53
  ---
55
54
 
56
- ## What You Get
55
+ ## Features
57
56
 
58
57
  ### Automatic Permissions
59
58
 
@@ -66,6 +65,12 @@ permissions:
66
65
  matcher: Bash
67
66
  command_pattern: "npm test|pnpm test|yarn test"
68
67
  decision: allow
68
+
69
+ auto-approve-git-read:
70
+ trigger: PermissionRequest
71
+ matcher: Bash
72
+ command_pattern: "git (status|log|diff|branch)"
73
+ decision: allow
69
74
  ```
70
75
 
71
76
  ### Guardrails
@@ -83,33 +88,140 @@ preconditions:
83
88
  matcher: "Write|Edit"
84
89
  message: "Read the file before editing it."
85
90
  requires_read: true
91
+
92
+ test-before-commit:
93
+ trigger: PreToolUse
94
+ matcher: Bash
95
+ command_pattern: "git commit"
96
+ message: "Run tests before committing."
97
+ requires:
98
+ tests_run: true
99
+ ```
100
+
101
+ ### Pipelines
102
+
103
+ Chain multiple steps into automated workflows that run after tool execution.
104
+
105
+ ```yaml
106
+ pipelines:
107
+ pre-commit-checks:
108
+ trigger: PostToolUse
109
+ matcher: Bash
110
+ command_pattern: "git commit"
111
+ steps:
112
+ - name: build
113
+ command: "pnpm -r build"
114
+ - name: test
115
+ command: "pnpm test"
116
+ block_on_failure: true
86
117
  ```
87
118
 
88
119
  ### Session Tracking
89
120
 
90
- ULPI knows what happened in your session — which files were read, which were written, which commands ran. Rules can use this context to make smarter decisions.
121
+ ULPI tracks every action in real time — files read and written, commands executed, rules enforced, tests passed or failed. Session state powers smarter rule evaluation and feeds into history, memory, and the web dashboard.
122
+
123
+ Each session follows a state machine (`idle` -> `active` -> `active_committed` -> `ended`) and logs up to 10,000 events as an append-only JSONL file.
91
124
 
92
125
  ### Web Dashboard
93
126
 
94
- A full web UI for managing rules, viewing live sessions, reviewing AI-generated plans, and browsing session history.
127
+ A full web UI for managing rules, monitoring live sessions, reviewing plans and code, and browsing history.
95
128
 
96
129
  ```bash
97
130
  ulpi ui
98
131
  ```
99
132
 
100
- <!-- TODO: Add screenshot -->
133
+ The dashboard shows:
134
+
135
+ - **Guards** — Active rule counts, enforcement stats, rule breakdown by type
136
+ - **Session** — Live session state with real-time event timeline (WebSocket)
137
+ - **Review** — Pending plan and code reviews
138
+ - **CodeMap** — Index status, file and chunk counts
139
+ - **Memory** — Memory count, classified sessions, importance breakdown
140
+ - **History** — Captured entries with AI enrichment status
141
+ - **Skills** — Available bundled, project, and global skills
142
+ - **Responses** — Notification channel configuration
101
143
 
102
144
  ### Plan & Code Review
103
145
 
104
- Review AI-generated plans and code changes in a dedicated UI before they execute. Annotate, approve, or reject with feedback that gets sent back to the agent.
146
+ Review AI-generated plans and code changes in a browser-based UI before they execute.
147
+
148
+ **Plan review** intercepts `ExitPlanMode` — the plan is parsed into sections, scored across 8 quality dimensions (test coverage, error handling, rollback strategy, scope clarity, and more), and presented for your review. You can annotate sections, assign priorities and risk levels, add per-section instructions, or make inline edits. Approve or deny with feedback that goes directly back to the agent.
149
+
150
+ **Code review** intercepts `git commit` — the staged diff and commit message are shown for review with line-level annotation tools.
151
+
152
+ Both flows use long-poll HTTP transport with a 10-minute timeout. If the server isn't running, the agent continues normally (fail-open).
105
153
 
106
- ### Semantic Code Search
154
+ ### Semantic Code Search (CodeMap)
107
155
 
108
- ULPI indexes your codebase and exposes it as MCP tools that Claude Code can use for smarter, context-aware search — beyond simple grep.
156
+ ULPI indexes your entire codebase using hybrid vector + BM25 search with AST-aware chunking. The agent gets smarter search than grep — it understands code semantics, symbol names, and file context.
157
+
158
+ - **AST chunking** aligns chunks to function and class boundaries
159
+ - **Symbol extraction** identifies functions, classes, interfaces, and types
160
+ - **Hybrid ranking** fuses vector similarity (60%), keyword matching (25%), symbol boost (10%), and path relevance (5%)
161
+ - **Incremental indexing** — only re-indexes changed files
162
+ - **Per-branch indexes** — each git branch gets its own index
163
+ - **Shadow branch export** — share indexes across machines via git
164
+
165
+ Exposed to Claude Code as MCP tools: `search_code`, `search_symbols`, `get_file_summary`, `get_index_stats`, and `reindex`.
166
+
167
+ ```bash
168
+ ulpi codemap init # Index your codebase
169
+ ulpi codemap search "authentication middleware"
170
+ ```
109
171
 
110
172
  ### Agent Memory
111
173
 
112
- Captures decisions, patterns, and lessons learned across sessions. Your AI agent builds institutional knowledge over time instead of starting fresh every conversation.
174
+ Your AI agent builds institutional knowledge across sessions instead of starting fresh every conversation.
175
+
176
+ ULPI captures session events, classifies them with an LLM into 8 memory types — **decisions**, **patterns**, **bug root causes**, **preferences**, **constraints**, **context**, **lessons**, and **relationships** — then stores them with vector embeddings for semantic search.
177
+
178
+ At the start of each session, the most relevant memories are surfaced to the agent automatically.
179
+
180
+ - **Deduplication** — new memories are compared against existing ones (0.92 similarity threshold) to prevent redundancy
181
+ - **Importance-weighted ranking** — critical memories never decay; low-importance ones fade over time
182
+ - **Redaction** — API keys, tokens, and secrets are stripped before storage
183
+ - **Soft delete** — memories can be superseded rather than permanently removed
184
+
185
+ Exposed as MCP tools: `search_memory`, `save_memory`, `get_timeline`, `get_session_context`, `forget`, and `memory_stats`.
186
+
187
+ ```bash
188
+ ulpi memory search "authentication approach"
189
+ ulpi memory status
190
+ ```
191
+
192
+ ### Session History
193
+
194
+ Every coding session is recorded on a per-user orphan git branch (`ulpi/history-<username>`) — separate from your code, never touching your working tree.
195
+
196
+ Each commit gets a structured JSON entry with:
197
+
198
+ - Git metadata (SHA, message, author, branch, parents)
199
+ - Diff statistics (files changed, insertions, deletions, per-file breakdown)
200
+ - Session summary (files read/written, commands run, rules enforced)
201
+ - Hook-derived analytics (rule evaluations, permission decisions, tool usage)
202
+ - AI enrichment (summary, intent, challenges, learnings, recommendations)
203
+ - Review plan snapshots and session transcripts
204
+
205
+ ```bash
206
+ ulpi history init # Create the history branch
207
+ ulpi history list # Browse captured sessions
208
+ ulpi history enrich --all # AI-enrich all entries
209
+ ```
210
+
211
+ ### Notifications
212
+
213
+ Route events to desktop notifications, webhooks, terminal bells, or log files based on classification rules. Built-in deduplication prevents notification storms.
214
+
215
+ ### Templates & Skills
216
+
217
+ ULPI ships with **28 built-in templates** for popular stacks — Node.js, Python, Go, Rust, Next.js, Express, Laravel, Django, FastAPI, Prisma, Docker, and more.
218
+
219
+ **Skills** are injectable markdown guides that ULPI attaches to blocked tool messages — teaching the agent how to fix the issue instead of just blocking it.
220
+
221
+ ```bash
222
+ ulpi templates list
223
+ ulpi skills list
224
+ ```
113
225
 
114
226
  ---
115
227
 
@@ -143,12 +255,12 @@ permissions:
143
255
  matcher: Bash
144
256
  command_pattern: "git (status|log|diff|branch)"
145
257
  decision: allow
146
- ```
147
-
148
- ULPI ships with **28 built-in templates** for popular stacks — Node.js, Python, Go, Rust, Next.js, Laravel, Django, FastAPI, and more.
149
258
 
150
- ```bash
151
- ulpi templates list
259
+ review:
260
+ enabled: true
261
+ plan_review: true
262
+ code_review: true
263
+ auto_open_browser: true
152
264
  ```
153
265
 
154
266
  ---
@@ -179,12 +291,19 @@ ulpi templates list
179
291
 
180
292
  ## How It Works
181
293
 
182
- ULPI uses [Claude Code hooks](https://docs.anthropic.com/en/docs/claude-code/hooks) — lifecycle events that fire before and after every tool execution:
294
+ ULPI uses [Claude Code hooks](https://docs.anthropic.com/en/docs/claude-code/hooks) — lifecycle events that fire before and after every tool execution.
295
+
296
+ Seven hooks intercept the full session lifecycle:
183
297
 
184
- 1. **Before a tool runs**, ULPI checks your rules. Should this file edit be allowed? Has the file been read first? Is this command safe?
185
- 2. **When a permission is requested**, ULPI decides automatically based on your rules — allow, deny, or ask the user.
186
- 3. **After a tool runs**, ULPI updates session state and runs any postconditions you've defined.
187
- 4. **At session boundaries**, ULPI captures history, classifies memories, and indexes code changes.
298
+ | Hook | When | What ULPI Does |
299
+ |------|------|----------------|
300
+ | **SessionStart** | Claude Code session begins | Detect stack, capture branch/HEAD, surface memories, import CodeMap index |
301
+ | **PreToolUse** | Before any tool runs | Evaluate preconditions, block dangerous commands, intercept git commits for code review |
302
+ | **PostToolUse** | After a tool completes | Track files/commands, run postconditions, capture history on new commits, export CodeMap on git push |
303
+ | **PermissionRequest** | Tool needs approval | Auto-approve or deny based on rules, intercept ExitPlanMode for plan review |
304
+ | **Notification** | Event notification | Classify and route to desktop, webhook, terminal, or log channels |
305
+ | **Stop** | Session stopping | Final checks (warn about missing tests/lint) |
306
+ | **SessionEnd** | Session complete | Persist summary, capture commits to history, classify memories, export indexes |
188
307
 
189
308
  If ULPI ever encounters an error, it fails open — Claude Code continues normally. Your agent is never blocked by a bug in ULPI.
190
309
 
@@ -1,6 +1,13 @@
1
1
  import {
2
2
  createEmbedder
3
- } from "./chunk-DBMUNBNB.js";
3
+ } from "./chunk-VGZLMUNO.js";
4
+ import {
5
+ commitInWorktree,
6
+ copyAndStage,
7
+ historyBranchExists,
8
+ withWorktree,
9
+ writeAndStage
10
+ } from "./chunk-NNUWU6CV.js";
4
11
  import {
5
12
  readEvents
6
13
  } from "./chunk-YM2HV4IA.js";
@@ -248,9 +255,7 @@ function finalizeCapture(sessionId, state, projectDir) {
248
255
  const dir = sessionCaptureDir(projectDir, sessionId);
249
256
  fs3.mkdirSync(dir, { recursive: true });
250
257
  const redactPatterns = config.redactPatterns;
251
- if (config.captureMode === "end_of_session") {
252
- bulkCaptureFromSessionLog(sessionId, projectDir, redactPatterns);
253
- }
258
+ bulkCaptureFromSessionLog(sessionId, projectDir, redactPatterns);
254
259
  let transcriptCaptured = false;
255
260
  if (state.transcriptPath) {
256
261
  transcriptCaptured = copyTranscript(
@@ -550,6 +555,8 @@ function invokeClassifier(prompt, model, timeout) {
550
555
  throw new Error("Claude CLI not found \u2014 cannot classify memories");
551
556
  }
552
557
  return new Promise((resolve2, reject) => {
558
+ const env = { ...process.env };
559
+ delete env.CLAUDECODE;
553
560
  const proc = spawn(claudePath, [
554
561
  "--print",
555
562
  "--model",
@@ -561,7 +568,8 @@ function invokeClassifier(prompt, model, timeout) {
561
568
  "--permission-mode",
562
569
  "bypassPermissions"
563
570
  ], {
564
- stdio: ["pipe", "pipe", "pipe"]
571
+ stdio: ["pipe", "pipe", "pipe"],
572
+ env
565
573
  });
566
574
  let stdout = "";
567
575
  let stderr = "";
@@ -1324,17 +1332,7 @@ async function rememberMemory(projectDir, entry) {
1324
1332
  }
1325
1333
  }
1326
1334
  function memoryBranchExists(projectDir) {
1327
- const branch = getMemoryBranch();
1328
- try {
1329
- execFileSync2("git", ["rev-parse", "--verify", `refs/heads/${branch}`], {
1330
- cwd: projectDir,
1331
- stdio: "pipe",
1332
- timeout: 5e3
1333
- });
1334
- return true;
1335
- } catch {
1336
- return false;
1337
- }
1335
+ return historyBranchExists(projectDir, getMemoryBranch());
1338
1336
  }
1339
1337
  function initMemoryBranch(projectDir) {
1340
1338
  const branch = getMemoryBranch();
@@ -1385,114 +1383,200 @@ function initMemoryBranch(projectDir) {
1385
1383
  }
1386
1384
  }
1387
1385
  }
1388
- function exportMemories(projectDir) {
1389
- const branch = getMemoryBranch();
1386
+ async function exportMemories(projectDir) {
1387
+ const branchName = getMemoryBranch();
1390
1388
  if (!memoryBranchExists(projectDir)) {
1391
1389
  initMemoryBranch(projectDir);
1392
1390
  }
1393
1391
  const entries = listEntries(projectDir);
1394
1392
  const config = loadMemoryConfig(projectDir);
1395
- const exportData = {
1396
- version: 1,
1397
- exportedAt: (/* @__PURE__ */ new Date()).toISOString(),
1398
- projectDir,
1399
- memories: entries,
1400
- config
1401
- };
1402
- const tmpDir = fs9.mkdtempSync(path9.join(os3.tmpdir(), "ulpi-memory-export-"));
1403
- try {
1404
- execFileSync2("git", ["worktree", "add", tmpDir, branch], {
1405
- cwd: projectDir,
1406
- stdio: "pipe",
1407
- timeout: 1e4
1408
- });
1409
- fs9.writeFileSync(
1410
- path9.join(tmpDir, "memories.json"),
1411
- JSON.stringify(exportData, null, 2) + "\n"
1412
- );
1413
- execFileSync2("git", ["add", "memories.json"], {
1414
- cwd: tmpDir,
1415
- stdio: "pipe",
1416
- timeout: 5e3
1417
- });
1418
- try {
1419
- execFileSync2("git", ["diff", "--cached", "--quiet"], {
1420
- cwd: tmpDir,
1421
- stdio: "pipe",
1422
- timeout: 5e3
1423
- });
1424
- const sha2 = execFileSync2("git", ["rev-parse", "HEAD"], {
1425
- cwd: tmpDir,
1426
- encoding: "utf-8",
1427
- timeout: 5e3
1428
- }).trim();
1429
- return { branchName: branch, commitSha: sha2, memoriesExported: entries.length };
1430
- } catch {
1393
+ const lanceDir = memoryLanceDir(projectDir);
1394
+ const statsFile = memoryStatsFile(projectDir);
1395
+ let filesExported = 0;
1396
+ let totalSizeBytes = 0;
1397
+ const commitSha = await withWorktree(projectDir, branchName, (worktreeDir) => {
1398
+ const exportData = {
1399
+ version: 1,
1400
+ exportedAt: (/* @__PURE__ */ new Date()).toISOString(),
1401
+ projectDir,
1402
+ memories: entries,
1403
+ config
1404
+ };
1405
+ const memoriesJson = JSON.stringify(exportData, null, 2) + "\n";
1406
+ writeAndStage(worktreeDir, "memories.json", memoriesJson);
1407
+ totalSizeBytes += Buffer.byteLength(memoriesJson, "utf-8");
1408
+ filesExported++;
1409
+ const configPath = memoryConfigFile(projectDir);
1410
+ if (fs9.existsSync(configPath)) {
1411
+ const content = fs9.readFileSync(configPath, "utf-8");
1412
+ writeAndStage(worktreeDir, "config.json", content);
1413
+ totalSizeBytes += Buffer.byteLength(content, "utf-8");
1414
+ filesExported++;
1431
1415
  }
1432
- execFileSync2("git", [
1433
- "commit",
1434
- "-m",
1435
- `memory: export ${entries.length} memories`
1436
- ], {
1437
- cwd: tmpDir,
1438
- stdio: "pipe",
1439
- timeout: 1e4
1440
- });
1441
- const sha = execFileSync2("git", ["rev-parse", "HEAD"], {
1442
- cwd: tmpDir,
1443
- encoding: "utf-8",
1444
- timeout: 5e3
1445
- }).trim();
1446
- return { branchName: branch, commitSha: sha, memoriesExported: entries.length };
1447
- } finally {
1448
- try {
1449
- execFileSync2("git", ["worktree", "remove", "--force", tmpDir], {
1450
- cwd: projectDir,
1451
- stdio: "pipe",
1452
- timeout: 1e4
1453
- });
1454
- } catch {
1455
- try {
1456
- fs9.rmSync(tmpDir, { recursive: true, force: true });
1457
- } catch {
1458
- }
1416
+ if (fs9.existsSync(statsFile)) {
1417
+ const content = fs9.readFileSync(statsFile, "utf-8");
1418
+ writeAndStage(worktreeDir, "stats.json", content);
1419
+ totalSizeBytes += Buffer.byteLength(content, "utf-8");
1420
+ filesExported++;
1421
+ }
1422
+ if (fs9.existsSync(lanceDir)) {
1423
+ const result = copyDirRecursive(lanceDir, worktreeDir, "index/lance");
1424
+ filesExported += result.fileCount;
1425
+ totalSizeBytes += result.totalBytes;
1426
+ }
1427
+ const exportMeta = {
1428
+ exportedAt: (/* @__PURE__ */ new Date()).toISOString(),
1429
+ indexVersion: 1,
1430
+ vectorStoreFormatVersion: 1,
1431
+ engine: "lancedb"
1432
+ };
1433
+ writeAndStage(worktreeDir, "export-meta.json", JSON.stringify(exportMeta, null, 2) + "\n");
1434
+ filesExported++;
1435
+ const readme = [
1436
+ "# ULPI Agent Memory",
1437
+ "",
1438
+ "This branch stores agent memory data for this repository.",
1439
+ "It is maintained automatically by [ULPI](https://github.com/ulpi-io/ulpi).",
1440
+ "",
1441
+ "## Contents",
1442
+ "",
1443
+ "- `memories.json` \u2014 Memory entries + config snapshot",
1444
+ "- `config.json` \u2014 Memory configuration",
1445
+ "- `stats.json` \u2014 Memory statistics",
1446
+ "- `index/lance/` \u2014 LanceDB vector index",
1447
+ "- `export-meta.json` \u2014 Export metadata",
1448
+ "",
1449
+ `_Exported: ${exportMeta.exportedAt} \u2014 ${entries.length} memories_`
1450
+ ].join("\n") + "\n";
1451
+ writeAndStage(worktreeDir, "README.md", readme);
1452
+ filesExported++;
1453
+ return commitInWorktree(worktreeDir, `memory: export ${entries.length} memories (${filesExported} files)`);
1454
+ });
1455
+ const commitMessage = `memory: export ${entries.length} memories (${filesExported} files)`;
1456
+ const tree = execFileSync2("git", ["rev-parse", `${commitSha}^{tree}`], {
1457
+ cwd: projectDir,
1458
+ encoding: "utf-8",
1459
+ timeout: 5e3
1460
+ }).trim();
1461
+ const orphanSha = execFileSync2("git", ["commit-tree", tree, "-m", commitMessage], {
1462
+ cwd: projectDir,
1463
+ encoding: "utf-8",
1464
+ timeout: 5e3
1465
+ }).trim();
1466
+ execFileSync2("git", ["update-ref", `refs/heads/${branchName}`, orphanSha], {
1467
+ cwd: projectDir,
1468
+ timeout: 5e3
1469
+ });
1470
+ return {
1471
+ branchName,
1472
+ commitSha: orphanSha,
1473
+ memoriesExported: entries.length,
1474
+ filesExported,
1475
+ totalSizeBytes
1476
+ };
1477
+ }
1478
+ function copyDirRecursive(srcDir, worktreeDir, prefix) {
1479
+ let fileCount = 0;
1480
+ let totalBytes = 0;
1481
+ const dirEntries = fs9.readdirSync(srcDir);
1482
+ for (const entry of dirEntries) {
1483
+ const srcPath = path9.join(srcDir, entry);
1484
+ const destRelative = `${prefix}/${entry}`;
1485
+ const stat = fs9.statSync(srcPath);
1486
+ if (stat.isDirectory()) {
1487
+ const sub = copyDirRecursive(srcPath, worktreeDir, destRelative);
1488
+ fileCount += sub.fileCount;
1489
+ totalBytes += sub.totalBytes;
1490
+ } else if (stat.isFile()) {
1491
+ copyAndStage(worktreeDir, destRelative, srcPath);
1492
+ totalBytes += stat.size;
1493
+ fileCount++;
1459
1494
  }
1460
1495
  }
1496
+ return { fileCount, totalBytes };
1461
1497
  }
1462
- function importMemories(projectDir) {
1463
- const branch = getMemoryBranch();
1498
+ async function importMemories(projectDir) {
1499
+ const branchName = getMemoryBranch();
1464
1500
  if (!memoryBranchExists(projectDir)) {
1465
- return { success: false, memoriesImported: 0, message: "Memory branch does not exist" };
1501
+ return { success: false, memoriesImported: 0, filesImported: 0, totalSizeBytes: 0, message: "Memory branch does not exist" };
1466
1502
  }
1467
- try {
1468
- const raw = execFileSync2(
1469
- "git",
1470
- ["show", `${branch}:memories.json`],
1471
- { cwd: projectDir, encoding: "utf-8", timeout: 1e4 }
1472
- );
1473
- const exportData = JSON.parse(raw);
1474
- if (!exportData.memories || !Array.isArray(exportData.memories)) {
1475
- return { success: false, memoriesImported: 0, message: "Invalid export data" };
1476
- }
1477
- const entriesDir = memoryEntriesDir(projectDir);
1478
- fs9.mkdirSync(entriesDir, { recursive: true });
1479
- let imported = 0;
1480
- for (const entry of exportData.memories) {
1481
- const filePath = path9.join(entriesDir, `${entry.id}.json`);
1482
- if (!fs9.existsSync(filePath)) {
1483
- fs9.writeFileSync(filePath, JSON.stringify(entry, null, 2) + "\n", "utf-8");
1484
- imported++;
1503
+ const memDir = projectMemoryDir(projectDir);
1504
+ let memoriesImported = 0;
1505
+ let filesImported = 0;
1506
+ let totalSizeBytes = 0;
1507
+ await withWorktree(projectDir, branchName, (worktreeDir) => {
1508
+ const memoriesPath = path9.join(worktreeDir, "memories.json");
1509
+ if (fs9.existsSync(memoriesPath)) {
1510
+ const raw = fs9.readFileSync(memoriesPath, "utf-8");
1511
+ const exportData = JSON.parse(raw);
1512
+ totalSizeBytes += Buffer.byteLength(raw, "utf-8");
1513
+ filesImported++;
1514
+ if (exportData.memories && Array.isArray(exportData.memories)) {
1515
+ const entriesDir = memoryEntriesDir(projectDir);
1516
+ fs9.mkdirSync(entriesDir, { recursive: true });
1517
+ for (const entry of exportData.memories) {
1518
+ const filePath = path9.join(entriesDir, `${entry.id}.json`);
1519
+ if (!fs9.existsSync(filePath)) {
1520
+ fs9.writeFileSync(filePath, JSON.stringify(entry, null, 2) + "\n", "utf-8");
1521
+ memoriesImported++;
1522
+ }
1523
+ }
1485
1524
  }
1486
1525
  }
1487
- return {
1488
- success: true,
1489
- memoriesImported: imported,
1490
- message: `Imported ${imported} new memories (${exportData.memories.length - imported} already exist)`
1491
- };
1492
- } catch (err) {
1493
- const message = err instanceof Error ? err.message : String(err);
1494
- return { success: false, memoriesImported: 0, message };
1526
+ const configSrc = path9.join(worktreeDir, "config.json");
1527
+ const configDest = memoryConfigFile(projectDir);
1528
+ if (fs9.existsSync(configSrc) && !fs9.existsSync(configDest)) {
1529
+ fs9.mkdirSync(path9.dirname(configDest), { recursive: true });
1530
+ const content = fs9.readFileSync(configSrc);
1531
+ fs9.writeFileSync(configDest, content);
1532
+ totalSizeBytes += content.length;
1533
+ filesImported++;
1534
+ }
1535
+ const statsSrc = path9.join(worktreeDir, "stats.json");
1536
+ const statsDest = memoryStatsFile(projectDir);
1537
+ if (fs9.existsSync(statsSrc) && !fs9.existsSync(statsDest)) {
1538
+ const content = fs9.readFileSync(statsSrc);
1539
+ fs9.writeFileSync(statsDest, content);
1540
+ totalSizeBytes += content.length;
1541
+ filesImported++;
1542
+ }
1543
+ const lanceSrc = path9.join(worktreeDir, "index", "lance");
1544
+ const lanceDest = memoryLanceDir(projectDir);
1545
+ if (fs9.existsSync(lanceSrc)) {
1546
+ fs9.mkdirSync(lanceDest, { recursive: true });
1547
+ const result = copyDirRecursiveImport(lanceSrc, lanceDest);
1548
+ filesImported += result.fileCount;
1549
+ totalSizeBytes += result.totalBytes;
1550
+ }
1551
+ });
1552
+ return {
1553
+ success: true,
1554
+ memoriesImported,
1555
+ filesImported,
1556
+ totalSizeBytes,
1557
+ message: memoriesImported > 0 ? `Imported ${memoriesImported} new memories + vector index (${filesImported} files)` : `Vector index restored (${filesImported} files, entries already exist)`
1558
+ };
1559
+ }
1560
+ function copyDirRecursiveImport(srcDir, destDir) {
1561
+ let fileCount = 0;
1562
+ let totalBytes = 0;
1563
+ fs9.mkdirSync(destDir, { recursive: true });
1564
+ const dirEntries = fs9.readdirSync(srcDir);
1565
+ for (const entry of dirEntries) {
1566
+ const srcPath = path9.join(srcDir, entry);
1567
+ const destPath = path9.join(destDir, entry);
1568
+ const stat = fs9.statSync(srcPath);
1569
+ if (stat.isDirectory()) {
1570
+ const sub = copyDirRecursiveImport(srcPath, destPath);
1571
+ fileCount += sub.fileCount;
1572
+ totalBytes += sub.totalBytes;
1573
+ } else if (stat.isFile()) {
1574
+ fs9.copyFileSync(srcPath, destPath);
1575
+ totalBytes += stat.size;
1576
+ fileCount++;
1577
+ }
1495
1578
  }
1579
+ return { fileCount, totalBytes };
1496
1580
  }
1497
1581
 
1498
1582
  export {
@@ -13,7 +13,7 @@ var REGISTRY_URL = CLI_REGISTRY_URL;
13
13
  var FETCH_TIMEOUT_MS = 5e3;
14
14
  function getCurrentVersion() {
15
15
  try {
16
- return "0.1.0";
16
+ return "0.1.2";
17
17
  } catch {
18
18
  return "0.0.0";
19
19
  }
@@ -74,7 +74,7 @@ import {
74
74
  saveUlpiSettings
75
75
  } from "./chunk-7LXY5UVC.js";
76
76
 
77
- // ../api/dist/chunk-3N5EEDFM.js
77
+ // ../api/dist/chunk-77FAT4LI.js
78
78
  import * as http from "http";
79
79
  import * as fs12 from "fs";
80
80
  import * as path9 from "path";
@@ -2245,7 +2245,7 @@ async function exportObsidian(ctx) {
2245
2245
  jsonResponse(ctx.res, { format: "obsidian", content: md }, 200, ctx.req);
2246
2246
  }
2247
2247
  async function getEngine() {
2248
- return await import("./dist-LZKZFPVX.js");
2248
+ return await import("./dist-MFFX7TZW.js");
2249
2249
  }
2250
2250
  var runningPipelines = /* @__PURE__ */ new Map();
2251
2251
  var activeWatchers = /* @__PURE__ */ new Map();
@@ -2551,7 +2551,7 @@ async function codemapActionHandler(ctx) {
2551
2551
  }
2552
2552
  }
2553
2553
  async function getEngine2() {
2554
- return await import("./dist-R5F4MX3I.js");
2554
+ return await import("./dist-57UMTPGR.js");
2555
2555
  }
2556
2556
  async function memoryStatusHandler(ctx) {
2557
2557
  try {
@@ -2765,12 +2765,12 @@ async function memoryActionHandler(ctx) {
2765
2765
  return;
2766
2766
  }
2767
2767
  case "export": {
2768
- const result = engine.exportMemories(ctx.projectDir);
2768
+ const result = await engine.exportMemories(ctx.projectDir);
2769
2769
  jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
2770
2770
  return;
2771
2771
  }
2772
2772
  case "import": {
2773
- const result = engine.importMemories(ctx.projectDir);
2773
+ const result = await engine.importMemories(ctx.projectDir);
2774
2774
  jsonResponse(ctx.res, { ok: true, action, ...result }, 200, ctx.req);
2775
2775
  return;
2776
2776
  }
@@ -2855,7 +2855,7 @@ async function memoryConfigUpdateHandler(ctx) {
2855
2855
  }
2856
2856
  }
2857
2857
  async function getDepgraph() {
2858
- return await import("./dist-R5ZJ4LX5.js");
2858
+ return await import("./dist-YA2BWZB2.js");
2859
2859
  }
2860
2860
  async function getBranch(projectDir) {
2861
2861
  const { getCurrentBranch: getCurrentBranch2 } = await import("./dist-RKOGLK7R.js");
@@ -3014,11 +3014,7 @@ async function depgraphMetricsHandler(ctx) {
3014
3014
  jsonResponse(ctx.res, { error: error ?? "Invalid request body" }, 400, ctx.req);
3015
3015
  return;
3016
3016
  }
3017
- const modulePath = data.modulePath;
3018
- if (typeof modulePath !== "string" || !modulePath.trim()) {
3019
- jsonResponse(ctx.res, { error: "Missing or empty 'modulePath' field" }, 400, ctx.req);
3020
- return;
3021
- }
3017
+ const modulePath = typeof data.modulePath === "string" ? data.modulePath : "";
3022
3018
  try {
3023
3019
  const depgraph = await getDepgraph();
3024
3020
  const branch = await getBranch(ctx.projectDir);