engrm 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. package/.mcp.json +9 -0
  2. package/AUTH-DESIGN.md +436 -0
  3. package/BRIEF.md +197 -0
  4. package/CLAUDE.md +44 -0
  5. package/COMPETITIVE.md +174 -0
  6. package/CONTEXT-OPTIMIZATION.md +305 -0
  7. package/INFRASTRUCTURE.md +252 -0
  8. package/LICENSE +105 -0
  9. package/MARKET.md +230 -0
  10. package/PLAN.md +278 -0
  11. package/README.md +121 -0
  12. package/SENTINEL.md +293 -0
  13. package/SERVER-API-PLAN.md +553 -0
  14. package/SPEC.md +843 -0
  15. package/SWOT.md +148 -0
  16. package/SYNC-ARCHITECTURE.md +294 -0
  17. package/VIBE-CODER-STRATEGY.md +250 -0
  18. package/bun.lock +375 -0
  19. package/hooks/post-tool-use.ts +144 -0
  20. package/hooks/session-start.ts +64 -0
  21. package/hooks/stop.ts +131 -0
  22. package/mem-page.html +1305 -0
  23. package/package.json +30 -0
  24. package/src/capture/dedup.test.ts +103 -0
  25. package/src/capture/dedup.ts +76 -0
  26. package/src/capture/extractor.test.ts +245 -0
  27. package/src/capture/extractor.ts +330 -0
  28. package/src/capture/quality.test.ts +168 -0
  29. package/src/capture/quality.ts +104 -0
  30. package/src/capture/retrospective.test.ts +115 -0
  31. package/src/capture/retrospective.ts +121 -0
  32. package/src/capture/scanner.test.ts +131 -0
  33. package/src/capture/scanner.ts +100 -0
  34. package/src/capture/scrubber.test.ts +144 -0
  35. package/src/capture/scrubber.ts +181 -0
  36. package/src/cli.ts +517 -0
  37. package/src/config.ts +238 -0
  38. package/src/context/inject.test.ts +940 -0
  39. package/src/context/inject.ts +382 -0
  40. package/src/embeddings/backfill.ts +50 -0
  41. package/src/embeddings/embedder.test.ts +76 -0
  42. package/src/embeddings/embedder.ts +139 -0
  43. package/src/lifecycle/aging.test.ts +103 -0
  44. package/src/lifecycle/aging.ts +36 -0
  45. package/src/lifecycle/compaction.test.ts +264 -0
  46. package/src/lifecycle/compaction.ts +190 -0
  47. package/src/lifecycle/purge.test.ts +100 -0
  48. package/src/lifecycle/purge.ts +37 -0
  49. package/src/lifecycle/scheduler.test.ts +120 -0
  50. package/src/lifecycle/scheduler.ts +101 -0
  51. package/src/provisioning/browser-auth.ts +172 -0
  52. package/src/provisioning/provision.test.ts +198 -0
  53. package/src/provisioning/provision.ts +94 -0
  54. package/src/register.test.ts +167 -0
  55. package/src/register.ts +178 -0
  56. package/src/server.ts +436 -0
  57. package/src/storage/migrations.test.ts +244 -0
  58. package/src/storage/migrations.ts +261 -0
  59. package/src/storage/outbox.test.ts +229 -0
  60. package/src/storage/outbox.ts +131 -0
  61. package/src/storage/projects.test.ts +137 -0
  62. package/src/storage/projects.ts +184 -0
  63. package/src/storage/sqlite.test.ts +798 -0
  64. package/src/storage/sqlite.ts +934 -0
  65. package/src/storage/vec.test.ts +198 -0
  66. package/src/sync/auth.test.ts +76 -0
  67. package/src/sync/auth.ts +68 -0
  68. package/src/sync/client.ts +183 -0
  69. package/src/sync/engine.test.ts +94 -0
  70. package/src/sync/engine.ts +127 -0
  71. package/src/sync/pull.test.ts +279 -0
  72. package/src/sync/pull.ts +170 -0
  73. package/src/sync/push.test.ts +117 -0
  74. package/src/sync/push.ts +230 -0
  75. package/src/tools/get.ts +34 -0
  76. package/src/tools/pin.ts +47 -0
  77. package/src/tools/save.test.ts +301 -0
  78. package/src/tools/save.ts +231 -0
  79. package/src/tools/search.test.ts +69 -0
  80. package/src/tools/search.ts +181 -0
  81. package/src/tools/timeline.ts +64 -0
  82. package/tsconfig.json +22 -0
@@ -0,0 +1,230 @@
1
+ /**
2
+ * Push engine: flush sync outbox to Candengo Vector.
3
+ *
4
+ * Reads pending entries from the outbox, builds Vector documents,
5
+ * and pushes them via the REST client. Supports batch operations.
6
+ */
7
+
8
+ import type { MemDatabase, ObservationRow, SessionSummaryRow } from "../storage/sqlite.js";
9
+ import type { Config } from "../config.js";
10
+ import {
11
+ getPendingEntries,
12
+ markSyncing,
13
+ markSynced,
14
+ markFailed,
15
+ } from "../storage/outbox.js";
16
+ import { VectorClient, type VectorDocument } from "./client.js";
17
+ import { buildSourceId } from "./auth.js";
18
+
19
+ export interface PushResult {
20
+ pushed: number;
21
+ failed: number;
22
+ skipped: number;
23
+ }
24
+
25
+ /**
26
+ * Build a Candengo Vector document from a local observation.
27
+ */
28
+ export function buildVectorDocument(
29
+ obs: ObservationRow,
30
+ config: Config,
31
+ project: { canonical_id: string; name: string }
32
+ ): VectorDocument {
33
+ // Compose content: title + narrative + facts
34
+ const parts = [obs.title];
35
+ if (obs.narrative) parts.push(obs.narrative);
36
+ if (obs.facts) {
37
+ try {
38
+ const facts = JSON.parse(obs.facts) as string[];
39
+ if (Array.isArray(facts) && facts.length > 0) {
40
+ parts.push("Facts:\n" + facts.map((f) => `- ${f}`).join("\n"));
41
+ }
42
+ } catch {
43
+ // Not valid JSON — use as-is
44
+ parts.push(obs.facts);
45
+ }
46
+ }
47
+
48
+ return {
49
+ site_id: config.site_id,
50
+ namespace: config.namespace,
51
+ source_type: obs.type,
52
+ source_id: buildSourceId(config, obs.id),
53
+ content: parts.join("\n\n"),
54
+ metadata: {
55
+ project_canonical: project.canonical_id,
56
+ project_name: project.name,
57
+ user_id: obs.user_id,
58
+ device_id: obs.device_id,
59
+ agent: obs.agent,
60
+ title: obs.title,
61
+ type: obs.type,
62
+ quality: obs.quality,
63
+ concepts: obs.concepts ? JSON.parse(obs.concepts) : [],
64
+ files_modified: obs.files_modified
65
+ ? JSON.parse(obs.files_modified)
66
+ : [],
67
+ session_id: obs.session_id,
68
+ created_at_epoch: obs.created_at_epoch,
69
+ local_id: obs.id,
70
+ },
71
+ };
72
+ }
73
+
74
+ /**
75
+ * Build a Candengo Vector document from a session summary.
76
+ */
77
+ export function buildSummaryVectorDocument(
78
+ summary: SessionSummaryRow,
79
+ config: Config,
80
+ project: { canonical_id: string; name: string }
81
+ ): VectorDocument {
82
+ const parts: string[] = [];
83
+ if (summary.request) parts.push(`Request: ${summary.request}`);
84
+ if (summary.learned) parts.push(`Learned: ${summary.learned}`);
85
+ if (summary.completed) parts.push(`Completed: ${summary.completed}`);
86
+
87
+ return {
88
+ site_id: config.site_id,
89
+ namespace: config.namespace,
90
+ source_type: "summary",
91
+ source_id: buildSourceId(config, summary.id, "summary"),
92
+ content: parts.join("\n\n"),
93
+ metadata: {
94
+ project_canonical: project.canonical_id,
95
+ project_name: project.name,
96
+ user_id: summary.user_id,
97
+ session_id: summary.session_id,
98
+ created_at_epoch: summary.created_at_epoch,
99
+ local_id: summary.id,
100
+ },
101
+ };
102
+ }
103
+
104
+ /**
105
+ * Push pending outbox entries to Candengo Vector.
106
+ */
107
+ export async function pushOutbox(
108
+ db: MemDatabase,
109
+ client: VectorClient,
110
+ config: Config,
111
+ batchSize: number = 50
112
+ ): Promise<PushResult> {
113
+ const entries = getPendingEntries(db, batchSize);
114
+
115
+ let pushed = 0;
116
+ let failed = 0;
117
+ let skipped = 0;
118
+
119
+ // Collect documents for batch ingest
120
+ const batch: { entryId: number; doc: VectorDocument }[] = [];
121
+
122
+ for (const entry of entries) {
123
+ if (entry.record_type === "summary") {
124
+ const summary = db.getSessionSummary(
125
+ // record_id is the summary row id — look it up
126
+ (() => {
127
+ const row = db.db
128
+ .query<{ session_id: string }, [number]>(
129
+ "SELECT session_id FROM session_summaries WHERE id = ?"
130
+ )
131
+ .get(entry.record_id);
132
+ return row?.session_id ?? "";
133
+ })()
134
+ );
135
+
136
+ if (!summary || !summary.project_id) {
137
+ markSynced(db, entry.id);
138
+ skipped++;
139
+ continue;
140
+ }
141
+
142
+ const project = db.getProjectById(summary.project_id);
143
+ if (!project) {
144
+ markSynced(db, entry.id);
145
+ skipped++;
146
+ continue;
147
+ }
148
+
149
+ markSyncing(db, entry.id);
150
+ const doc = buildSummaryVectorDocument(summary, config, {
151
+ canonical_id: project.canonical_id,
152
+ name: project.name,
153
+ });
154
+ batch.push({ entryId: entry.id, doc });
155
+ continue;
156
+ }
157
+
158
+ if (entry.record_type !== "observation") {
159
+ skipped++;
160
+ continue;
161
+ }
162
+
163
+ const obs = db.getObservationById(entry.record_id);
164
+ if (!obs) {
165
+ // Observation was deleted
166
+ markSynced(db, entry.id);
167
+ skipped++;
168
+ continue;
169
+ }
170
+
171
+ // Don't sync secret observations
172
+ if (obs.sensitivity === "secret") {
173
+ markSynced(db, entry.id);
174
+ skipped++;
175
+ continue;
176
+ }
177
+
178
+ // Don't sync archived/purged observations (they get removed separately)
179
+ if (obs.lifecycle === "archived" || obs.lifecycle === "purged") {
180
+ markSynced(db, entry.id);
181
+ skipped++;
182
+ continue;
183
+ }
184
+
185
+ const project = db.getProjectById(obs.project_id);
186
+ if (!project) {
187
+ markSynced(db, entry.id);
188
+ skipped++;
189
+ continue;
190
+ }
191
+
192
+ markSyncing(db, entry.id);
193
+
194
+ const doc = buildVectorDocument(obs, config, {
195
+ canonical_id: project.canonical_id,
196
+ name: project.name,
197
+ });
198
+
199
+ batch.push({ entryId: entry.id, doc });
200
+ }
201
+
202
+ if (batch.length === 0) return { pushed, failed, skipped };
203
+
204
+ // Try batch ingest first
205
+ try {
206
+ await client.batchIngest(batch.map((b) => b.doc));
207
+ for (const { entryId } of batch) {
208
+ markSynced(db, entryId);
209
+ pushed++;
210
+ }
211
+ } catch {
212
+ // Batch failed — fall back to individual ingest
213
+ for (const { entryId, doc } of batch) {
214
+ try {
215
+ await client.ingest(doc);
216
+ markSynced(db, entryId);
217
+ pushed++;
218
+ } catch (err) {
219
+ markFailed(
220
+ db,
221
+ entryId,
222
+ err instanceof Error ? err.message : String(err)
223
+ );
224
+ failed++;
225
+ }
226
+ }
227
+ }
228
+
229
+ return { pushed, failed, skipped };
230
+ }
@@ -0,0 +1,34 @@
1
+ /**
2
+ * get_observations MCP tool.
3
+ *
4
+ * Retrieve observations by ID(s). Supports single and batch lookups.
5
+ */
6
+
7
+ import type { MemDatabase, ObservationRow } from "../storage/sqlite.js";
8
+
9
+ export interface GetObservationsInput {
10
+ ids: number[];
11
+ }
12
+
13
+ export interface GetObservationsResult {
14
+ observations: ObservationRow[];
15
+ not_found: number[];
16
+ }
17
+
18
+ /**
19
+ * Get observations by their IDs.
20
+ */
21
+ export function getObservations(
22
+ db: MemDatabase,
23
+ input: GetObservationsInput
24
+ ): GetObservationsResult {
25
+ if (input.ids.length === 0) {
26
+ return { observations: [], not_found: [] };
27
+ }
28
+
29
+ const observations = db.getObservationsByIds(input.ids);
30
+ const foundIds = new Set(observations.map((o) => o.id));
31
+ const notFound = input.ids.filter((id) => !foundIds.has(id));
32
+
33
+ return { observations, not_found: notFound };
34
+ }
@@ -0,0 +1,47 @@
1
+ /**
2
+ * pin_observation MCP tool.
3
+ *
4
+ * Pin or unpin an observation. Pinned observations are excluded
5
+ * from lifecycle aging and archival.
6
+ */
7
+
8
+ import type { MemDatabase } from "../storage/sqlite.js";
9
+
10
+ export interface PinInput {
11
+ id: number;
12
+ pinned: boolean;
13
+ }
14
+
15
+ export interface PinResult {
16
+ success: boolean;
17
+ reason?: string;
18
+ }
19
+
20
+ /**
21
+ * Pin or unpin an observation.
22
+ */
23
+ export function pinObservation(
24
+ db: MemDatabase,
25
+ input: PinInput
26
+ ): PinResult {
27
+ const success = db.pinObservation(input.id, input.pinned);
28
+
29
+ if (!success) {
30
+ const obs = db.getObservationById(input.id);
31
+ if (!obs) {
32
+ return { success: false, reason: `Observation #${input.id} not found` };
33
+ }
34
+ if (input.pinned) {
35
+ return {
36
+ success: false,
37
+ reason: `Cannot pin observation in '${obs.lifecycle}' state (must be active or aging)`,
38
+ };
39
+ }
40
+ return {
41
+ success: false,
42
+ reason: `Cannot unpin observation in '${obs.lifecycle}' state (must be pinned)`,
43
+ };
44
+ }
45
+
46
+ return { success: true };
47
+ }
@@ -0,0 +1,301 @@
1
+ import { describe, expect, test, beforeEach, afterEach } from "bun:test";
2
+ import { mkdtempSync, rmSync } from "node:fs";
3
+ import { join } from "node:path";
4
+ import { tmpdir } from "node:os";
5
+ import { MemDatabase } from "../storage/sqlite.js";
6
+ import type { Config } from "../config.js";
7
+ import { saveObservation, type SaveObservationInput } from "./save.js";
8
+
9
+ let db: MemDatabase;
10
+ let tmpDir: string;
11
+ let config: Config;
12
+
13
+ function makeConfig(overrides?: Partial<Config>): Config {
14
+ return {
15
+ candengo_url: "https://api.candengo.com",
16
+ candengo_api_key: "test-key",
17
+ site_id: "test-site",
18
+ namespace: "test-ns",
19
+ user_id: "david",
20
+ device_id: "laptop-abc",
21
+ user_email: "",
22
+ teams: [],
23
+ sync: { enabled: true, interval_seconds: 30, batch_size: 50 },
24
+ search: { default_limit: 10, local_boost: 1.2, scope: "all" },
25
+ scrubbing: {
26
+ enabled: true,
27
+ custom_patterns: [],
28
+ default_sensitivity: "shared",
29
+ },
30
+ ...overrides,
31
+ };
32
+ }
33
+
34
+ beforeEach(() => {
35
+ tmpDir = mkdtempSync(join(tmpdir(), "candengo-mem-save-test-"));
36
+ db = new MemDatabase(join(tmpDir, "test.db"));
37
+ config = makeConfig();
38
+ });
39
+
40
+ afterEach(() => {
41
+ db.close();
42
+ rmSync(tmpDir, { recursive: true, force: true });
43
+ });
44
+
45
+ describe("saveObservation", () => {
46
+ test("saves valid observation", async () => {
47
+ const result = await saveObservation(db, config, {
48
+ type: "bugfix",
49
+ title: "Fix authentication bug",
50
+ narrative: "The token was not being refreshed properly on expiry, causing 401 errors",
51
+ cwd: "/Volumes/Data/devs/candengo-mem",
52
+ });
53
+ expect(result.success).toBe(true);
54
+ expect(result.observation_id).toBeGreaterThan(0);
55
+ expect(result.quality_score).toBeGreaterThan(0);
56
+ });
57
+
58
+ test("rejects invalid type", async () => {
59
+ const result = await saveObservation(db, config, {
60
+ type: "invalid",
61
+ title: "Test",
62
+ cwd: "/tmp",
63
+ });
64
+ expect(result.success).toBe(false);
65
+ expect(result.reason).toContain("Invalid type");
66
+ });
67
+
68
+ test("rejects empty title", async () => {
69
+ const result = await saveObservation(db, config, {
70
+ type: "bugfix",
71
+ title: "",
72
+ cwd: "/tmp",
73
+ });
74
+ expect(result.success).toBe(false);
75
+ expect(result.reason).toContain("Title is required");
76
+ });
77
+
78
+ test("rejects whitespace-only title", async () => {
79
+ const result = await saveObservation(db, config, {
80
+ type: "bugfix",
81
+ title: " ",
82
+ cwd: "/tmp",
83
+ });
84
+ expect(result.success).toBe(false);
85
+ expect(result.reason).toContain("Title is required");
86
+ });
87
+
88
+ test("rejects low-quality observation", async () => {
89
+ // A bare "change" type with no enrichment scores 0.05 < threshold 0.1
90
+ const result = await saveObservation(db, config, {
91
+ type: "change",
92
+ title: "Minor tweak",
93
+ cwd: "/tmp",
94
+ });
95
+ expect(result.success).toBe(false);
96
+ expect(result.reason).toContain("Quality score");
97
+ });
98
+
99
+ test("scrubs secrets from title", async () => {
100
+ const result = await saveObservation(db, config, {
101
+ type: "bugfix",
102
+ title: "Fix sk-abc123def456ghi789jkl012mno in config",
103
+ narrative: "Found the API key hardcoded, which is a discovery worth noting for the team",
104
+ cwd: "/tmp",
105
+ });
106
+ expect(result.success).toBe(true);
107
+
108
+ const obs = db.getObservationById(result.observation_id!);
109
+ expect(obs!.title).toContain("[REDACTED_API_KEY]");
110
+ expect(obs!.title).not.toContain("sk-abc123");
111
+ });
112
+
113
+ test("scrubs secrets from narrative", async () => {
114
+ const result = await saveObservation(db, config, {
115
+ type: "discovery",
116
+ title: "Found hardcoded credentials",
117
+ narrative: "Database URL was postgresql://admin:secret@db.example.com/prod",
118
+ cwd: "/tmp",
119
+ });
120
+ expect(result.success).toBe(true);
121
+
122
+ const obs = db.getObservationById(result.observation_id!);
123
+ expect(obs!.narrative).toContain("[REDACTED_DB_URL]");
124
+ });
125
+
126
+ test("skips scrubbing when disabled", async () => {
127
+ const noScrubConfig = makeConfig({
128
+ scrubbing: {
129
+ enabled: false,
130
+ custom_patterns: [],
131
+ default_sensitivity: "shared",
132
+ },
133
+ });
134
+
135
+ const result = await saveObservation(db, noScrubConfig, {
136
+ type: "bugfix",
137
+ title: "Fix sk-abc123def456ghi789jkl012mno",
138
+ narrative: "This is a long narrative about the API key that was found in the config file and needs to be addressed",
139
+ cwd: "/tmp",
140
+ });
141
+ expect(result.success).toBe(true);
142
+
143
+ const obs = db.getObservationById(result.observation_id!);
144
+ expect(obs!.title).toContain("sk-abc123");
145
+ });
146
+
147
+ test("upgrades sensitivity when secrets detected", async () => {
148
+ const result = await saveObservation(db, config, {
149
+ type: "discovery",
150
+ title: "Found API key sk-abc123def456ghi789jkl012mno in env",
151
+ narrative: "Important discovery worth documenting for the team context and future reference",
152
+ cwd: "/tmp",
153
+ });
154
+ expect(result.success).toBe(true);
155
+
156
+ const obs = db.getObservationById(result.observation_id!);
157
+ expect(obs!.sensitivity).toBe("personal");
158
+ });
159
+
160
+ test("detects duplicates within 24h", async () => {
161
+ // Save first observation
162
+ const first = await saveObservation(db, config, {
163
+ type: "bugfix",
164
+ title: "Fix authentication token refresh",
165
+ narrative: "Long enough narrative to pass quality: the token refresh mechanism was broken due to race condition",
166
+ cwd: "/Volumes/Data/devs/candengo-mem",
167
+ });
168
+ expect(first.success).toBe(true);
169
+
170
+ // Save near-duplicate
171
+ const second = await saveObservation(db, config, {
172
+ type: "bugfix",
173
+ title: "Fix authentication token refresh",
174
+ narrative: "Same fix described differently but still about the token refresh race condition issue",
175
+ cwd: "/Volumes/Data/devs/candengo-mem",
176
+ });
177
+ expect(second.success).toBe(true);
178
+ expect(second.merged_into).toBe(first.observation_id);
179
+ });
180
+
181
+ test("adds observation to sync outbox", async () => {
182
+ const result = await saveObservation(db, config, {
183
+ type: "decision",
184
+ title: "Choose PostgreSQL over MySQL",
185
+ narrative: "PostgreSQL has better JSON support and more advanced indexing capabilities for our use case",
186
+ cwd: "/tmp",
187
+ });
188
+ expect(result.success).toBe(true);
189
+
190
+ const outbox = db.db
191
+ .query<{ record_id: number }, [number]>(
192
+ "SELECT record_id FROM sync_outbox WHERE record_id = ? AND record_type = 'observation'"
193
+ )
194
+ .get(result.observation_id!);
195
+ expect(outbox).not.toBeNull();
196
+ });
197
+
198
+ test("stores facts and concepts as JSON", async () => {
199
+ const result = await saveObservation(db, config, {
200
+ type: "decision",
201
+ title: "Choose PostgreSQL",
202
+ facts: ["Supports JSONB", "Better indexing"],
203
+ concepts: ["database", "postgresql"],
204
+ cwd: "/tmp",
205
+ });
206
+ expect(result.success).toBe(true);
207
+
208
+ const obs = db.getObservationById(result.observation_id!);
209
+ expect(JSON.parse(obs!.facts!)).toEqual(["Supports JSONB", "Better indexing"]);
210
+ expect(JSON.parse(obs!.concepts!)).toEqual(["database", "postgresql"]);
211
+ });
212
+
213
+ test("saves with session_id", async () => {
214
+ const result = await saveObservation(db, config, {
215
+ type: "bugfix",
216
+ title: "Fix auth bug",
217
+ narrative: "Important fix that required significant investigation and multiple file changes",
218
+ session_id: "sess-123",
219
+ cwd: "/tmp",
220
+ });
221
+ expect(result.success).toBe(true);
222
+
223
+ const obs = db.getObservationById(result.observation_id!);
224
+ expect(obs!.session_id).toBe("sess-123");
225
+ });
226
+
227
+ test("converts absolute file paths to project-relative", async () => {
228
+ const result = await saveObservation(db, config, {
229
+ type: "bugfix",
230
+ title: "Fix path handling",
231
+ narrative: "Important fix to ensure file paths are stored relative to the project root directory",
232
+ files_read: ["/projects/myapp/src/auth.ts", "/projects/myapp/README.md"],
233
+ files_modified: ["/projects/myapp/src/auth.ts"],
234
+ cwd: "/projects/myapp",
235
+ });
236
+ expect(result.success).toBe(true);
237
+
238
+ const obs = db.getObservationById(result.observation_id!);
239
+ const filesRead = JSON.parse(obs!.files_read!);
240
+ const filesModified = JSON.parse(obs!.files_modified!);
241
+ expect(filesRead).toEqual(["src/auth.ts", "README.md"]);
242
+ expect(filesModified).toEqual(["src/auth.ts"]);
243
+ });
244
+
245
+ test("leaves already-relative paths unchanged", async () => {
246
+ const result = await saveObservation(db, config, {
247
+ type: "bugfix",
248
+ title: "Fix with relative paths",
249
+ narrative: "Another important fix that tests the relative path handling for already relative paths",
250
+ files_modified: ["src/auth.ts", "lib/utils.ts"],
251
+ cwd: "/tmp",
252
+ });
253
+ expect(result.success).toBe(true);
254
+
255
+ const obs = db.getObservationById(result.observation_id!);
256
+ const filesModified = JSON.parse(obs!.files_modified!);
257
+ expect(filesModified).toEqual(["src/auth.ts", "lib/utils.ts"]);
258
+ });
259
+
260
+ test("keeps paths outside project root as-is", async () => {
261
+ const result = await saveObservation(db, config, {
262
+ type: "discovery",
263
+ title: "Found external dependency issue",
264
+ narrative: "Discovered a problem in an external file outside the project root directory boundary",
265
+ files_read: ["/usr/local/lib/node_modules/pkg/index.js"],
266
+ cwd: "/projects/myapp",
267
+ });
268
+ expect(result.success).toBe(true);
269
+
270
+ const obs = db.getObservationById(result.observation_id!);
271
+ const filesRead = JSON.parse(obs!.files_read!);
272
+ expect(filesRead).toEqual(["/usr/local/lib/node_modules/pkg/index.js"]);
273
+ });
274
+
275
+ test("all valid types accepted", async () => {
276
+ const types = [
277
+ "bugfix",
278
+ "discovery",
279
+ "decision",
280
+ "pattern",
281
+ "change",
282
+ "feature",
283
+ "refactor",
284
+ "digest",
285
+ ];
286
+
287
+ for (const type of types) {
288
+ const result = await saveObservation(db, config, {
289
+ type,
290
+ title: `Test ${type} observation`,
291
+ narrative: "Detailed narrative to ensure quality threshold is met for all observation types",
292
+ facts: ["fact1", "fact2"],
293
+ cwd: "/tmp",
294
+ });
295
+ // Some low-scoring types may not meet threshold, but they shouldn't fail with "Invalid type"
296
+ if (!result.success) {
297
+ expect(result.reason).not.toContain("Invalid type");
298
+ }
299
+ }
300
+ });
301
+ });