@shadowforge0/aquifer-memory 1.2.1 → 1.5.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +8 -9
  2. package/consumers/cli.js +11 -1
  3. package/consumers/default/index.js +17 -4
  4. package/consumers/mcp.js +21 -0
  5. package/consumers/miranda/index.js +15 -4
  6. package/consumers/miranda/profile.json +145 -0
  7. package/consumers/miranda/recall-format.js +5 -3
  8. package/consumers/miranda/render-daily-md.js +186 -0
  9. package/consumers/shared/config.js +8 -0
  10. package/consumers/shared/factory.js +2 -1
  11. package/consumers/shared/llm.js +1 -1
  12. package/consumers/shared/recall-format.js +21 -1
  13. package/core/aquifer.js +693 -87
  14. package/core/artifacts.js +174 -0
  15. package/core/bundles.js +400 -0
  16. package/core/consolidation.js +340 -0
  17. package/core/decisions.js +164 -0
  18. package/core/entity-state.js +483 -0
  19. package/core/errors.js +97 -0
  20. package/core/handoff.js +153 -0
  21. package/core/insights.js +499 -0
  22. package/core/mcp-manifest.js +131 -0
  23. package/core/narratives.js +212 -0
  24. package/core/profiles.js +171 -0
  25. package/core/state.js +163 -0
  26. package/core/storage.js +82 -5
  27. package/core/timeline.js +152 -0
  28. package/index.js +14 -0
  29. package/package.json +1 -1
  30. package/pipeline/extract-state-changes.js +205 -0
  31. package/schema/001-base.sql +186 -16
  32. package/schema/002-entities.sql +35 -1
  33. package/schema/004-completion.sql +391 -0
  34. package/schema/005-entity-state-history.sql +87 -0
  35. package/schema/006-insights.sql +138 -0
  36. package/scripts/diagnose-fts-zh.js +37 -4
  37. package/scripts/drop-entity-state-history.sql +17 -0
  38. package/scripts/drop-insights.sql +12 -0
  39. package/scripts/extract-insights-from-recent-sessions.js +315 -0
  40. package/scripts/find-dburl-hints.js +29 -0
  41. package/scripts/queries.json +45 -0
  42. package/scripts/retro-recall-bench.js +409 -0
  43. package/scripts/sample-bench-queries.sql +75 -0
@@ -0,0 +1,174 @@
1
+ 'use strict';
2
+
3
+ // aq.artifacts.* — producer-declared output record capability.
4
+ //
5
+ // Spec: aquifer-completion §12 artifact. Aquifer stores the declaration +
6
+ // lifecycle status but never interprets the payload. Producers own shape.
7
+ // Typical flow: record with status='pending', produce content externally,
8
+ // then upsert same idempotency_key with status='produced' + contentRef.
9
+
10
+ const crypto = require('crypto');
11
+ const { AqError, ok, err } = require('./errors');
12
+
13
// Fallback consumer-profile stamp used when the caller supplies no
// profile (see resolveProfile). Frozen because it is a shared default
// that may be returned directly to callers.
const DEFAULT_PROFILE = Object.freeze({
  id: 'anon',
  version: 0,
  schemaHash: 'pending',
});

// Artifact lifecycle statuses accepted by record() (spec §12 artifact).
const VALID_STATUSES = new Set(['pending', 'produced', 'failed', 'discarded']);
20
+
21
// Normalize a caller-supplied consumer-profile stamp, falling back to
// DEFAULT_PROFILE for each missing field. A non-integer version is
// treated as absent. A null/undefined profile yields the shared default.
function resolveProfile(profile) {
  if (!profile) return DEFAULT_PROFILE;
  const version = Number.isInteger(profile.version)
    ? profile.version
    : DEFAULT_PROFILE.version;
  return {
    id: profile.id || DEFAULT_PROFILE.id,
    version,
    schemaHash: profile.schemaHash || DEFAULT_PROFILE.schemaHash,
  };
}
29
+
30
// Coerce a DB value to a finite JS number (pg returns bigint columns as
// strings); null/undefined or a non-finite result yields null.
function toNumber(v) {
  if (v == null) return null;
  const n = Number(v);
  if (!Number.isFinite(n)) return null;
  return n;
}

// Deterministic fallback idempotency key: sha256 over the identifying
// tuple. A missing sessionId participates as the empty string so the
// derived key is stable for session-less artifacts.
function defaultIdempotencyKey({ tenantId, producerId, sessionId, artifactType, destination }) {
  const material = `${tenantId}:${producerId}:${sessionId || ''}:${artifactType}:${destination}`;
  return crypto.createHash('sha256').update(material).digest('hex');
}

// Project a raw artifacts row onto the public camelCase shape.
// payload/metadata default to {} when the column is NULL.
function mapRow(row) {
  if (!row) return null;
  const {
    id, agent_id, source_session_id, producer_id, artifact_type,
    trigger_phase, format, destination, status, content_ref,
    payload, metadata, produced_at, created_at, updated_at,
  } = row;
  return {
    artifactId: toNumber(id),
    agentId: agent_id,
    sessionId: source_session_id,
    producerId: producer_id,
    type: artifact_type,
    triggerPhase: trigger_phase,
    format,
    destination,
    status,
    contentRef: content_ref,
    payload: payload || {},
    metadata: metadata || {},
    producedAt: produced_at,
    createdAt: created_at,
    updatedAt: updated_at,
  };
}
62
+
63
// Factory for the aq.artifacts capability. Binds the pg pool, the target
// schema name (interpolated into SQL — must come from trusted config,
// never user input) and the fallback tenant id; returns { record, list }.
function createArtifacts({ pool, schema, defaultTenantId }) {
  // record(input) -> ok({ artifactId }) | err(...)
  //
  // Required: agentId, producerId, type, format, destination.
  // Optional: tenantId, sessionId, profile, idempotencyKey, status
  // (default 'pending'), triggerPhase, contentRef, payload, metadata,
  // producedAt. Never throws — every failure comes back as an err() result.
  async function record(input) {
    try {
      if (!input || typeof input !== 'object') {
        return err('AQ_INVALID_INPUT', 'record requires an input object');
      }
      if (!input.agentId) return err('AQ_INVALID_INPUT', 'agentId is required');
      if (!input.producerId) return err('AQ_INVALID_INPUT', 'producerId is required');
      if (!input.type) return err('AQ_INVALID_INPUT', 'type is required');
      if (!input.format) return err('AQ_INVALID_INPUT', 'format is required');
      if (!input.destination) return err('AQ_INVALID_INPUT', 'destination is required');

      const status = input.status || 'pending';
      if (!VALID_STATUSES.has(status)) {
        return err('AQ_INVALID_INPUT',
          `status must be one of ${Array.from(VALID_STATUSES).join(', ')}`);
      }

      const tenantId = input.tenantId || defaultTenantId || 'default';
      const profile = resolveProfile(input.profile);
      // Caller-supplied key wins; otherwise derive a stable sha256 key
      // from the identifying tuple so re-records hit the same row.
      const idempotencyKey = input.idempotencyKey
        || defaultIdempotencyKey({
          tenantId,
          producerId: input.producerId,
          sessionId: input.sessionId,
          artifactType: input.type,
          destination: input.destination,
        });

      // Upsert semantics: producer may re-record the same artifact with
      // updated status ('pending' → 'produced'), so DO UPDATE on matching
      // idempotency_key, allowing lifecycle transitions.
      //
      // produced_at rules: on INSERT with status 'produced', take the
      // caller's producedAt or now(); on conflict it is set to now() only
      // the first time the row transitions to 'produced'. content_ref is
      // never cleared by an upsert — COALESCE keeps the existing ref when
      // the new record omits one.
      const { rows } = await pool.query(
        `INSERT INTO ${schema}.artifacts (
           tenant_id, agent_id, source_session_id,
           consumer_profile_id, consumer_profile_version, consumer_schema_hash,
           idempotency_key, producer_id, artifact_type, trigger_phase,
           format, destination, status, content_ref, payload, metadata,
           produced_at
         ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16,
           CASE WHEN $13 = 'produced' THEN COALESCE($17::timestamptz, now()) ELSE $17::timestamptz END)
         ON CONFLICT (idempotency_key) DO UPDATE SET
           status = EXCLUDED.status,
           content_ref = COALESCE(EXCLUDED.content_ref, ${schema}.artifacts.content_ref),
           payload = EXCLUDED.payload,
           metadata = EXCLUDED.metadata,
           produced_at = CASE
             WHEN EXCLUDED.status = 'produced' AND ${schema}.artifacts.produced_at IS NULL
             THEN now()
             ELSE ${schema}.artifacts.produced_at
           END
         RETURNING *`,
        [
          tenantId, input.agentId, input.sessionId || null,
          profile.id, profile.version, profile.schemaHash,
          idempotencyKey, input.producerId, input.type,
          input.triggerPhase || null, input.format, input.destination,
          status, input.contentRef || null,
          JSON.stringify(input.payload || {}),
          JSON.stringify(input.metadata || {}),
          input.producedAt || null,
        ],
      );
      const mapped = mapRow(rows[0]);
      return ok({ artifactId: mapped.artifactId });
    } catch (e) {
      if (e instanceof AqError) return err(e);
      return err('AQ_INTERNAL', e.message, { cause: e });
    }
  }

  // list(input) -> ok({ rows }) | err(...)
  //
  // Optional filters: agentId, sessionId, producerId, statuses (array).
  // limit is clamped to [1, 500]; a falsy limit (0/undefined) becomes 50.
  // Newest first (created_at DESC, id DESC as tiebreaker).
  async function list(input = {}) {
    try {
      const tenantId = input.tenantId || defaultTenantId || 'default';
      const limit = Math.min(Math.max(input.limit || 50, 1), 500);
      // WHERE clause is assembled from fixed snippets with positional
      // parameters only — no caller text is interpolated into the SQL.
      const params = [tenantId];
      let where = 'tenant_id = $1';
      if (input.agentId) {
        params.push(input.agentId);
        where += ` AND agent_id = $${params.length}`;
      }
      if (input.sessionId) {
        params.push(input.sessionId);
        where += ` AND source_session_id = $${params.length}`;
      }
      if (input.producerId) {
        params.push(input.producerId);
        where += ` AND producer_id = $${params.length}`;
      }
      if (Array.isArray(input.statuses) && input.statuses.length > 0) {
        // pg binds a JS array as a SQL array, so ANY($n) matches any status.
        params.push(input.statuses);
        where += ` AND status = ANY($${params.length})`;
      }
      params.push(limit);

      const { rows } = await pool.query(
        `SELECT * FROM ${schema}.artifacts
         WHERE ${where}
         ORDER BY created_at DESC, id DESC
         LIMIT $${params.length}`,
        params,
      );
      return ok({ rows: rows.map(mapRow) });
    } catch (e) {
      return err('AQ_INTERNAL', e.message, { cause: e });
    }
  }

  return { record, list };
}

module.exports = { createArtifacts };
@@ -0,0 +1,400 @@
1
+ 'use strict';
2
+
3
+ // aq.bundles.* — cross-session export/import/diff capability.
4
+ //
5
+ // Spec: aquifer-completion §13 sessionBundle. A bundle packages the
6
+ // canonical state attached to a single sessionId across sessions,
7
+ // narratives, timeline_events, session_handoffs, session_states,
8
+ // decisions, and artifacts. The session row itself is not duplicated
9
+ // — only its identifying fields + summary projection travel. Bundle
10
+ // envelope is strict core; entities inside each bucket stay open so
11
+ // consumer-specific fields ride along.
12
+ //
13
+ // export — reads all related rows keyed by source_session_id.
14
+ // import — replays into this tenant, resolving conflicts per policy.
15
+ // diff — pure function over two bundles, no DB.
16
+ //
17
+ // conflictPolicy:
18
+ // 'skip' — collision on idempotency_key is a no-op; counted as conflict.
19
+ // 'upsert' — collision replaces the existing row where semantics allow.
20
+ // 'fail' — any collision aborts the whole import with AQ_IMPORT_CONFLICT.
21
+
22
+ const { AqError, ok, err } = require('./errors');
23
+
24
// Buckets a caller may select via export()'s `include` option (spec §13).
// Frozen: this array is exported from the module (see module.exports), so
// freezing prevents consumers from mutating the shared default include-set
// — consistent with the frozen module-level defaults elsewhere in core.
const BUNDLE_ENTITIES = Object.freeze([
  'summary', 'narrative', 'timeline', 'handoff', 'state', 'decisions', 'artifacts',
]);
25
+
26
// Return a portable copy of a DB row with the database-local identity
// columns (`id`, `session_row_id`) removed, so bundles travel between
// databases without surrogate keys. null/undefined pass through as null;
// the input row is never mutated.
function stripSessionColumns(row) {
  if (!row) return null;
  const { id, session_row_id, ...portable } = row;
  return portable;
}
33
+
34
// Factory for the aq.bundles capability: { export, import, diff }.
// Binds the pg pool, the schema name (trusted config — interpolated into
// SQL) and the fallback tenant id.
function createBundles({ pool, schema, defaultTenantId }) {

  // export({ sessionId, tenantId?, include? }) -> ok({ bundle }) | err(...)
  //
  // Reads the session row plus every related bucket keyed by
  // source_session_id and packages them into a portable envelope.
  // `include` narrows which buckets are read; empty/omitted means all.
  async function exportBundle(input = {}) {
    try {
      if (!input.sessionId) return err('AQ_INVALID_INPUT', 'sessionId is required');
      const tenantId = input.tenantId || defaultTenantId || 'default';
      const include = Array.isArray(input.include) && input.include.length > 0
        ? new Set(input.include)
        : new Set(BUNDLE_ENTITIES);

      const sessionRow = await pool.query(
        `SELECT id, session_id, agent_id, source, started_at, last_message_at,
                tenant_id, msg_count, user_count, assistant_count, model
         FROM ${schema}.sessions
         WHERE tenant_id = $1 AND session_id = $2`,
        [tenantId, input.sessionId],
      );
      if (sessionRow.rowCount === 0) {
        return err('AQ_NOT_FOUND', `session ${input.sessionId} not found`);
      }
      const row = sessionRow.rows[0];

      // Envelope: identifying fields + counters only — the session row's
      // surrogate id never leaves this database.
      const bundle = {
        bundleVersion: 1,
        tenantId,
        session: {
          session_id: row.session_id,
          agent_id: row.agent_id,
          source: row.source,
          started_at: row.started_at,
          last_message_at: row.last_message_at,
          msg_count: row.msg_count,
          user_count: row.user_count,
          assistant_count: row.assistant_count,
          model: row.model,
        },
        stamps: [],
      };

      // session_summaries hangs off the surrogate row id (at most one
      // summary projection per session).
      if (include.has('summary')) {
        const { rows } = await pool.query(
          `SELECT ss.*
           FROM ${schema}.session_summaries ss
           WHERE ss.session_row_id = $1`,
          [row.id],
        );
        if (rows[0]) bundle.summary = stripSessionColumns(rows[0]);
      }
      if (include.has('narrative')) {
        const { rows } = await pool.query(
          `SELECT * FROM ${schema}.narratives
           WHERE tenant_id = $1 AND source_session_id = $2
           ORDER BY effective_at DESC`,
          [tenantId, input.sessionId],
        );
        bundle.narratives = rows.map(stripSessionColumns);
      }
      if (include.has('timeline')) {
        const { rows } = await pool.query(
          `SELECT * FROM ${schema}.timeline_events
           WHERE tenant_id = $1 AND source_session_id = $2
           ORDER BY occurred_at ASC`,
          [tenantId, input.sessionId],
        );
        bundle.timeline = rows.map(stripSessionColumns);
      }
      if (include.has('handoff')) {
        const { rows } = await pool.query(
          `SELECT * FROM ${schema}.session_handoffs
           WHERE tenant_id = $1 AND source_session_id = $2
           ORDER BY created_at DESC`,
          [tenantId, input.sessionId],
        );
        bundle.handoffs = rows.map(stripSessionColumns);
      }
      if (include.has('state')) {
        const { rows } = await pool.query(
          `SELECT * FROM ${schema}.session_states
           WHERE tenant_id = $1 AND source_session_id = $2
           ORDER BY created_at DESC`,
          [tenantId, input.sessionId],
        );
        bundle.states = rows.map(stripSessionColumns);
      }
      if (include.has('decisions')) {
        const { rows } = await pool.query(
          `SELECT * FROM ${schema}.decisions
           WHERE tenant_id = $1 AND source_session_id = $2
           ORDER BY decided_at ASC`,
          [tenantId, input.sessionId],
        );
        bundle.decisions = rows.map(stripSessionColumns);
      }
      if (include.has('artifacts')) {
        const { rows } = await pool.query(
          `SELECT * FROM ${schema}.artifacts
           WHERE tenant_id = $1 AND source_session_id = $2
           ORDER BY created_at ASC`,
          [tenantId, input.sessionId],
        );
        bundle.artifacts = rows.map(stripSessionColumns);
      }

      // Collect the distinct consumer-profile stamps present in the
      // exported rows so an importer can check schema compatibility
      // before replaying. Keyed by id@version to dedupe.
      const stampSet = new Map();
      for (const bucket of ['narratives', 'timeline', 'handoffs', 'states', 'decisions', 'artifacts']) {
        for (const r of (bundle[bucket] || [])) {
          if (r && r.consumer_profile_id) {
            const k = `${r.consumer_profile_id}@${r.consumer_profile_version}`;
            if (!stampSet.has(k)) {
              stampSet.set(k, {
                id: r.consumer_profile_id,
                version: r.consumer_profile_version,
                schemaHash: r.consumer_schema_hash,
              });
            }
          }
        }
      }
      bundle.stamps = Array.from(stampSet.values());

      return ok({ bundle });
    } catch (e) {
      if (e instanceof AqError) return err(e);
      return err('AQ_INTERNAL', e.message, { cause: e });
    }
  }

  // import({ bundle, mode?, conflictPolicy?, tenantId? })
  //   -> ok({ mode, wouldCreate, conflicts[, created] }) | err(...)
  //
  // mode 'apply' (default) commits; 'dry-run' performs the same walk in a
  // transaction and rolls it back, so only wouldCreate/conflicts report.
  // conflictPolicy: 'skip' (default) | 'upsert' | 'fail' — see file header.
  //
  // NOTE(review): the exported `summary` bucket is not replayed here (no
  // entry in bucketMap) — confirm whether export→import round-trips are
  // expected to carry the summary.
  async function importBundle(input = {}) {
    try {
      if (!input.bundle || typeof input.bundle !== 'object') {
        return err('AQ_INVALID_INPUT', 'bundle is required');
      }
      if (!input.bundle.session || !input.bundle.session.session_id) {
        return err('AQ_INVALID_INPUT', 'bundle.session.session_id is required');
      }
      const mode = input.mode || 'apply';
      if (mode !== 'apply' && mode !== 'dry-run') {
        return err('AQ_INVALID_INPUT', 'mode must be apply or dry-run');
      }
      const policy = input.conflictPolicy || 'skip';
      if (!['skip', 'upsert', 'fail'].includes(policy)) {
        return err('AQ_INVALID_INPUT', 'conflictPolicy must be skip|upsert|fail');
      }
      // Destination tenant: explicit input wins over the bundle's origin.
      const tenantId = input.tenantId || input.bundle.tenantId || defaultTenantId || 'default';
      const bundle = input.bundle;

      const client = await pool.connect();
      const conflicts = [];
      const wouldCreate = { session: 0 };
      const created = { session: 0 };
      for (const b of ['narratives', 'timeline', 'handoffs', 'states', 'decisions', 'artifacts']) {
        wouldCreate[b] = 0;
        created[b] = 0;
      }

      try {
        await client.query('BEGIN');

        // Upsert the session row first so child inserts have a valid FK.
        const existingSess = await client.query(
          `SELECT id FROM ${schema}.sessions WHERE tenant_id = $1 AND session_id = $2`,
          [tenantId, bundle.session.session_id],
        );
        let sessionRowId;
        if (existingSess.rowCount > 0) {
          conflicts.push({ entity: 'session', key: bundle.session.session_id, reason: 'exists' });
          if (policy === 'fail') {
            throw new AqError('AQ_IMPORT_CONFLICT', 'session already exists; policy=fail');
          }
          sessionRowId = existingSess.rows[0].id;
          wouldCreate.session = 0;
        } else {
          wouldCreate.session = 1;
          // In dry-run for a brand-new session sessionRowId stays
          // undefined — harmless, because child inserts below only run
          // when mode === 'apply'.
          if (mode === 'apply') {
            const { rows } = await client.query(
              `INSERT INTO ${schema}.sessions (
                 tenant_id, session_id, agent_id, source,
                 started_at, last_message_at,
                 msg_count, user_count, assistant_count, model
               ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
               RETURNING id`,
              [
                tenantId, bundle.session.session_id,
                bundle.session.agent_id || 'main',
                bundle.session.source || 'import',
                bundle.session.started_at || null,
                bundle.session.last_message_at || null,
                bundle.session.msg_count || 0,
                bundle.session.user_count || 0,
                bundle.session.assistant_count || 0,
                bundle.session.model || null,
              ],
            );
            sessionRowId = rows[0].id;
            created.session = 1;
          }
        }

        // Bundle bucket → destination table.
        const bucketMap = {
          narratives: `${schema}.narratives`,
          timeline: `${schema}.timeline_events`,
          handoffs: `${schema}.session_handoffs`,
          states: `${schema}.session_states`,
          decisions: `${schema}.decisions`,
          artifacts: `${schema}.artifacts`,
        };

        for (const [bucket, table] of Object.entries(bucketMap)) {
          const rows = bundle[bucket];
          if (!Array.isArray(rows) || rows.length === 0) continue;
          for (const raw of rows) {
            if (!raw) continue;
            const key = raw.idempotency_key;
            if (!key) {
              wouldCreate[bucket]++;
              if (mode === 'apply') {
                // Insert with NULL idempotency_key — always creates a new row.
                await insertRaw(client, table, { ...raw, tenant_id: tenantId, session_row_id: sessionRowId });
                created[bucket]++;
              }
              continue;
            }
            // NOTE(review): this collision lookup is keyed on
            // idempotency_key alone, not scoped by tenant — confirm keys
            // are globally unique across tenants.
            const existing = await client.query(
              `SELECT id FROM ${table} WHERE idempotency_key = $1`, [key],
            );
            if (existing.rowCount > 0) {
              conflicts.push({ entity: bucket, key, reason: 'idempotency_key exists' });
              if (policy === 'fail') {
                throw new AqError('AQ_IMPORT_CONFLICT',
                  `${bucket} row ${key} already exists; policy=fail`);
              }
              if (policy === 'upsert' && mode === 'apply') {
                // Only safe upsert target: update metadata/payload fields where
                // the row's natural key (idempotency_key) matches. We skip
                // updating columns that reshape identity (scope, status chain).
                // Policy 'upsert' is best-effort — producers wanting strict
                // replace should use their own lifecycle APIs.
              }
              continue;
            }
            wouldCreate[bucket]++;
            if (mode === 'apply') {
              await insertRaw(client, table, { ...raw, tenant_id: tenantId, session_row_id: sessionRowId });
              created[bucket]++;
            }
          }
        }

        // dry-run deliberately rolls back: same code path, no writes kept.
        if (mode === 'apply') {
          await client.query('COMMIT');
        } else {
          await client.query('ROLLBACK');
        }

        const result = { mode, wouldCreate, conflicts };
        if (mode === 'apply') result.created = created;
        return ok(result);
      } catch (e) {
        await client.query('ROLLBACK').catch(() => {});
        if (e instanceof AqError && e.code === 'AQ_IMPORT_CONFLICT') {
          return err(e);
        }
        // Any other failure falls through to the outer catch → AQ_INTERNAL.
        throw e;
      } finally {
        client.release();
      }
    } catch (e) {
      if (e instanceof AqError) return err(e);
      return err('AQ_INTERNAL', e.message, { cause: e });
    }
  }

  // diff({ left, right }) -> ok({ changes }) — pure structural comparison
  // of two bundles, no DB access. Each change is
  // { entity, key, change: 'added' | 'removed' | 'modified' }, oriented
  // left → right. Rows are matched by idempotency_key (sessions by
  // session_id) and compared by JSON serialization.
  function diff(input = {}) {
    try {
      if (!input.left || !input.right) {
        return err('AQ_INVALID_INPUT', 'left and right bundles required');
      }
      const changes = [];

      // Identity of a row inside a bucket; rows without an idempotency
      // key fall back to their (local) row id, which only matches within
      // the same database.
      function entityKey(bucket, row) {
        if (bucket === 'session') return row.session_id;
        return row.idempotency_key || `row-${row.id || ''}`;
      }

      // Normalize a bundle into uniformly-shaped arrays per bucket.
      function buckets(b) {
        return {
          session: [b.session],
          summary: b.summary ? [b.summary] : [],
          narratives: b.narratives || [],
          timeline: b.timeline || [],
          handoffs: b.handoffs || [],
          states: b.states || [],
          decisions: b.decisions || [],
          artifacts: b.artifacts || [],
        };
      }

      const L = buckets(input.left);
      const R = buckets(input.right);

      for (const bucket of Object.keys(L)) {
        const leftRows = L[bucket].filter(Boolean);
        const rightRows = R[bucket].filter(Boolean);
        const leftMap = new Map(leftRows.map(r => [entityKey(bucket, r), r]));
        const rightMap = new Map(rightRows.map(r => [entityKey(bucket, r), r]));
        for (const [k, lRow] of leftMap) {
          const rRow = rightMap.get(k);
          if (!rRow) {
            changes.push({ entity: bucketSingular(bucket), key: k, change: 'removed' });
          } else if (JSON.stringify(lRow) !== JSON.stringify(rRow)) {
            changes.push({ entity: bucketSingular(bucket), key: k, change: 'modified' });
          }
        }
        for (const k of rightMap.keys()) {
          if (!leftMap.has(k)) {
            changes.push({ entity: bucketSingular(bucket), key: k, change: 'added' });
          }
        }
      }

      return ok({ changes });
    } catch (e) {
      return err('AQ_INTERNAL', e.message, { cause: e });
    }
  }

  return { export: exportBundle, import: importBundle, diff };
}
362
+
363
// Map a plural bucket name to the singular entity label used in diff
// change records; irregular plurals are special-cased, everything else
// just drops a trailing 's'.
function bucketSingular(b) {
  switch (b) {
    case 'narratives':
      return 'narrative';
    case 'handoffs':
      return 'handoff';
    case 'states':
      return 'state';
    default:
      return b.replace(/s$/, '');
  }
}
367
+
368
// Low-level INSERT helper — takes a row object (column → value) and writes
// whatever columns exist. Used by importBundle for replay. The allowed
// column set is read from information_schema on first use per table and
// cached for the process lifetime; columns the target table doesn't have
// are filtered out here in JS before the INSERT statement is built.
const tableColumnCache = new Map();

// client — pg client (inside the import transaction).
// table  — schema-qualified name, e.g. 'myschema.narratives'; interpolated
//          into SQL, so it must come from the trusted bucketMap only.
// row    — column → value map; `id` and undefined values are dropped so
//          the database assigns fresh surrogate keys and defaults.
async function insertRaw(client, table, row) {
  if (!tableColumnCache.has(table)) {
    const [schemaPart, tablePart] = table.replace(/"/g, '').split('.');
    const { rows: cols } = await client.query(
      `SELECT column_name FROM information_schema.columns
       WHERE table_schema = $1 AND table_name = $2`,
      [schemaPart, tablePart],
    );
    tableColumnCache.set(table, new Set(cols.map(c => c.column_name)));
  }
  const allowed = tableColumnCache.get(table);
  const entries = Object.entries(row)
    .filter(([k, v]) => allowed.has(k) && k !== 'id' && v !== undefined);
  if (entries.length === 0) return;
  const cols = entries.map(e => `"${e[0]}"`).join(', ');
  const placeholders = entries.map((_, i) => `$${i + 1}`).join(', ');
  const values = entries.map(e => {
    const v = e[1];
    // Plain objects are serialized so json/jsonb columns round-trip.
    // Dates and arrays pass through to pg's own binding.
    // NOTE(review): pg binds a JS array as a Postgres array literal, not
    // JSON — confirm no jsonb column ever receives a top-level array.
    if (v && typeof v === 'object' && !(v instanceof Date) && !Array.isArray(v)) {
      return JSON.stringify(v);
    }
    return v;
  });
  await client.query(`INSERT INTO ${table} (${cols}) VALUES (${placeholders})`, values);
}

module.exports = { createBundles, BUNDLE_ENTITIES };