@shadowforge0/aquifer-memory 1.6.0 → 1.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. package/.env.example +8 -0
  2. package/README.md +72 -0
  3. package/README_CN.md +17 -0
  4. package/README_TW.md +4 -0
  5. package/aquifer.config.example.json +19 -0
  6. package/consumers/cli.js +259 -12
  7. package/consumers/codex-active-checkpoint.js +186 -0
  8. package/consumers/codex-current-memory.js +106 -0
  9. package/consumers/codex-handoff.js +551 -6
  10. package/consumers/codex.js +209 -25
  11. package/consumers/mcp.js +144 -6
  12. package/consumers/shared/config.js +60 -1
  13. package/consumers/shared/factory.js +10 -3
  14. package/core/aquifer.js +357 -838
  15. package/core/backends/capabilities.js +89 -0
  16. package/core/backends/local.js +430 -0
  17. package/core/legacy-bootstrap.js +140 -0
  18. package/core/mcp-manifest.js +66 -2
  19. package/core/memory-bootstrap.js +20 -8
  20. package/core/memory-consolidation.js +365 -11
  21. package/core/memory-promotion.js +157 -26
  22. package/core/memory-recall.js +341 -22
  23. package/core/memory-records.js +347 -11
  24. package/core/memory-serving.js +132 -0
  25. package/core/postgres-migrations.js +533 -0
  26. package/core/public-session-filter.js +40 -0
  27. package/core/recall-runtime.js +115 -0
  28. package/core/scope-attribution.js +279 -0
  29. package/core/session-checkpoint-producer.js +412 -0
  30. package/core/session-checkpoints.js +432 -0
  31. package/core/session-finalization.js +98 -2
  32. package/core/storage-checkpoints.js +546 -0
  33. package/core/storage.js +121 -8
  34. package/docs/getting-started.md +6 -0
  35. package/docs/setup.md +66 -3
  36. package/package.json +8 -4
  37. package/schema/014-v1-checkpoint-runs.sql +349 -0
  38. package/schema/015-v1-evidence-items.sql +92 -0
  39. package/schema/016-v1-evidence-ref-multi-item.sql +19 -0
  40. package/schema/017-v1-memory-record-embeddings.sql +25 -0
  41. package/schema/018-v1-finalization-candidate-envelope.sql +39 -0
  42. package/scripts/codex-checkpoint-commands.js +464 -0
  43. package/scripts/codex-checkpoint-runtime.js +520 -0
  44. package/scripts/codex-recovery.js +246 -1
package/core/storage.js CHANGED
@@ -1,6 +1,14 @@
1
1
  'use strict';
2
2
 
3
3
  const crypto = require('crypto');
4
+ const {
5
+ upsertCheckpointRun,
6
+ updateCheckpointRunStatus,
7
+ listCheckpointRuns,
8
+ upsertCheckpointRunSources,
9
+ listCheckpointRunSources,
10
+ } = require('./storage-checkpoints');
11
+ const { publicPlaceholderSummarySql } = require('./public-session-filter');
4
12
 
5
13
  // C1: quote identifier for SQL safety
6
14
  function qi(identifier) { return `"${identifier}"`; }
@@ -49,7 +57,6 @@ const FINALIZATION_MODES = new Set([
49
57
  'afterburn',
50
58
  'manual',
51
59
  ]);
52
-
53
60
  function requireField(obj, field) {
54
61
  if (!obj || obj[field] === undefined || obj[field] === null || obj[field] === '') {
55
62
  throw new Error(`${field} is required`);
@@ -60,6 +67,19 @@ function toJson(value, fallback) {
60
67
  return JSON.stringify(value === undefined ? fallback : value);
61
68
  }
62
69
 
70
+ function stableJson(value) {
71
+ if (value === null || value === undefined) return JSON.stringify(null);
72
+ if (Array.isArray(value)) return `[${value.map(stableJson).join(',')}]`;
73
+ if (typeof value === 'object') {
74
+ return `{${Object.keys(value).sort().map(key => `${JSON.stringify(key)}:${stableJson(value[key])}`).join(',')}}`;
75
+ }
76
+ return JSON.stringify(value);
77
+ }
78
+
79
+ function hashStable(value) {
80
+ return crypto.createHash('sha256').update(stableJson(value)).digest('hex');
81
+ }
82
+
63
83
  // ---------------------------------------------------------------------------
64
84
  // upsertSession
65
85
  // ---------------------------------------------------------------------------
@@ -270,6 +290,7 @@ async function searchSessions(pool, query, {
270
290
  const where = [
271
291
  `(ss.search_text ILIKE '%' || $1 || '%' OR ss.search_tsv @@ plainto_tsquery('${cfg}', $2))`,
272
292
  `s.tenant_id = $3`,
293
+ `NOT ${publicPlaceholderSummarySql('ss')}`,
273
294
  ];
274
295
  const params = [likeQuery, query, tenantId];
275
296
 
@@ -372,21 +393,34 @@ async function upsertSessionFinalization(pool, input = {}, { schema, tenantId: d
372
393
  const status = normalizeFinalizationStatus(input.status || 'pending');
373
394
  const mode = normalizeFinalizationMode(input.mode || 'handoff');
374
395
  const phase = input.phase || 'curated_memory_v1';
396
+ const candidateEnvelope = input.candidateEnvelope || input.candidate_envelope || {};
397
+ const candidateEnvelopeHash = input.candidateEnvelopeHash
398
+ || input.candidate_envelope_hash
399
+ || (candidateEnvelope && Object.keys(candidateEnvelope).length > 0 ? hashStable(candidateEnvelope) : null);
400
+ const candidateEnvelopeVersion = input.candidateEnvelopeVersion
401
+ || input.candidate_envelope_version
402
+ || candidateEnvelope.version
403
+ || null;
404
+ const coverage = input.coverage || {};
375
405
  const preserveTerminal = `${finalizationTerminalSql(qi(schema) + '.session_finalizations')}
376
406
  AND ${qi(schema)}.session_finalizations.status <> EXCLUDED.status`;
377
407
  const result = await pool.query(
378
408
  `INSERT INTO ${qi(schema)}.session_finalizations (
379
409
  tenant_id, session_row_id, source, host, agent_id, session_id,
380
410
  transcript_hash, phase, mode, status, finalizer_model, scope_kind,
381
- scope_key, context_key, topic_key, summary_row_id, memory_result,
411
+ scope_key, context_key, topic_key, scope_id, scope_snapshot,
412
+ summary_row_id, memory_result,
382
413
  summary_text, structured_summary, human_review_text, session_start_text,
383
- error, metadata, claimed_at, finalized_at
414
+ candidate_envelope, candidate_envelope_hash, candidate_envelope_version,
415
+ coverage, error, metadata, claimed_at, finalized_at
384
416
  )
385
417
  VALUES (
386
418
  $1,$2,$3,COALESCE($4,'codex'),$5,$6,$7,COALESCE($8,'curated_memory_v1'),
387
419
  $9,$10,$11,$12,$13,$14,$15,$16,COALESCE($17::jsonb,'{}'::jsonb),
388
- $18,COALESCE($19::jsonb,'{}'::jsonb),$20,$21,
389
- $22,COALESCE($23::jsonb,'{}'::jsonb),$24,$25
420
+ $18,COALESCE($19::jsonb,'{}'::jsonb),
421
+ $20,COALESCE($21::jsonb,'{}'::jsonb),$22,$23,
422
+ COALESCE($24::jsonb,'{}'::jsonb),$25,$26,COALESCE($27::jsonb,'{}'::jsonb),
423
+ $28,COALESCE($29::jsonb,'{}'::jsonb),$30,$31
390
424
  )
391
425
  ON CONFLICT (tenant_id, source, agent_id, session_id, transcript_hash, phase)
392
426
  DO UPDATE SET
@@ -435,6 +469,16 @@ async function upsertSessionFinalization(pool, input = {}, { schema, tenantId: d
435
469
  THEN ${qi(schema)}.session_finalizations.topic_key
436
470
  ELSE COALESCE(EXCLUDED.topic_key, ${qi(schema)}.session_finalizations.topic_key)
437
471
  END,
472
+ scope_id = CASE
473
+ WHEN ${preserveTerminal}
474
+ THEN ${qi(schema)}.session_finalizations.scope_id
475
+ ELSE COALESCE(EXCLUDED.scope_id, ${qi(schema)}.session_finalizations.scope_id)
476
+ END,
477
+ scope_snapshot = CASE
478
+ WHEN ${preserveTerminal}
479
+ THEN ${qi(schema)}.session_finalizations.scope_snapshot
480
+ ELSE COALESCE(NULLIF(EXCLUDED.scope_snapshot, '{}'::jsonb), ${qi(schema)}.session_finalizations.scope_snapshot)
481
+ END,
438
482
  summary_row_id = CASE
439
483
  WHEN ${preserveTerminal}
440
484
  THEN ${qi(schema)}.session_finalizations.summary_row_id
@@ -465,6 +509,26 @@ async function upsertSessionFinalization(pool, input = {}, { schema, tenantId: d
465
509
  THEN ${qi(schema)}.session_finalizations.session_start_text
466
510
  ELSE COALESCE(EXCLUDED.session_start_text, ${qi(schema)}.session_finalizations.session_start_text)
467
511
  END,
512
+ candidate_envelope = CASE
513
+ WHEN ${preserveTerminal}
514
+ THEN ${qi(schema)}.session_finalizations.candidate_envelope
515
+ ELSE COALESCE(NULLIF(EXCLUDED.candidate_envelope, '{}'::jsonb), ${qi(schema)}.session_finalizations.candidate_envelope)
516
+ END,
517
+ candidate_envelope_hash = CASE
518
+ WHEN ${preserveTerminal}
519
+ THEN ${qi(schema)}.session_finalizations.candidate_envelope_hash
520
+ ELSE COALESCE(EXCLUDED.candidate_envelope_hash, ${qi(schema)}.session_finalizations.candidate_envelope_hash)
521
+ END,
522
+ candidate_envelope_version = CASE
523
+ WHEN ${preserveTerminal}
524
+ THEN ${qi(schema)}.session_finalizations.candidate_envelope_version
525
+ ELSE COALESCE(EXCLUDED.candidate_envelope_version, ${qi(schema)}.session_finalizations.candidate_envelope_version)
526
+ END,
527
+ coverage = CASE
528
+ WHEN ${preserveTerminal}
529
+ THEN ${qi(schema)}.session_finalizations.coverage
530
+ ELSE COALESCE(NULLIF(EXCLUDED.coverage, '{}'::jsonb), ${qi(schema)}.session_finalizations.coverage)
531
+ END,
468
532
  error = CASE
469
533
  WHEN ${preserveTerminal}
470
534
  THEN ${qi(schema)}.session_finalizations.error
@@ -507,12 +571,18 @@ async function upsertSessionFinalization(pool, input = {}, { schema, tenantId: d
507
571
  input.scopeKey || null,
508
572
  input.contextKey || null,
509
573
  input.topicKey || null,
574
+ input.scopeId || input.scope_id || null,
575
+ toJson(input.scopeSnapshot || input.scope_snapshot, {}),
510
576
  input.summaryRowId || null,
511
577
  toJson(input.memoryResult, {}),
512
578
  input.summaryText || null,
513
579
  toJson(input.structuredSummary, {}),
514
580
  input.humanReviewText || null,
515
581
  input.sessionStartText || null,
582
+ toJson(candidateEnvelope, {}),
583
+ candidateEnvelopeHash,
584
+ candidateEnvelopeVersion,
585
+ toJson(coverage, {}),
516
586
  input.error || null,
517
587
  toJson(input.metadata, {}),
518
588
  input.claimedAt || (status === 'processing' ? new Date().toISOString() : null),
@@ -638,6 +708,33 @@ function candidateText(candidate = {}) {
638
708
  return '';
639
709
  }
640
710
 
711
+ const EXPLICIT_EVIDENCE_PAYLOAD_KEYS = [
712
+ 'evidenceText',
713
+ 'evidence_text',
714
+ 'evidenceExcerpt',
715
+ 'evidence_excerpt',
716
+ 'sourceText',
717
+ 'source_text',
718
+ 'quote',
719
+ 'evidenceItems',
720
+ 'evidence_items',
721
+ 'evidenceTexts',
722
+ 'evidence_texts',
723
+ ];
724
+
725
+ function candidatePayload(candidate = {}) {
726
+ if (!candidate || typeof candidate !== 'object') return {};
727
+ const base = candidate.payload && typeof candidate.payload === 'object'
728
+ ? { ...candidate.payload }
729
+ : { ...candidate };
730
+ for (const key of EXPLICIT_EVIDENCE_PAYLOAD_KEYS) {
731
+ if (candidate[key] !== undefined && base[key] === undefined) {
732
+ base[key] = candidate[key];
733
+ }
734
+ }
735
+ return base;
736
+ }
737
+
641
738
  async function upsertFinalizationCandidates(pool, rows = [], input = {}, { schema, tenantId: defaultTenantId } = {}) {
642
739
  if (!Array.isArray(rows) || rows.length === 0) return [];
643
740
  requireField(input, 'finalizationId');
@@ -649,14 +746,21 @@ async function upsertFinalizationCandidates(pool, rows = [], input = {}, { schem
649
746
  const memory = row.memory || {};
650
747
  const backingFact = row.backingFact || {};
651
748
  const evidenceRefs = candidate.evidenceRefs || candidate.evidence_refs || [];
749
+ const candidateHash = row.candidateHash || row.candidate_hash || hashStable({
750
+ action: row.action || 'skipped',
751
+ reason: row.reason || null,
752
+ memoryType: candidate.memoryType || candidate.memory_type || memory.memory_type || memory.memoryType || null,
753
+ canonicalKey: candidate.canonicalKey || candidate.canonical_key || memory.canonical_key || memory.canonicalKey || null,
754
+ payload: candidatePayload(candidate),
755
+ });
652
756
  const result = await pool.query(
653
757
  `INSERT INTO ${qi(schema)}.finalization_candidates (
654
758
  tenant_id, finalization_id, session_id, candidate_index, action, reason,
655
759
  memory_type, canonical_key, summary, payload, provenance,
656
- memory_record_id, fact_assertion_id
760
+ memory_record_id, fact_assertion_id, candidate_hash
657
761
  )
658
762
  VALUES (
659
- $1,$2,$3,$4,$5,$6,$7,$8,$9,COALESCE($10::jsonb,'{}'::jsonb),COALESCE($11::jsonb,'{}'::jsonb),$12,$13
763
+ $1,$2,$3,$4,$5,$6,$7,$8,$9,COALESCE($10::jsonb,'{}'::jsonb),COALESCE($11::jsonb,'{}'::jsonb),$12,$13,$14
660
764
  )
661
765
  ON CONFLICT (tenant_id, finalization_id, candidate_index)
662
766
  DO UPDATE SET
@@ -669,6 +773,7 @@ async function upsertFinalizationCandidates(pool, rows = [], input = {}, { schem
669
773
  provenance = COALESCE(NULLIF(EXCLUDED.provenance, '{}'::jsonb), ${qi(schema)}.finalization_candidates.provenance),
670
774
  memory_record_id = COALESCE(EXCLUDED.memory_record_id, ${qi(schema)}.finalization_candidates.memory_record_id),
671
775
  fact_assertion_id = COALESCE(EXCLUDED.fact_assertion_id, ${qi(schema)}.finalization_candidates.fact_assertion_id),
776
+ candidate_hash = COALESCE(EXCLUDED.candidate_hash, ${qi(schema)}.finalization_candidates.candidate_hash),
672
777
  updated_at = now()
673
778
  RETURNING *`,
674
779
  [
@@ -681,10 +786,11 @@ async function upsertFinalizationCandidates(pool, rows = [], input = {}, { schem
681
786
  candidate.memoryType || candidate.memory_type || memory.memory_type || memory.memoryType || null,
682
787
  candidate.canonicalKey || candidate.canonical_key || memory.canonical_key || memory.canonicalKey || null,
683
788
  candidateText(candidate) || candidateText(memory) || null,
684
- toJson(candidate.payload || candidate, {}),
789
+ toJson(candidatePayload(candidate), {}),
685
790
  toJson({ evidenceRefs }, {}),
686
791
  memory.id || memory.memory_id || null,
687
792
  backingFact.id || memory.backing_fact_id || null,
793
+ candidateHash,
688
794
  ]
689
795
  );
690
796
  out.push(result.rows[0] || null);
@@ -839,6 +945,7 @@ async function searchTurnEmbeddings(pool, {
839
945
  params.push(source);
840
946
  where.push(`s.source = $${params.length}`);
841
947
  }
948
+ where.push(`NOT ${publicPlaceholderSummarySql('ss')}`);
842
949
 
843
950
  params.push(`[${queryVec.join(',')}]`);
844
951
  const vecPos = params.length;
@@ -945,6 +1052,7 @@ async function searchSummaryEmbeddings(pool, {
945
1052
  params.push(candidateSessionIds);
946
1053
  where.push(`s.session_id = ANY($${params.length})`);
947
1054
  }
1055
+ where.push(`NOT ${publicPlaceholderSummarySql('ss')}`);
948
1056
 
949
1057
  params.push(limit);
950
1058
 
@@ -1130,6 +1238,11 @@ module.exports = {
1130
1238
  getSessionFinalization,
1131
1239
  updateSessionFinalizationStatus,
1132
1240
  listSessionFinalizations,
1241
+ upsertCheckpointRun,
1242
+ updateCheckpointRunStatus,
1243
+ listCheckpointRuns,
1244
+ upsertCheckpointRunSources,
1245
+ listCheckpointRunSources,
1133
1246
  upsertFinalizationCandidates,
1134
1247
  extractUserTurns,
1135
1248
  upsertTurnEmbeddings,
@@ -83,9 +83,15 @@ For first rollout, keep `AQUIFER_MEMORY_SERVING_MODE=legacy`. Switch to `curated
83
83
  | Start MCP server | `npx aquifer mcp` |
84
84
  | Search memory | `npx aquifer recall "auth middleware"` |
85
85
  | Plan curated compaction | `npx aquifer compact --cadence daily --period-start 2026-04-27T00:00:00Z --period-end 2026-04-28T00:00:00Z` |
86
+ | Generate a timer synthesis prompt | `npx aquifer operator compaction daily --include-synthesis-prompt --json` |
87
+ | Apply reviewed timer synthesis candidates | `npx aquifer operator compaction daily --synthesis-summary-file /tmp/timer-summary.json --apply --promote-candidates --json` |
86
88
  | Show stats | `npx aquifer stats` |
87
89
  | Enrich pending sessions | `npx aquifer backfill` |
88
90
 
91
+ Timer synthesis is an operator-reviewed candidate workflow. The prompt output
92
+ and summary JSON do not become active curated memory unless the apply step is
93
+ run with `--promote-candidates`.
94
+
89
95
  The default public serving mode is `legacy`. To test scoped curated memory serving, set `AQUIFER_MEMORY_SERVING_MODE=curated` plus `AQUIFER_MEMORY_ACTIVE_SCOPE_KEY` or `AQUIFER_MEMORY_ACTIVE_SCOPE_PATH`. Rollback is config-only: set the serving mode back to `legacy` and restart the MCP/CLI process.
90
96
 
91
97
  ## If something fails
package/docs/setup.md CHANGED
@@ -51,10 +51,25 @@ Aquifer reads configuration from three sources (in priority order):
51
51
 
52
52
  Default public serving mode is `legacy`. Opt into `curated` only when you want `session_recall` and `session_bootstrap` to read active curated memory. `evidence_recall` remains the explicit audit/debug lane in both modes, and rollback is just setting env or config back to `legacy`.
53
53
 
54
+ Backend profiles are explicit. `postgres` is the full backend and remains required for semantic recall, migrations, curated memory, and operator workflows. `local` is a zero-config starter profile with JSON-file persistence, raw session writes, lexical recall, bootstrap, stats, and export. It is intentionally degraded and does not create embeddings or run operator workflows:
55
+
56
+ ```bash
57
+ AQUIFER_BACKEND=local npx aquifer backend-info --json
58
+ ```
59
+
54
60
  ### Example config file
55
61
 
56
62
  ```json
57
63
  {
64
+ "storage": {
65
+ "backend": "postgres",
66
+ "postgres": {
67
+ "url": "postgresql://aquifer:aquifer@localhost:5432/aquifer"
68
+ },
69
+ "local": {
70
+ "path": ".aquifer/aquifer.local.json"
71
+ }
72
+ },
58
73
  "db": {
59
74
  "url": "postgresql://aquifer:aquifer@localhost:5432/aquifer"
60
75
  },
@@ -74,6 +89,7 @@ Default public serving mode is `legacy`. Opt into `curated` only when you want `
74
89
 
75
90
  ```bash
76
91
  export DATABASE_URL="postgresql://aquifer:aquifer@localhost:5432/aquifer"
92
+ export AQUIFER_BACKEND="postgres"
77
93
  export AQUIFER_EMBED_BASE_URL="http://localhost:11434/v1"
78
94
  export AQUIFER_EMBED_MODEL="bge-m3"
79
95
  export AQUIFER_MEMORY_SERVING_MODE="legacy"
@@ -97,6 +113,12 @@ export AQUIFER_MEMORY_SERVING_MODE="legacy"
97
113
  # export AQUIFER_MEMORY_SERVING_MODE="curated"
98
114
  # export AQUIFER_MEMORY_ACTIVE_SCOPE_KEY="project:aquifer"
99
115
  # export AQUIFER_MEMORY_ACTIVE_SCOPE_PATH="global,project:aquifer"
116
+
117
+ # Optional Codex active-session checkpoint heartbeat policy.
118
+ # Command flags still take precedence over these env vars.
119
+ # export AQUIFER_CODEX_CHECKPOINT_CHECK_INTERVAL_MINUTES="10"
120
+ # export AQUIFER_CODEX_CHECKPOINT_EVERY_MESSAGES="20"
121
+ # export AQUIFER_CODEX_CHECKPOINT_QUIET_MS="3000"
100
122
  ```
101
123
 
102
124
  Copy `.env.example` from the repo root for a full annotated list.
@@ -213,6 +235,45 @@ Do **not** use the OpenClaw plugin (`consumers/openclaw-plugin.js`) for tool del
213
235
 
214
236
  Curated serving rollback is config-only: set `AQUIFER_MEMORY_SERVING_MODE=legacy` and restart the MCP/CLI process. No destructive database rollback is required.
215
237
 
238
+ ## Operator compaction and timer synthesis
239
+
240
+ Compaction jobs are operator-safe by default. A dry-run plans lifecycle updates
241
+ and candidate output without writing active memory:
242
+
243
+ ```bash
244
+ npx aquifer operator compaction daily --include-synthesis-prompt --json
245
+ ```
246
+
247
+ If an operator or external model reviews that prompt and returns timer synthesis
248
+ JSON, attach it back to the plan with:
249
+
250
+ ```bash
251
+ npx aquifer operator compaction daily \
252
+ --synthesis-summary-file /tmp/timer-summary.json \
253
+ --apply \
254
+ --promote-candidates \
255
+ --json
256
+ ```
257
+
258
+ The summary file must match the normal structured summary shape, for example:
259
+
260
+ ```json
261
+ {
262
+ "summaryText": "Reviewed timer synthesis.",
263
+ "structuredSummary": {
264
+ "states": [
265
+ { "state": "The reviewed state that should continue into current memory." }
266
+ ],
267
+ "decisions": [],
268
+ "open_loops": []
269
+ }
270
+ }
271
+ ```
272
+
273
+ Without `--promote-candidates`, synthesis output is recorded as candidate
274
+ ledger material only. The prompt and summary file are producer material; active
275
+ curated memory still requires the explicit promotion gate.
276
+
216
277
  ## Release verification gates
217
278
 
218
279
  For the publish-surface checks:
@@ -222,13 +283,15 @@ node --test test/package-surface.test.js test/mcp-manifest.test.js
222
283
  npm pack --dry-run --json
223
284
  ```
224
285
 
225
- For the real DB-backed MCP integration gate:
286
+ For the real DB-backed release gate:
226
287
 
227
288
  ```bash
228
- AQUIFER_TEST_DB_URL="postgresql://..." node --test test/consumer-mcp.integration.test.js
289
+ AQUIFER_TEST_DB_URL="postgresql://..." npm run test:release:db
229
290
  ```
230
291
 
231
- That DB-backed test is the release proof that the stdio MCP server, current MCP manifest, and PostgreSQL path still line up on a live database.
292
+ That DB-backed test is the release proof that the stdio MCP server, CLI
293
+ consumer, Codex finalization serving path, current MCP manifest, and PostgreSQL
294
+ path still line up on a live database.
232
295
 
233
296
  ## Troubleshooting
234
297
 
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@shadowforge0/aquifer-memory",
3
- "version": "1.6.0",
3
+ "version": "1.8.0",
4
4
  "description": "PG-native long-term memory for AI agents. Turn-level embedding, hybrid RRF ranking, optional knowledge graph. MCP server, CLI, and library API.",
5
5
  "main": "index.js",
6
6
  "files": [
@@ -15,6 +15,8 @@
15
15
  "consumers/mcp.js",
16
16
  "consumers/claude-code.js",
17
17
  "consumers/codex.js",
18
+ "consumers/codex-active-checkpoint.js",
19
+ "consumers/codex-current-memory.js",
18
20
  "consumers/codex-handoff.js",
19
21
  "consumers/openclaw-plugin.js",
20
22
  "consumers/opencode.js",
@@ -26,6 +28,8 @@
26
28
  "docs/setup.md",
27
29
  ".env.example",
28
30
  "scripts/backfill-canonical-key.js",
31
+ "scripts/codex-checkpoint-commands.js",
32
+ "scripts/codex-checkpoint-runtime.js",
29
33
  "scripts/diagnose-fts-zh.js",
30
34
  "scripts/diagnose-vector.js",
31
35
  "scripts/codex-recovery.js",
@@ -67,9 +71,9 @@
67
71
  "scripts": {
68
72
  "test": "node --test test/*.test.js",
69
73
  "test:integration": "node --test test/integration.test.js",
70
- "test:release:package": "node --test test/package-surface.test.js test/mcp-manifest.test.js",
71
- "test:release:db": "node -e \"if (!process.env.AQUIFER_TEST_DB_URL) { console.error('AQUIFER_TEST_DB_URL is required for test:release:db'); process.exit(1); }\" && node --test test/consumer-mcp.integration.test.js test/consumer-cli.integration.test.js test/codex-finalization-serving.integration.test.js",
72
- "lint": "eslint index.js core/*.js consumers/cli.js consumers/mcp.js consumers/claude-code.js consumers/codex.js consumers/codex-handoff.js consumers/openclaw-plugin.js consumers/opencode.js consumers/shared/*.js consumers/default/*.js consumers/default/prompts/*.js consumers/openclaw-ext/*.js pipeline/*.js pipeline/consolidation/*.js scripts/*.js test/*.js",
74
+ "test:release:package": "node --test test/package-surface.test.js test/mcp-manifest.test.js test/local-backend.test.js test/scope-attribution.test.js test/v1-checkpoint-ledger-schema.test.js test/v1-finalization-envelope-schema.test.js test/v1-evidence-items.test.js test/v1-curated-semantic-recall.test.js test/session-checkpoints.test.js test/session-checkpoint-producer.test.js test/session-checkpoint-planner.test.js test/storage-checkpoint-ranges.test.js test/v1-serving-cutover.test.js test/v1-current-memory-contract.test.js test/v1-scope-inheritance.golden.test.js test/v1-bootstrap-determinism.test.js test/consumer-codex.test.js test/codex-recovery-script.test.js test/codex-handoff.test.js",
75
+ "test:release:db": "node -e \"if (!process.env.AQUIFER_TEST_DB_URL) { console.error('AQUIFER_TEST_DB_URL is required for test:release:db'); process.exit(1); }\" && node --test test/v1-evidence-items.test.js test/consumer-mcp.integration.test.js test/consumer-cli.integration.test.js test/codex-finalization-serving.integration.test.js",
76
+ "lint": "eslint index.js core/*.js core/backends/*.js consumers/cli.js consumers/mcp.js consumers/claude-code.js consumers/codex.js consumers/codex-active-checkpoint.js consumers/codex-current-memory.js consumers/codex-handoff.js consumers/openclaw-plugin.js consumers/opencode.js consumers/shared/*.js consumers/default/*.js consumers/default/prompts/*.js consumers/openclaw-ext/*.js pipeline/*.js pipeline/consolidation/*.js scripts/*.js test/*.js",
73
77
  "hooks:install": "git config core.hooksPath .githooks"
74
78
  },
75
79
  "dependencies": {