@shadowforge0/aquifer-memory 1.9.0 → 1.9.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/README.md +33 -4
  2. package/README_CN.md +9 -1
  3. package/README_TW.md +5 -2
  4. package/consumers/cli.js +55 -34
  5. package/consumers/codex-active-checkpoint.js +3 -1
  6. package/consumers/codex-current-memory.js +10 -6
  7. package/consumers/codex.js +5 -2
  8. package/consumers/default/daily-entries.js +2 -2
  9. package/consumers/default/index.js +40 -30
  10. package/consumers/default/prompts/summary.js +2 -2
  11. package/consumers/mcp.js +56 -49
  12. package/consumers/openclaw-ext/index.js +1 -1
  13. package/consumers/openclaw-ext/openclaw.plugin.json +1 -1
  14. package/consumers/openclaw-ext/package.json +1 -1
  15. package/consumers/openclaw-plugin.js +66 -23
  16. package/consumers/shared/compat-recall.js +101 -0
  17. package/consumers/shared/openclaw-product-tools.js +130 -0
  18. package/consumers/shared/recall-format.js +2 -2
  19. package/core/aquifer.js +385 -20
  20. package/core/backends/local.js +60 -1
  21. package/core/finalization-review.js +88 -42
  22. package/core/interface.js +629 -0
  23. package/core/mcp-manifest.js +11 -3
  24. package/core/memory-bootstrap.js +25 -27
  25. package/core/memory-consolidation.js +564 -42
  26. package/core/memory-explain.js +20 -51
  27. package/core/memory-promotion.js +392 -55
  28. package/core/memory-recall.js +26 -48
  29. package/core/memory-records.js +91 -103
  30. package/core/memory-type-policy.js +298 -0
  31. package/core/postgres-migrations.js +9 -0
  32. package/core/session-checkpoint-producer.js +3 -1
  33. package/core/session-checkpoints.js +1 -1
  34. package/core/session-finalization.js +2 -2
  35. package/docs/getting-started.md +16 -3
  36. package/docs/setup.md +61 -2
  37. package/package.json +2 -2
  38. package/schema/004-completion.sql +4 -4
  39. package/schema/010-v1-finalization-review.sql +72 -0
  40. package/schema/020-v1-assistant-shaping-memory.sql +30 -0
  41. package/scripts/backfill-canonical-key.js +1 -1
  42. package/scripts/diagnose-fts-zh.js +1 -1
  43. package/scripts/extract-insights-from-recent-sessions.js +4 -4
package/core/aquifer.js CHANGED
@@ -14,11 +14,243 @@ const { createMemoryServingRuntime } = require('./memory-serving');
14
14
  const { createLegacyBootstrap } = require('./legacy-bootstrap');
15
15
  const { buildRerankDocument, resolveEmbedFn, shouldAutoRerank } = require('./recall-runtime');
16
16
  const { filterPublicPlaceholderSessionRows } = require('./public-session-filter');
17
+ const { buildBacklogProductStatus, buildReadinessSurface } = require('./interface');
17
18
 
18
19
  // ---------------------------------------------------------------------------
19
20
  // createAquifer
20
21
  // ---------------------------------------------------------------------------
21
22
 
23
// Processing statuses that still represent actionable backlog work.
const ACTIONABLE_PENDING_STATUSES = new Set(['pending', 'failed']);

// Finalization-review outcomes that permanently settle a session; rows with
// one of these are filtered out of pending-work queries.
const TERMINAL_FINALIZATION_STATUSES = ['finalized', 'skipped', 'declined', 'deferred'];
25
+
26
/**
 * Coerce a caller-supplied row limit into a safe positive integer.
 *
 * @param {*} value - raw limit (string or number accepted).
 * @param {number} [fallback=100] - used when value is missing or non-positive.
 * @param {number} [max=500] - hard cap on the returned limit.
 * @returns {number} normalized limit in (0, max].
 */
function normalizePendingWorkLimit(value, fallback = 100, max = 500) {
  const requested = parseInt(value, 10);
  const usable = Number.isFinite(requested) && requested > 0;
  return usable ? Math.min(requested, max) : fallback;
}
31
+
32
/**
 * Normalize a status filter (array or comma-separated string) into a unique
 * list of actionable statuses. An empty filter defaults to every actionable
 * status.
 *
 * @param {string[]|string} value - raw filter input.
 * @returns {string[]} de-duplicated actionable statuses.
 * @throws {Error} when any entry is not an actionable pending-work status.
 */
function normalizePendingWorkStatuses(value) {
  const raw = Array.isArray(value) ? value : String(value || '').split(',');
  const cleaned = raw
    .map(entry => String(entry || '').trim())
    .filter(entry => entry.length > 0);
  const candidates = cleaned.length > 0
    ? cleaned
    : Array.from(ACTIONABLE_PENDING_STATUSES);
  candidates.forEach(status => {
    if (!ACTIONABLE_PENDING_STATUSES.has(status)) {
      throw new Error(`pending work status must be one of: ${Array.from(ACTIONABLE_PENDING_STATUSES).join(', ')}`);
    }
  });
  return [...new Set(candidates)];
}
45
+
46
/**
 * Validate the requested planning action, defaulting to 'inspect'.
 *
 * @param {*} value - raw action input.
 * @returns {'inspect'|'backfill'|'skip'} the validated action.
 * @throws {Error} when the action is not one of the supported values.
 */
function normalizePendingWorkAction(value) {
  const action = String(value || 'inspect').trim();
  switch (action) {
    case 'inspect':
    case 'backfill':
    case 'skip':
      return action;
    default:
      throw new Error('pending work action must be inspect, backfill, or skip');
  }
}
53
+
54
// Return the singular form for exactly one item, otherwise the plural form
// (defaults to singular + 's').
function pluralize(count, singular, plural = `${singular}s`) {
  if (count === 1) return singular;
  return plural;
}
57
+
58
/**
 * Map a raw sessions row (snake_case DB columns) onto the camelCase
 * pending-work sample shape served to consumers. Missing nullable columns
 * become null; missing counters become 0; missing source becomes 'api'.
 */
function normalizePendingWorkRow(row = {}) {
  const orNull = (v) => v || null;
  return {
    sessionId: row.session_id,
    sessionKey: orNull(row.session_key),
    source: row.source || 'api',
    agentId: row.agent_id,
    status: row.processing_status,
    processingError: orNull(row.processing_error),
    startedAt: orNull(row.started_at),
    lastMessageAt: orNull(row.last_message_at),
    msgCount: row.msg_count || 0,
    userCount: row.user_count || 0,
    model: orNull(row.model),
  };
}
73
+
74
/**
 * Map an aggregated sessions row (GROUP BY source/agent/status) onto the
 * camelCase pending-work group shape. Missing timestamps become null and
 * missing counters become 0.
 */
function normalizePendingWorkGroup(row = {}) {
  const orNull = (v) => v || null;
  return {
    source: row.source || 'api',
    agentId: row.agent_id,
    status: row.processing_status,
    count: row.count || 0,
    earliest: orNull(row.earliest),
    latest: orNull(row.latest),
    msgCount: row.msg_count || 0,
    userCount: row.user_count || 0,
  };
}
86
+
87
/**
 * Build operator guidance entries for each actionable status present in the
 * aggregated groups ('pending' first, then 'failed'). The strings are part
 * of the public payload contract and are reproduced verbatim.
 */
function buildPendingWorkGuidance(groups = []) {
  const present = new Set(groups.map(group => group.status));
  const catalog = [
    {
      status: 'pending',
      recommendedAction: 'backfill',
      alternateAction: 'skip',
      reason: 'Pending sessions can be enriched; skip is reserved for deterministic policy suppression.',
    },
    {
      status: 'failed',
      recommendedAction: 'backfill',
      alternateAction: 'inspect_error',
      reason: 'Failed sessions are retryable work; session skip only mutates rows still in pending status.',
    },
  ];
  return catalog.filter(entry => present.has(entry.status));
}
108
+
109
/**
 * Build a dry-run plan describing what the requested action would do to the
 * backlog. Aggregate allowed/blocked counts come from the groups; per-row
 * change entries come from the (limit-bounded) samples. Nothing is mutated.
 *
 * @param {object[]} samples - normalized sample rows (normalizePendingWorkRow).
 * @param {'inspect'|'backfill'|'skip'} action - requested action.
 * @param {object[]} groups - normalized aggregates (normalizePendingWorkGroup).
 * @returns {object} plan with action, dryRunOnly, allowed, blocked, changes, notes.
 */
function buildPendingWorkPlan(samples = [], action = 'inspect', groups = []) {
  const totalByStatus = {};
  (groups || []).forEach(group => {
    const prior = totalByStatus[group.status] || 0;
    totalByStatus[group.status] = prior + (group.count || 0);
  });

  const plan = {
    action,
    dryRunOnly: true,
    allowed: 0,
    blocked: 0,
    changes: [],
    notes: [],
  };

  // Inspect never plans lifecycle changes.
  if (action === 'inspect') {
    plan.notes.push('No lifecycle changes planned.');
    return plan;
  }

  if (action === 'backfill') {
    // Backfill may enrich every matched row regardless of status.
    let grandTotal = 0;
    for (const count of Object.values(totalByStatus)) grandTotal += count;
    plan.allowed = grandTotal;
  } else if (action === 'skip') {
    // Skip is a CAS on pending rows only; failed rows are counted as blocked.
    plan.allowed = totalByStatus.pending || 0;
    plan.blocked = totalByStatus.failed || 0;
  }

  for (const row of samples) {
    const base = {
      sessionId: row.sessionId,
      source: row.source,
      agentId: row.agentId,
      status: row.status,
    };
    if (action === 'backfill') {
      plan.changes.push({ ...base, operation: 'enrich', allowed: true });
    } else if (action === 'skip' && row.status === 'pending') {
      plan.changes.push({ ...base, operation: 'mark_session_skipped', allowed: true });
    } else {
      plan.changes.push({
        ...base,
        operation: action,
        allowed: false,
        reason: action === 'skip'
          ? 'skip only applies to pending sessions; failed sessions should be retried or suppressed through an explicit terminal policy'
          : 'unsupported action',
      });
    }
  }

  if (action === 'skip' && plan.blocked > 0) {
    plan.notes.push(`${plan.blocked} failed row(s) are intentionally blocked from skip. Filter to --status pending before applying skip.`);
  }
  return plan;
}
172
+
173
/**
 * Summarize grouped backlog counts into a consumer-facing decision surface.
 * Delegates headline/action copy to buildBacklogProductStatus and maps its
 * code onto the coarse statuses 'clear' | 'recoverable' | 'attention'
 * (anything unrecognized is treated as 'attention').
 */
function buildPendingWorkDecision(groups = []) {
  const statusCounts = {};
  for (const group of groups || []) {
    statusCounts[group.status] = (statusCounts[group.status] || 0) + (group.count || 0);
  }
  const publicStatus = buildBacklogProductStatus({ statusCounts });
  let status = 'attention';
  if (publicStatus.code === 'clear') {
    status = 'clear';
  } else if (publicStatus.code === 'recoverable') {
    status = 'recoverable';
  }
  return {
    status,
    summary: publicStatus.headline,
    nextStep: publicStatus.action,
    statusCounts,
    publicStatus,
  };
}
191
+
192
/**
 * Derive a human-readable readiness surface from backend stats.
 *
 * Emits five checks (service, saved content, memory, app link, backlog),
 * each with a status of 'ready' | 'empty' | 'attention' | 'not_enabled',
 * and hands them to buildReadinessSurface for final shaping.
 */
function buildReadiness(stats = {}) {
  const serving = stats.serving || {};
  const servingMode = serving.mode || 'legacy';
  const activeScopePath = Array.isArray(serving.activeScopePath)
    ? serving.activeScopePath
    : [];
  const memoryRecords = stats.memoryRecords || {};
  const pendingTotal = (stats.pendingSessions && stats.pendingSessions.total) || 0;
  const hasHistoricalMemory = (stats.summaries || 0) > 0 || (stats.turnEmbeddings || 0) > 0;

  // Curated serving is the prerequisite for "current memory" and scope links.
  const currentEnabled = servingMode === 'curated';
  const activeCurrentRows = memoryRecords.available ? (memoryRecords.active || 0) : 0;
  const currentReady = currentEnabled && activeCurrentRows > 0;
  const currentEmpty = currentEnabled && activeCurrentRows === 0;
  // A scope path of just ['global'] does not count as an app-specific link.
  const scopeIsGlobalOnly = activeScopePath.length === 1 && activeScopePath[0] === 'global';
  const activeScopeReady = currentEnabled && activeScopePath.length > 0 && !scopeIsGlobalOnly;

  let currentStatus = 'not_enabled';
  let currentMessage = 'Memory is still getting ready.';
  if (currentReady) {
    currentStatus = 'ready';
    currentMessage = `Memory has ${activeCurrentRows} active row(s).`;
  } else if (currentEmpty) {
    currentStatus = 'empty';
    currentMessage = 'Memory is enabled, but no active rows are available yet.';
  }

  let scopeStatus = 'not_enabled';
  let scopeMessage = 'Memory link is not active yet.';
  if (activeScopeReady) {
    scopeStatus = 'ready';
    scopeMessage = `Memory is linked through ${activeScopePath.join(' > ')}.`;
  } else if (currentEnabled) {
    scopeStatus = 'attention';
    scopeMessage = 'Memory is not linked to this app yet.';
  }

  const checks = [
    {
      key: 'backend',
      status: 'ready',
      label: 'Service',
      message: 'Aquifer is online.',
    },
    {
      key: 'historical_memory',
      status: hasHistoricalMemory ? 'ready' : 'empty',
      label: 'Saved content',
      message: hasHistoricalMemory
        ? 'Saved content is available.'
        : 'No saved content is available yet.',
    },
    {
      key: 'current_memory',
      status: currentStatus,
      label: 'Memory',
      message: currentMessage,
    },
    {
      key: 'active_scope',
      status: scopeStatus,
      label: 'App link',
      message: scopeMessage,
    },
    {
      key: 'backlog',
      status: pendingTotal > 0 ? 'attention' : 'ready',
      label: 'Saved content',
      message: pendingTotal > 0
        ? `${pendingTotal} saved-content ${pluralize(pendingTotal, 'item')} still need attention.`
        : 'Saved content is ready.',
    },
  ];

  return buildReadinessSurface(stats, { checks });
}
253
+
22
254
  function createAquifer(config = {}) {
23
255
  const backendKind = normalizeBackendKind(config.backend?.kind || config.storage?.backend || 'postgres');
24
256
  if (backendKind !== 'postgres') {
@@ -681,7 +913,10 @@ function createAquifer(config = {}) {
681
913
  const [lexicalRows, embeddingRows] = await Promise.all([
682
914
  runLexical ? aquifer.memory.recall(query, {
683
915
  ...scopedOpts,
684
- ftsConfig: migrationRuntime.getFtsConfig(),
916
+ // memory_records.search_tsv is generated with the stable simple config
917
+ // in schema/007, so curated memory_recall must query with the same
918
+ // config even when legacy session search selected a zh tokenizer.
919
+ ftsConfig: 'simple',
685
920
  }) : Promise.resolve([]),
686
921
  runVector ? aquifer.memory.recallViaMemoryEmbeddings(queryVec, scopedOpts) : Promise.resolve([]),
687
922
  ]);
@@ -1163,22 +1398,24 @@ function createAquifer(config = {}) {
1163
1398
  async skip(sessionId, opts = {}) {
1164
1399
  const agentId = opts.agentId || 'agent';
1165
1400
  const reason = opts.reason || null;
1401
+ const source = opts.source || null;
1166
1402
  // Atomic CAS: only skip if still pending (avoids race with concurrent enrich)
1167
1403
  const result = await pool.query(
1168
1404
  `UPDATE ${qi(schema)}.sessions
1169
1405
  SET processing_status = 'skipped', processing_error = $1
1170
1406
  WHERE session_id = $2 AND agent_id = $3 AND tenant_id = $4
1407
+ AND ($5::text IS NULL OR source = $5)
1171
1408
  AND processing_status = 'pending'
1172
1409
  RETURNING id`,
1173
- [reason, sessionId, agentId, tenantId]
1410
+ [reason, sessionId, agentId, tenantId, source]
1174
1411
  );
1175
1412
  if (result.rows.length === 0) {
1176
1413
  // Check if session exists at all
1177
- const existing = await storage.getSession(pool, sessionId, agentId, {}, { schema, tenantId });
1414
+ const existing = await storage.getSession(pool, sessionId, agentId, { source: source || undefined }, { schema, tenantId });
1178
1415
  if (!existing) throw new Error(`Session not found: ${sessionId} (agentId=${agentId})`);
1179
1416
  return null; // exists but not pending — no-op
1180
1417
  }
1181
- return { id: result.rows[0].id, sessionId, agentId, status: 'skipped' };
1418
+ return { id: result.rows[0].id, sessionId, agentId, source, status: 'skipped' };
1182
1419
  },
1183
1420
 
1184
1421
  // --- public config accessor ---
@@ -1361,7 +1598,7 @@ function createAquifer(config = {}) {
1361
1598
  /* session_finalizations table may not exist on older installs */
1362
1599
  }
1363
1600
 
1364
- return {
1601
+ const stats = {
1365
1602
  backendKind,
1366
1603
  backendProfile: backendInfo.profile,
1367
1604
  serving: {
@@ -1381,17 +1618,145 @@ function createAquifer(config = {}) {
1381
1618
  earliest: timeRange.rows[0]?.earliest || null,
1382
1619
  latest: timeRange.rows[0]?.latest || null,
1383
1620
  };
1621
+ return {
1622
+ ...stats,
1623
+ readiness: buildReadiness(stats),
1624
+ };
1625
+ },
1626
+
1627
/**
 * Inspect the pending/failed session backlog without mutating anything.
 *
 * Aggregates sessions by (source, agent, status), samples the most recent
 * rows, and attaches a dry-run plan plus operator guidance for the requested
 * action ('inspect' | 'backfill' | 'skip'). Sessions whose finalization
 * review already reached a terminal status are excluded when the
 * session_finalizations table exists.
 *
 * @param {object} opts - limit, status/statuses, source, agentId, action/plan.
 * @returns {Promise<object>} pending-work report (always dry-run only).
 */
async getPendingWork(opts = {}) {
  // Normalize and validate caller input up front; these throw on bad values.
  const limit = normalizePendingWorkLimit(opts.limit, 100, 500);
  const statuses = normalizePendingWorkStatuses(opts.statuses || opts.status);
  const action = normalizePendingWorkAction(opts.action || opts.plan);
  // Echoed back in the response so callers can see what was applied.
  const filters = {
    source: opts.source || null,
    agentId: opts.agentId || null,
    statuses,
    limit,
  };

  // Shared WHERE clause; params are positional ($1, $2, ...), appended in order.
  const params = [tenantId, statuses];
  const where = [
    's.tenant_id = $1',
    's.processing_status = ANY($2::text[])',
  ];
  if (filters.source) {
    params.push(filters.source);
    where.push(`s.source = $${params.length}`);
  }
  if (filters.agentId) {
    params.push(filters.agentId);
    where.push(`s.agent_id = $${params.length}`);
  }

  // Excludes sessions with a terminal finalization. The ANY() placeholder is
  // $(params.length + 1), i.e. the first parameter after the filters above —
  // runQueries appends TERMINAL_FINALIZATION_STATUSES in exactly that slot.
  const terminalExclusion = `NOT EXISTS (
    SELECT 1
    FROM ${qi(schema)}.session_finalizations f
    WHERE f.tenant_id = s.tenant_id
      AND f.session_id = s.session_id
      AND f.agent_id = s.agent_id
      AND f.source = s.source
      AND f.status = ANY($${params.length + 1}::text[])
  )`;
  // Runs the aggregate + sample queries in parallel. excludeTerminal=false is
  // the fallback for older installs without the session_finalizations table.
  const runQueries = async (excludeTerminal = true) => {
    const queryParams = excludeTerminal ? [...params, TERMINAL_FINALIZATION_STATUSES] : params;
    const whereSql = [...where, ...(excludeTerminal ? [terminalExclusion] : [])].join(' AND ');
    const limitParam = queryParams.length + 1;
    const [groupsResult, samplesResult] = await Promise.all([
      // Aggregate view: one row per (source, agent, status) with counts/bounds.
      pool.query(
        `SELECT
          COALESCE(s.source, 'api') AS source,
          s.agent_id,
          s.processing_status,
          COUNT(*)::int AS count,
          MIN(s.started_at) AS earliest,
          MAX(s.started_at) AS latest,
          COALESCE(SUM(s.msg_count), 0)::int AS msg_count,
          COALESCE(SUM(s.user_count), 0)::int AS user_count
        FROM ${qi(schema)}.sessions s
        WHERE ${whereSql}
        GROUP BY COALESCE(s.source, 'api'), s.agent_id, s.processing_status
        ORDER BY COUNT(*) DESC, latest DESC`,
        queryParams,
      ),
      // Sample view: most recent matching rows, bounded by the caller limit.
      pool.query(
        `SELECT
          s.session_id,
          s.session_key,
          COALESCE(s.source, 'api') AS source,
          s.agent_id,
          s.processing_status,
          s.processing_error,
          s.started_at,
          s.last_message_at,
          s.msg_count,
          s.user_count,
          s.model
        FROM ${qi(schema)}.sessions s
        WHERE ${whereSql}
        ORDER BY s.started_at DESC
        LIMIT $${limitParam}`,
        [...queryParams, limit],
      ),
    ]);
    return { groupsResult, samplesResult, terminalFiltering: excludeTerminal };
  };

  let result;
  try {
    result = await runQueries(true);
  } catch (err) {
    // 42P01 = undefined_table: session_finalizations missing on older installs.
    if (err?.code !== '42P01') throw err;
    result = await runQueries(false);
  }

  const groups = result.groupsResult.rows.map(normalizePendingWorkGroup);
  const samples = result.samplesResult.rows.map(normalizePendingWorkRow);
  const total = groups.reduce((sum, group) => sum + group.count, 0);
  const decision = buildPendingWorkDecision(groups);
  return {
    available: true,
    generatedAt: new Date().toISOString(),
    terminalFiltering: result.terminalFiltering,
    filters,
    total,
    status: decision.status,
    statusCounts: decision.statusCounts,
    summary: decision.summary,
    nextStep: decision.nextStep,
    publicStatus: decision.publicStatus,
    groups,
    samples,
    guidance: buildPendingWorkGuidance(groups),
    plan: buildPendingWorkPlan(samples, action, groups),
  };
},
1734
 
1386
1735
  async getPendingSessions(opts = {}) {
1387
- const limit = opts.limit !== undefined ? opts.limit : 100;
1736
+ const limit = normalizePendingWorkLimit(opts.limit, 100, 500);
1737
+ const statuses = normalizePendingWorkStatuses(opts.statuses || opts.status);
1738
+ const params = [tenantId, statuses];
1739
+ const where = [
1740
+ 's.tenant_id = $1',
1741
+ 's.processing_status = ANY($2::text[])',
1742
+ ];
1743
+ if (opts.source) {
1744
+ params.push(opts.source);
1745
+ where.push(`s.source = $${params.length}`);
1746
+ }
1747
+ if (opts.agentId) {
1748
+ params.push(opts.agentId);
1749
+ where.push(`s.agent_id = $${params.length}`);
1750
+ }
1388
1751
  let result;
1389
1752
  try {
1753
+ const terminalParam = params.length + 1;
1754
+ const limitParam = params.length + 2;
1390
1755
  result = await pool.query(
1391
- `SELECT s.session_id, s.agent_id, s.processing_status
1756
+ `SELECT s.session_id, s.session_key, s.source, s.agent_id, s.processing_status,
1757
+ s.processing_error, s.started_at, s.last_message_at, s.msg_count, s.user_count, s.model
1392
1758
  FROM ${qi(schema)}.sessions s
1393
- WHERE s.tenant_id = $1
1394
- AND s.processing_status IN ('pending', 'failed')
1759
+ WHERE ${where.join(' AND ')}
1395
1760
  AND NOT EXISTS (
1396
1761
  SELECT 1
1397
1762
  FROM ${qi(schema)}.session_finalizations f
@@ -1399,22 +1764,23 @@ function createAquifer(config = {}) {
1399
1764
  AND f.session_id = s.session_id
1400
1765
  AND f.agent_id = s.agent_id
1401
1766
  AND f.source = s.source
1402
- AND f.status IN ('finalized', 'skipped', 'declined', 'deferred')
1767
+ AND f.status = ANY($${terminalParam}::text[])
1403
1768
  )
1404
1769
  ORDER BY s.started_at DESC
1405
- LIMIT $2`,
1406
- [tenantId, limit]
1770
+ LIMIT $${limitParam}`,
1771
+ [...params, TERMINAL_FINALIZATION_STATUSES, limit]
1407
1772
  );
1408
1773
  } catch (err) {
1409
1774
  if (err?.code !== '42P01') throw err;
1775
+ const limitParam = params.length + 1;
1410
1776
  result = await pool.query(
1411
- `SELECT session_id, agent_id, processing_status
1412
- FROM ${qi(schema)}.sessions
1413
- WHERE tenant_id = $1
1414
- AND processing_status IN ('pending', 'failed')
1777
+ `SELECT session_id, session_key, source, agent_id, processing_status,
1778
+ processing_error, started_at, last_message_at, msg_count, user_count, model
1779
+ FROM ${qi(schema)}.sessions s
1780
+ WHERE ${where.join(' AND ')}
1415
1781
  ORDER BY started_at DESC
1416
- LIMIT $2`,
1417
- [tenantId, limit]
1782
+ LIMIT $${limitParam}`,
1783
+ [...params, limit]
1418
1784
  );
1419
1785
  }
1420
1786
  return result.rows;
@@ -1499,8 +1865,7 @@ function createAquifer(config = {}) {
1499
1865
  aquifer.consolidation = createConsolidation({ pool, schema: qSchema, defaultTenantId: tenantId });
1500
1866
  aquifer.bundles = createBundles({ pool, schema: qSchema, defaultTenantId: tenantId });
1501
1867
  // entityState materialises in schema/005-entity-state-history.sql, gated on
1502
- // entitiesEnabled (it FK-references entities). Drop-clean — see
1503
- // scripts/drop-entity-state-history.sql.
1868
+ // entitiesEnabled because it FK-references entities.
1504
1869
  aquifer.entityState = createEntityState({ pool, schema: qSchema, defaultTenantId: tenantId });
1505
1870
  // insights materialises in schema/006-insights.sql. No FK from elsewhere
1506
1871
  // into this table; DROP CASCADE is clean. See scripts/drop-insights.sql.
@@ -3,6 +3,7 @@
3
3
  const fs = require('fs/promises');
4
4
  const path = require('path');
5
5
  const { backendCapabilities, unsupportedCapabilityError } = require('./capabilities');
6
+ const { buildBacklogProductStatus, buildReadinessSurface } = require('../interface');
6
7
 
7
8
  function emptyStore() {
8
9
  const now = new Date().toISOString();
@@ -420,7 +421,7 @@ function createLocalAquifer(config = {}) {
420
421
  counts[status] = (counts[status] || 0) + 1;
421
422
  }
422
423
  const dates = sessions.map(s => s.startedAt).filter(Boolean).sort();
423
- return {
424
+ const stats = {
424
425
  backendKind: 'local',
425
426
  backendProfile: capabilities.profile,
426
427
  serving: {
@@ -454,6 +455,64 @@ function createLocalAquifer(config = {}) {
454
455
  degraded: true,
455
456
  capabilities: capabilities.capabilities,
456
457
  };
458
+ return {
459
+ ...stats,
460
+ readiness: buildReadinessSurface(stats, {
461
+ checks: [
462
+ {
463
+ key: 'backend',
464
+ status: 'ready',
465
+ label: 'Service',
466
+ message: 'Aquifer is online.',
467
+ },
468
+ {
469
+ key: 'historical_memory',
470
+ status: sessions.length > 0 ? 'ready' : 'empty',
471
+ label: 'Saved content',
472
+ message: sessions.length > 0
473
+ ? 'Saved local content is available.'
474
+ : 'No saved content is available yet.',
475
+ },
476
+ {
477
+ key: 'current_memory',
478
+ status: 'not_supported',
479
+ label: 'Memory',
480
+ message: 'Complete memory requires full storage.',
481
+ },
482
+ ],
483
+ }),
484
+ };
485
+ },
486
/**
 * Local-backend stub of getPendingWork: the local backend has no
 * asynchronous enrichment pipeline, so the backlog is always empty.
 * The response mirrors the postgres backend's shape so consumers need
 * no backend-specific branching.
 */
async getPendingWork(opts = {}) {
  const publicStatus = buildBacklogProductStatus({ statusCounts: {} });
  return {
    available: true,
    generatedAt: new Date().toISOString(),
    terminalFiltering: false,
    filters: {
      source: opts.source || null,
      agentId: opts.agentId || null,
      // Mirrors the postgres defaults without importing its normalizers.
      statuses: opts.status ? [String(opts.status)] : ['pending', 'failed'],
      limit: Math.max(1, Math.min(500, opts.limit || 100)),
    },
    total: 0,
    status: 'clear',
    statusCounts: {},
    summary: 'All saved content is ready.',
    nextStep: 'No action required.',
    publicStatus,
    groups: [],
    samples: [],
    guidance: [],
    plan: {
      action: opts.action || opts.plan || 'inspect',
      dryRunOnly: true,
      allowed: 0,
      blocked: 0,
      changes: [],
      notes: ['Local backend has no asynchronous pending/failed session backlog.'],
    },
  };
},
458
517
  async getPendingSessions() {
459
518
  return [];