@shadowforge0/aquifer-memory 1.0.3 → 1.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +37 -29
- package/consumers/claude-code.js +117 -0
- package/consumers/cli.js +28 -1
- package/consumers/default/daily-entries.js +196 -0
- package/consumers/default/index.js +282 -0
- package/consumers/default/prompts/summary.js +153 -0
- package/consumers/mcp.js +3 -23
- package/consumers/miranda/context-inject.js +119 -0
- package/consumers/miranda/daily-entries.js +224 -0
- package/consumers/miranda/index.js +353 -0
- package/consumers/miranda/instance.js +55 -0
- package/consumers/miranda/llm.js +99 -0
- package/consumers/miranda/profile.json +145 -0
- package/consumers/miranda/prompts/summary.js +303 -0
- package/consumers/miranda/recall-format.js +74 -0
- package/consumers/miranda/render-daily-md.js +186 -0
- package/consumers/miranda/workspace-files.js +91 -0
- package/consumers/openclaw-ext/index.js +38 -0
- package/consumers/openclaw-ext/openclaw.plugin.json +9 -0
- package/consumers/openclaw-ext/package.json +10 -0
- package/consumers/openclaw-plugin.js +66 -74
- package/consumers/opencode.js +21 -24
- package/consumers/shared/autodetect.js +64 -0
- package/consumers/shared/entity-parser.js +119 -0
- package/consumers/shared/ingest.js +148 -0
- package/consumers/shared/llm-autodetect.js +137 -0
- package/consumers/shared/normalize.js +129 -0
- package/consumers/shared/recall-format.js +110 -0
- package/core/aquifer.js +209 -71
- package/core/artifacts.js +174 -0
- package/core/bundles.js +400 -0
- package/core/consolidation.js +340 -0
- package/core/decisions.js +164 -0
- package/core/entity.js +1 -3
- package/core/errors.js +97 -0
- package/core/handoff.js +153 -0
- package/core/mcp-manifest.js +131 -0
- package/core/narratives.js +212 -0
- package/core/profiles.js +171 -0
- package/core/state.js +163 -0
- package/core/storage.js +86 -28
- package/core/timeline.js +152 -0
- package/docs/postprocess-contract.md +132 -0
- package/index.js +23 -1
- package/package.json +23 -2
- package/pipeline/_http.js +1 -1
- package/pipeline/consolidation/apply.js +176 -0
- package/pipeline/consolidation/index.js +21 -0
- package/pipeline/extract-entities.js +2 -2
- package/pipeline/rerank.js +1 -1
- package/pipeline/summarize.js +4 -1
- package/schema/001-base.sql +61 -24
- package/schema/002-entities.sql +17 -3
- package/schema/004-completion.sql +375 -0
- package/schema/004-facts.sql +67 -0
- package/scripts/diagnose-fts-zh.js +168 -134
- package/scripts/diagnose-vector.js +188 -0
- package/scripts/install-openclaw.sh +59 -0
- package/scripts/smoke.mjs +2 -2
package/README.md
CHANGED
|
@@ -63,43 +63,52 @@ Sessions, summaries, turn-level embeddings, entity graph — all live in one dat
|
|
|
63
63
|
|
|
64
64
|
## Quick Start (MCP Server)
|
|
65
65
|
|
|
66
|
-
|
|
66
|
+
Two commands from zero to a working MCP memory server — no env vars to set. For library API usage, see [API Reference](#api-reference) below.
|
|
67
67
|
|
|
68
68
|
### 1. Start the stack
|
|
69
69
|
|
|
70
70
|
```bash
|
|
71
71
|
docker compose up -d
|
|
72
|
-
#
|
|
73
|
-
# First run
|
|
72
|
+
# PostgreSQL 16 + pgvector and Ollama with bge-m3 (auto-pulled).
|
|
73
|
+
# First run pulls the model — `docker compose logs -f ollama-pull` to watch.
|
|
74
74
|
```
|
|
75
75
|
|
|
76
|
-
Already
|
|
76
|
+
Already running PostgreSQL + pgvector and an embedding endpoint? Skip this step — `quickstart` picks up `DATABASE_URL` / `EMBED_PROVIDER` from your environment if you've set them.
|
|
77
77
|
|
|
78
|
-
### 2.
|
|
78
|
+
### 2. Verify
|
|
79
79
|
|
|
80
80
|
```bash
|
|
81
|
-
|
|
81
|
+
npx --yes @shadowforge0/aquifer-memory quickstart
|
|
82
82
|
```
|
|
83
83
|
|
|
84
|
-
|
|
84
|
+
That's it. `quickstart` autodetects `localhost:5432` PostgreSQL and `localhost:11434` Ollama (from step 1 or your own), runs migrations, embeds a test session, recalls it, and cleans up. If it prints `✓ Aquifer is working`, you're done.
|
|
85
85
|
|
|
86
|
-
|
|
87
|
-
export DATABASE_URL="postgresql://aquifer:aquifer@localhost:5432/aquifer"
|
|
88
|
-
export AQUIFER_EMBED_BASE_URL="http://localhost:11434/v1"
|
|
89
|
-
export AQUIFER_EMBED_MODEL="bge-m3"
|
|
86
|
+
For ongoing use, install it into your project so you skip the `npx` resolution cost: `npm install @shadowforge0/aquifer-memory` then `npx aquifer quickstart`.
|
|
90
87
|
|
|
91
|
-
|
|
92
|
-
```
|
|
88
|
+
Using OpenAI instead of Ollama? `export EMBED_PROVIDER=openai` + `OPENAI_API_KEY=sk-...` before `quickstart` — model defaults to `text-embedding-3-small`.
|
|
93
89
|
|
|
94
|
-
|
|
90
|
+
### 3. Wire into your MCP client
|
|
95
91
|
|
|
96
|
-
|
|
92
|
+
Claude Code, Claude Desktop, or any MCP-capable client — drop this into `.mcp.json` (project-level) or `claude_desktop_config.json`:
|
|
97
93
|
|
|
98
|
-
```
|
|
99
|
-
|
|
94
|
+
```jsonc
|
|
95
|
+
{
|
|
96
|
+
"mcpServers": {
|
|
97
|
+
"aquifer": {
|
|
98
|
+
"command": "npx",
|
|
99
|
+
"args": ["--yes", "@shadowforge0/aquifer-memory", "mcp"],
|
|
100
|
+
"env": {
|
|
101
|
+
"DATABASE_URL": "postgresql://aquifer:aquifer@localhost:5432/aquifer",
|
|
102
|
+
"EMBED_PROVIDER": "ollama"
|
|
103
|
+
}
|
|
104
|
+
}
|
|
105
|
+
}
|
|
106
|
+
}
|
|
100
107
|
```
|
|
101
108
|
|
|
102
|
-
|
|
109
|
+
Or run it directly: `DATABASE_URL=... EMBED_PROVIDER=ollama npx aquifer mcp`. (MCP server itself stays strict about env — `quickstart`'s autodetect is the try-it path, not the production one.)
|
|
110
|
+
|
|
111
|
+
Need LLM summarization, the knowledge graph, OpenAI embeddings, or the reranker? See [Environment Variables](#environment-variables) below and [docs/setup.md](docs/setup.md).
|
|
103
112
|
|
|
104
113
|
---
|
|
105
114
|
|
|
@@ -236,7 +245,7 @@ Any host that supports MCP stdio can connect the same way — point it at `node
|
|
|
236
245
|
|
|
237
246
|
| File | Purpose |
|
|
238
247
|
|------|---------|
|
|
239
|
-
| `index.js` | Entry point — exports `createAquifer`, `createEmbedder`, `createReranker
|
|
248
|
+
| `index.js` | Entry point — exports `createAquifer`, `createEmbedder`, `createReranker` |
|
|
240
249
|
| `core/aquifer.js` | Main facade: `migrate()`, `ingest()`, `recall()`, `enrich()` |
|
|
241
250
|
| `core/storage.js` | Session/summary/turn CRUD, FTS search, embedding search |
|
|
242
251
|
| `core/entity.js` | Entity upsert, mention tracking, relation graph, normalization |
|
|
@@ -324,18 +333,17 @@ Built-in entity extraction and relationship tracking:
|
|
|
324
333
|
|
|
325
334
|
## Benchmark: LongMemEval
|
|
326
335
|
|
|
327
|
-
We tested Aquifer's retrieval pipeline on [LongMemEval_S](https://github.com/xiaowu0162/LongMemEval) — 470 questions across 19,195 sessions
|
|
336
|
+
We tested Aquifer's retrieval pipeline on [LongMemEval_S](https://github.com/xiaowu0162/LongMemEval) — 470 questions across 19,195 sessions with 98,795 turn embeddings. Per-question haystack scoping (matching the official protocol), bge-m3 embeddings via OpenRouter.
|
|
328
337
|
|
|
329
|
-
|
|
338
|
+
| Pipeline | R@1 | R@3 | R@5 | R@10 |
|
|
339
|
+
|----------|-----|-----|-----|------|
|
|
340
|
+
| Turn-only (cosine) | 89.5% | 96.6% | 98.1% | 98.9% |
|
|
341
|
+
| Three-way hybrid (FTS + session_emb + turn_emb → RRF) | 79.2% | 94.0% | 97.7% | 98.9% |
|
|
342
|
+
| **Hybrid + Cohere Rerank v3.5 (top-30)** | **96.0%** | **98.5%** | **99.3%** | **99.8%** |
|
|
330
343
|
|
|
331
|
-
|
|
332
|
-
|--------|-----------------|
|
|
333
|
-
| R@1 | 89.6% |
|
|
334
|
-
| R@3 | 96.6% |
|
|
335
|
-
| R@5 | 98.1% |
|
|
336
|
-
| R@10 | 98.9% |
|
|
344
|
+
Measured 2026-04-19 on Aquifer 1.2.1.
|
|
337
345
|
|
|
338
|
-
**Key
|
|
346
|
+
**Key findings.** Turn-level embedding alone beats session-level (26.8% → 89.5% R@1, a 3× improvement). Hybrid fusion adds robustness at R@3-R@10 but trades R@1 because FTS + session-level signals spread the top candidate across adjacent sessions. Re-ranking the hybrid top-30 with a cross-encoder (Cohere Rerank v3.5) wins back the top-1 precision and then some — +16.8pt R@1 over hybrid baseline, and 6.5pt above pure turn-level cosine. That's the production pipeline Aquifer ships by default when a reranker is configured.
|
|
339
347
|
|
|
340
348
|
### Multi-Tenant
|
|
341
349
|
|
|
@@ -408,7 +416,7 @@ const result = await aquifer.enrich('session-001', {
|
|
|
408
416
|
// Returns: { summary, turnsEmbedded, entitiesFound, warnings, effectiveModel, postProcessError }
|
|
409
417
|
```
|
|
410
418
|
|
|
411
|
-
**postProcess hook**: runs after transaction commit, receives full context (session, summary, embedding, parsedEntities, etc.). Best-effort, at-most-once.
|
|
419
|
+
**postProcess hook**: runs after transaction commit, receives full context (session, summary, embedding, parsedEntities, etc.). Best-effort, at-most-once. If the hook throws, the error is captured and returned as `postProcessError` on the enrich result — the session itself remains committed and is not retried.
|
|
412
420
|
|
|
413
421
|
#### `aquifer.recall(query, opts)`
|
|
414
422
|
|
|
@@ -0,0 +1,117 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
// ---------------------------------------------------------------------------
|
|
4
|
+
// Claude Code host adapter.
|
|
5
|
+
//
|
|
6
|
+
// Generic entry points for CC-side afterburn hooks. No persona logic — the
|
|
7
|
+
// caller (typically cc-afterburn.js) constructs the Miranda persona hooks
|
|
8
|
+
// via consumers/miranda and injects them via `postProcess`, `summaryFn`,
|
|
9
|
+
// `entityParseFn`.
|
|
10
|
+
//
|
|
11
|
+
// API:
|
|
12
|
+
// runEnrich({ aquifer, sessionId, agentId, ... })
|
|
13
|
+
// Enrich an already-committed session. Used by cc-afterburn after
|
|
14
|
+
// cc-session-to-pg has written the session row.
|
|
15
|
+
//
|
|
16
|
+
// runBackfill({ aquifer, sessionIds, ... })
|
|
17
|
+
// Iterate enrich() over pending sessions (for catch-up after a gap).
|
|
18
|
+
//
|
|
19
|
+
// runContextInject({ aquifer, pool, agentId })
|
|
20
|
+
// Return the Miranda-flavored system context string for a CC session
|
|
21
|
+
// start hook. (Delegates to consumers/miranda/context-inject.)
|
|
22
|
+
// ---------------------------------------------------------------------------
|
|
23
|
+
|
|
24
|
+
/**
 * Enrich one already-committed session through `aquifer.enrich`, passing
 * along whatever persona-specific hooks the caller supplies.
 *
 * A postProcess failure does not throw: aquifer.enrich reports it on the
 * result as `postProcessError`, which is logged here via logger.warn.
 *
 * @param {object} opts
 * @param {object} opts.aquifer - Aquifer instance (required).
 * @param {string} opts.sessionId - Id of the committed session (required).
 * @param {string} opts.agentId - Owning agent id (required).
 * @param {function} [opts.summaryFn] - Persona summary hook.
 * @param {function} [opts.entityParseFn] - Persona entity-parsing hook.
 * @param {function} [opts.postProcess] - Post-commit persona hook.
 * @param {object} [opts.logger=console] - Sink with optional info/warn.
 * @returns {Promise<object>} The result object from aquifer.enrich.
 * @throws {Error} If aquifer, sessionId, or agentId is missing.
 */
async function runEnrich({
  aquifer, sessionId, agentId,
  summaryFn = null, entityParseFn = null, postProcess = null,
  logger = console,
} = {}) {
  // Fail fast on the three required inputs.
  const required = { aquifer, sessionId, agentId };
  for (const key of Object.keys(required)) {
    if (!required[key]) throw new Error(`runEnrich: ${key} is required`);
  }

  // Hooks default to null for ergonomic call sites, but absent hooks are
  // handed to aquifer.enrich as undefined.
  const orUndefined = (hook) => hook || undefined;
  const result = await aquifer.enrich(sessionId, {
    agentId,
    summaryFn: orUndefined(summaryFn),
    entityParseFn: orUndefined(entityParseFn),
    postProcess: orUndefined(postProcess),
  });

  if (result.postProcessError && logger.warn) {
    logger.warn(`[cc-adapter] postProcess error for ${sessionId}: ${result.postProcessError.message}`);
  }
  if (logger.info) {
    logger.info(`[cc-adapter] enriched ${sessionId} (turns=${result.turnsEmbedded}, entities=${result.entitiesFound})`);
  }
  return result;
}

/**
 * Enrich a batch of sessions one at a time. A failure on any single session
 * is recorded and the batch continues; nothing is retried.
 *
 * @param {object} opts
 * @param {object} opts.aquifer - Aquifer instance (required).
 * @param {string[]} opts.sessionIds - Sessions to enrich, in order.
 * @param {function} opts.buildHooks - Called as (sessionId, agentId), may be
 *   async; returns { summaryFn?, entityParseFn?, postProcess? } so the
 *   caller can rebuild persona hooks per session.
 * @param {string} [opts.agentId='main'] - Agent id applied to every session.
 * @param {object} [opts.logger=console] - Sink with optional info/warn.
 * @returns {Promise<{ succeeded: object[], failed: object[] }>} Per-session
 *   outcomes: { sessionId, result } on success, { sessionId, error } on failure.
 * @throws {Error} If aquifer is missing, sessionIds is not an array, or
 *   buildHooks is not a function.
 */
async function runBackfill({
  aquifer, sessionIds, buildHooks,
  agentId = 'main', logger = console,
} = {}) {
  if (!aquifer) throw new Error('runBackfill: aquifer is required');
  if (!Array.isArray(sessionIds)) throw new Error('runBackfill: sessionIds must be an array');
  if (typeof buildHooks !== 'function') throw new Error('runBackfill: buildHooks must be a function');

  const outcome = { succeeded: [], failed: [] };
  for (const sessionId of sessionIds) {
    try {
      // Hooks are rebuilt per session so sessionId-sensitive state stays
      // fresh on each iteration.
      const hooks = await buildHooks(sessionId, agentId);
      const result = await runEnrich({
        aquifer, sessionId, agentId,
        summaryFn: hooks?.summaryFn,
        entityParseFn: hooks?.entityParseFn,
        postProcess: hooks?.postProcess,
        logger,
      });
      outcome.succeeded.push({ sessionId, result });
    } catch (err) {
      if (logger.warn) logger.warn(`[cc-adapter] backfill failed for ${sessionId}: ${err.message}`);
      outcome.failed.push({ sessionId, error: err.message });
    }
  }
  return outcome;
}
|
|
107
|
+
|
|
108
|
+
/**
 * Build the Miranda-flavored system context string for a Claude Code
 * SessionStart hook. Thin delegate: all options are forwarded untouched to
 * consumers/miranda/context-inject's computeInjection.
 *
 * @param {object} [opts] - Passed straight through to computeInjection.
 * @returns {Promise<*>} Whatever computeInjection resolves to.
 */
async function runContextInject(opts = {}) {
  // Required lazily so the miranda consumer is only loaded when context
  // injection is actually used.
  const contextInject = require('./miranda/context-inject');
  return contextInject.computeInjection(opts);
}

module.exports = { runEnrich, runBackfill, runContextInject };
|
package/consumers/cli.js
CHANGED
|
@@ -43,7 +43,7 @@ function parsePositiveInt(value, fallback) {
|
|
|
43
43
|
function parseArgs(argv) {
|
|
44
44
|
const args = { _: [], flags: {} };
|
|
45
45
|
// Flags that take a value (not boolean)
|
|
46
|
-
const VALUE_FLAGS = new Set(['limit', 'agent-id', 'source', 'date-from', 'date-to', 'output', 'format', 'config', 'status', 'concurrency', 'entities', 'entity-mode', 'session-id', 'verdict', 'note', 'db', 'since', 'min-messages', 'lookback-days', 'max-chars']);
|
|
46
|
+
const VALUE_FLAGS = new Set(['limit', 'agent-id', 'source', 'date-from', 'date-to', 'output', 'format', 'config', 'status', 'concurrency', 'entities', 'entity-mode', 'session-id', 'verdict', 'note', 'db', 'since', 'min-messages', 'lookback-days', 'max-chars', 'out']);
|
|
47
47
|
for (let i = 0; i < argv.length; i++) {
|
|
48
48
|
if (argv[i] === '--') { args._.push(...argv.slice(i + 1)); break; }
|
|
49
49
|
if (argv[i].startsWith('--')) {
|
|
@@ -360,6 +360,16 @@ Options:
|
|
|
360
360
|
return;
|
|
361
361
|
}
|
|
362
362
|
|
|
363
|
+
// mcp-contract: write canonical MCP tool manifest to disk. No Aquifer
|
|
364
|
+
// instance needed — manifest is static. Default path /tmp/aquifer-mcp-contract.json.
|
|
365
|
+
if (command === 'mcp-contract') {
|
|
366
|
+
const { writeMcpManifestFile } = require('../index');
|
|
367
|
+
const outPath = args.flags.out || '/tmp/aquifer-mcp-contract.json';
|
|
368
|
+
const written = writeMcpManifestFile(outPath);
|
|
369
|
+
console.log(`Wrote MCP manifest to ${written}`);
|
|
370
|
+
return;
|
|
371
|
+
}
|
|
372
|
+
|
|
363
373
|
// All other commands need an Aquifer instance
|
|
364
374
|
const configOverrides = {};
|
|
365
375
|
if (args.flags.config) {
|
|
@@ -367,6 +377,23 @@ Options:
|
|
|
367
377
|
process.env.AQUIFER_CONFIG = args.flags.config;
|
|
368
378
|
}
|
|
369
379
|
|
|
380
|
+
// quickstart is the try-it path: autodetect docker-compose defaults so a
|
|
381
|
+
// fresh `docker compose up -d && npx aquifer quickstart` works with zero env.
|
|
382
|
+
// Production commands (migrate, mcp, recall, ...) stay strict — they expect
|
|
383
|
+
// the operator to have set env explicitly.
|
|
384
|
+
if (command === 'quickstart') {
|
|
385
|
+
const { autodetectForQuickstart } = require('./shared/autodetect');
|
|
386
|
+
const detected = await autodetectForQuickstart(process.env);
|
|
387
|
+
if (Object.keys(detected).length > 0) {
|
|
388
|
+
console.log('Autodetected localhost services (env not set):');
|
|
389
|
+
for (const [k, v] of Object.entries(detected)) {
|
|
390
|
+
console.log(` ${k}=${v}`);
|
|
391
|
+
process.env[k] = v;
|
|
392
|
+
}
|
|
393
|
+
console.log(' Export these in your shell (or MCP client env) to make them permanent.\n');
|
|
394
|
+
}
|
|
395
|
+
}
|
|
396
|
+
|
|
370
397
|
const aquifer = createAquiferFromConfig(configOverrides);
|
|
371
398
|
|
|
372
399
|
try {
|
|
@@ -0,0 +1,196 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
// Aquifer default persona — parameterized daily_entries writer.
|
|
4
|
+
// Schema matches miranda.daily_entries (id / event_at / source / tag / text /
|
|
5
|
+
// agent_id / session_id / metadata / dedupe_key) — hosts clone that DDL into
|
|
6
|
+
// their own schema and set persona.dailyTable = '<schema>.daily_entries'.
|
|
7
|
+
//
|
|
8
|
+
// Host must create the table before use:
|
|
9
|
+
// CREATE TABLE jenny.daily_entries (LIKE miranda.daily_entries INCLUDING ALL);
|
|
10
|
+
|
|
11
|
+
const crypto = require('crypto');
|
|
12
|
+
const { parseHandoffSection } = require('../miranda/prompts/summary');
|
|
13
|
+
|
|
14
|
+
// Tags whose value is "the current one for the day": when a row carrying one
// of these tags collides on dedupe_key, insertDailyEntry upserts (new
// text/event_at/metadata replace the old). Rows with any other tag are
// insert-once (ON CONFLICT DO NOTHING).
const UPSERT_TAGS = new Set(['[FOCUS]', '[TODO]', '[STATS]', '[HIGHLIGHT]', '[SYSTEM]', '[HANDOFF]']);
|
|
15
|
+
|
|
16
|
+
/**
 * Format a Date as the calendar date (YYYY-MM-DD) it falls on in the
 * Asia/Taipei time zone.
 *
 * The 'sv-SE' locale is used because Swedish date formatting is already
 * ISO-shaped (YYYY-MM-DD).
 *
 * @param {Date} [now] - Moment to format; defaults to the current time.
 * @returns {string} Taipei-local date, e.g. '2024-05-01'.
 */
function taipeiDateString(now) {
  const moment = now || new Date();
  const isoLikeFormatter = new Intl.DateTimeFormat('sv-SE', {
    timeZone: 'Asia/Taipei',
    year: 'numeric',
    month: '2-digit',
    day: '2-digit',
  });
  return isoLikeFormatter.format(moment);
}
|
|
23
|
+
|
|
24
|
+
/**
 * Stable 6-hex-char fingerprint of a piece of text, used in dedupe keys.
 * The text is NFKC-normalized, whitespace-collapsed, trimmed, and
 * lowercased first, so cosmetic variants hash identically.
 *
 * @param {string} text - Input; null/undefined treated as empty.
 * @returns {string} First 6 hex chars of the SHA-256 digest, or the literal
 *   'empty' when nothing remains after normalization.
 */
function textHash6(text) {
  const canonical = (text || '')
    .normalize('NFKC')
    .replace(/\s+/g, ' ')
    .trim()
    .toLowerCase();
  if (canonical === '') return 'empty';
  const digest = crypto.createHash('sha256').update(canonical).digest('hex');
  return digest.slice(0, 6);
}
|
|
29
|
+
|
|
30
|
+
/**
 * Insert one row into a daily_entries table.
 *
 * Conflict behavior depends on the tag:
 *   - dedupeKey set AND tag in UPSERT_TAGS: upsert — the new
 *     text/event_at/metadata replace the existing row's.
 *   - otherwise: ON CONFLICT DO NOTHING — first write wins, and null is
 *     returned when the insert was deduped away.
 *
 * NOTE(review): tableName is interpolated directly into the SQL, so it must
 * come from trusted configuration (e.g. persona.dailyTable), never from
 * user input.
 *
 * @param {object} pool - pg Pool (anything with async query(sql, params)).
 * @param {string} tableName - Fully-qualified daily_entries table name.
 * @param {object} entry - { eventAt, source, tag, text, agentId, sessionId,
 *   metadata, dedupeKey }. agentId defaults to 'main'; metadata is
 *   JSON-serialized ('{}' when absent).
 * @returns {Promise<object|null>} The written row
 *   (id/event_at/source/tag/text), or null if nothing was inserted.
 */
async function insertDailyEntry(pool, tableName, { eventAt, source, tag, text, agentId, sessionId, metadata, dedupeKey }) {
  // Only dedupe-keyed rows carrying an "upsert" tag overwrite on conflict.
  const replaceOnConflict = dedupeKey ? UPSERT_TAGS.has(tag) : false;
  const conflictClause = replaceOnConflict
    ? `ON CONFLICT (dedupe_key) DO UPDATE SET
         text = EXCLUDED.text,
         event_at = EXCLUDED.event_at,
         metadata = EXCLUDED.metadata`
    : `ON CONFLICT (dedupe_key) DO NOTHING`;
  const sql = `INSERT INTO ${tableName}
       (event_at, source, tag, text, agent_id, session_id, metadata, dedupe_key)
     VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
     ${conflictClause}
     RETURNING id, event_at, source, tag, text`;
  const values = [
    eventAt,
    source,
    tag || null,
    text,
    agentId || 'main',
    sessionId || null,
    metadata ? JSON.stringify(metadata) : '{}',
    dedupeKey || null,
  ];
  const { rows } = await pool.query(sql, values);
  return rows[0] || null;
}
|
|
54
|
+
|
|
55
|
+
/**
 * Fetch every entry whose event_at falls on the given Taipei-local calendar
 * date, oldest first. Passing a falsy agentId returns entries for all
 * agents; a non-empty agentId filters to that agent only.
 *
 * @param {object} pool - pg Pool (anything with async query(sql, params)).
 * @param {string} tableName - Fully-qualified daily_entries table name
 *   (trusted configuration — interpolated into SQL).
 * @param {string} date - Taipei-local date, 'YYYY-MM-DD'.
 * @param {string} [agentId] - Optional agent filter.
 * @returns {Promise<object[]>} Matching rows ordered by event_at ascending.
 */
async function getDailyEntries(pool, tableName, date, agentId) {
  const sql = `SELECT * FROM ${tableName}
     WHERE (event_at AT TIME ZONE 'Asia/Taipei')::date = $1
       AND ($2::text IS NULL OR agent_id = $2)
     ORDER BY event_at ASC`;
  const { rows } = await pool.query(sql, [date, agentId || null]);
  return rows;
}
|
|
65
|
+
|
|
66
|
+
/**
 * Render today's journal as a compact context string for prompt injection:
 * the latest [FOCUS] line, the latest [TODO] block, and up to the last 20
 * untagged / [CLI] entries with their Taipei-local HH:MM stamps. Capped at
 * 3000 characters. Returns '' when no table is configured or the day has no
 * entries.
 *
 * @param {object} pool - pg Pool.
 * @param {string} date - Taipei-local date, 'YYYY-MM-DD'.
 * @param {string} [agentId] - Optional agent filter.
 * @param {string} [tableName] - daily_entries table; falsy disables the feature.
 * @returns {Promise<string>} Rendered context, possibly empty.
 */
async function fetchDailyContext(pool, date, agentId, tableName = null) {
  if (!tableName) return '';
  const rows = await getDailyEntries(pool, tableName, date, agentId);
  if (!rows || rows.length === 0) return '';

  // Last-written [FOCUS]/[TODO] win; untagged and [CLI] rows become the
  // timeline. All other tags are ignored here.
  let latestFocus = '';
  let latestTodo = '';
  const timeline = [];
  for (const entry of rows) {
    switch (entry.tag) {
      case '[FOCUS]':
        latestFocus = entry.text;
        break;
      case '[TODO]':
        latestTodo = entry.text;
        break;
      default:
        if (!entry.tag || entry.tag === '[CLI]') {
          const hhmm = new Date(entry.event_at).toLocaleTimeString('sv-SE', {
            timeZone: 'Asia/Taipei', hour: '2-digit', minute: '2-digit',
          });
          timeline.push(`- (${hhmm}) ${entry.text}`);
        }
    }
  }

  const sections = [];
  if (latestFocus) sections.push(`當前焦點: ${latestFocus}`);
  if (latestTodo) sections.push(`當前待辦:\n${latestTodo}`);
  const recent = timeline.slice(-20);
  if (recent.length > 0) sections.push(`今日紀錄:\n${recent.join('\n')}`);

  const rendered = sections.join('\n\n');
  return rendered.length > 3000
    ? rendered.slice(0, 3000) + '\n...(truncated)'
    : rendered;
}
|
|
91
|
+
|
|
92
|
+
/**
 * Persist one session's journal output into the configured daily_entries
 * table: free-form session bullets, an optional [FOCUS] update, a merged
 * [TODO] list, and a [HANDOFF] note.
 *
 * No-op (zero counts) when dailyTable is not configured.
 *
 * @param {object} opts
 * @param {object} [opts.sections] - Parsed summary sections; uses
 *   `session_entries` (markdown bullet list) and `handoff` (raw text fed to
 *   parseHandoffSection).
 * @param {object} [opts.recap] - Structured recap; uses `focus_decision`,
 *   `focus`, `todo_new`, `todo_done`.
 * @param {object} opts.pool - pg Pool.
 * @param {string} [opts.sessionId]
 * @param {string} [opts.agentId]
 * @param {object} [opts.logger=console] - Sink with optional info.
 * @param {string} [opts.source='afterburn'] - Recorded as the row source and
 *   baked into per-day dedupe keys, so different sources don't collide.
 * @param {string|null} [opts.tag=null] - Tag applied to session bullets only.
 * @param {Date} [opts.now] - Clock override; defaults to new Date().
 * @param {string} [opts.dailyTable] - Target table; falsy disables writes.
 * @returns {Promise<{inserted: number, focusUpdated: boolean, todoUpdated: boolean}>}
 */
async function writeDailyEntries({
  sections, recap, pool, sessionId, agentId, logger = console,
  source = 'afterburn', tag = null, now, dailyTable,
}) {
  if (!dailyTable) return { inserted: 0, focusUpdated: false, todoUpdated: false };
  if (!now) now = new Date();
  const date = taipeiDateString(now);
  let inserted = 0, focusUpdated = false, todoUpdated = false;

  // Session bullets
  if (sections?.session_entries) {
    const entryLines = sections.session_entries.split('\n');
    // Keep only markdown bullets; drop the '- ' prefix.
    const bullets = entryLines.filter(l => l.trim().startsWith('- ')).map(l => l.trim().slice(2));
    for (const bullet of bullets) {
      // A leading '(HH:MM)' stamp is stripped from the stored text. The
      // captured time itself is not used — event_at is `now`, not the
      // bullet's stamp. NOTE(review): looks intentional (dedupe is on the
      // stripped text), but confirm the stamp was not meant to set event_at.
      const timeMatch = bullet.match(/^\((\d{2}:\d{2})\)\s*(.*)/);
      const text = timeMatch ? timeMatch[2] : bullet;
      const eventAt = now.toISOString();
      // Dedupe on (day, normalized-text hash): the same bullet written twice
      // in one day is dropped by insertDailyEntry's DO NOTHING path.
      const row = await insertDailyEntry(pool, dailyTable, {
        eventAt, source, tag, text,
        agentId, sessionId, metadata: {},
        dedupeKey: `daily:${date}:${textHash6(text)}`,
      });
      if (row) inserted++;
    }
    if (logger.info) logger.info(`[default-persona] wrote ${inserted} daily entries to ${dailyTable}`);
  }

  // Focus
  // One [FOCUS] row per (day, source); later writes upsert over it
  // (tag is in UPSERT_TAGS).
  if (recap?.focus_decision === 'update' && recap.focus) {
    await insertDailyEntry(pool, dailyTable, {
      eventAt: now.toISOString(), source, tag: '[FOCUS]',
      text: recap.focus, agentId, sessionId,
      metadata: { proposed_by: source },
      dedupeKey: `daily:${date}:focus:${source}`,
    });
    focusUpdated = true;
    if (logger.info) logger.info(`[default-persona] focus updated: ${recap.focus.slice(0, 60)}`);
  }

  // TODO
  if (recap?.todo_new?.length > 0 || recap?.todo_done?.length > 0) {
    // Read back today's entries and take the LAST [TODO] row as the current
    // list (one item per line, '-'/'•' prefixes stripped).
    const todayEntries = await getDailyEntries(pool, dailyTable, date, agentId);
    let currentItems = [];
    for (const row of todayEntries) {
      if (row.tag === '[TODO]') {
        currentItems = row.text.split('\n').map(s => s.replace(/^[-•]\s*/, '').trim()).filter(Boolean);
      }
    }
    if (recap.todo_done?.length > 0) {
      for (const done of recap.todo_done) {
        const dl = done.toLowerCase();
        // Fuzzy completion match: drop items equal to, containing, or
        // contained in the "done" text (case-insensitive).
        currentItems = currentItems.filter(item => {
          const il = item.toLowerCase();
          return il !== dl && !il.includes(dl) && !dl.includes(il);
        });
      }
    }
    if (recap.todo_new?.length > 0) {
      for (const n of recap.todo_new) {
        // Exact (case-insensitive) dedupe only for additions.
        if (!currentItems.some(i => i.toLowerCase() === n.toLowerCase())) currentItems.push(n);
      }
    }
    // Upserts the single per-(day, source) [TODO] row with the merged list.
    await insertDailyEntry(pool, dailyTable, {
      eventAt: now.toISOString(), source, tag: '[TODO]',
      text: currentItems.map(i => `- ${i}`).join('\n') || '(全部完成)',
      agentId, sessionId,
      metadata: { proposed_by: source, todo_new: recap.todo_new, todo_done: recap.todo_done },
      dedupeKey: `daily:${date}:todo:${source}`,
    });
    todoUpdated = true;
    if (logger.info) logger.info(`[default-persona] todo updated: ${currentItems.length} items`);
  }

  // Handoff
  if (sections?.handoff) {
    // parseHandoffSection yields { status, lastStep, next, decided, blocker }
    // (fields as used below) or a falsy value when the section is unusable.
    const handoff = parseHandoffSection(sections.handoff);
    if (handoff) {
      let handoffText;
      switch (handoff.status) {
        case 'completed': handoffText = `上一段已完成 ${handoff.lastStep}`; break;
        case 'blocked': handoffText = `上一段卡在 ${handoff.lastStep}`; break;
        default: handoffText = `上一段停在 ${handoff.lastStep}`;
      }
      if (handoff.next && handoff.next !== '無') handoffText += `,下一步建議 ${handoff.next}`;
      if (handoff.decided) handoffText += `,已決定 ${handoff.decided}`;
      // 'blocked' status already put the blocker in the lead clause; only
      // append it for other statuses.
      if (handoff.blocker && handoff.status !== 'blocked') handoffText += `,卡在 ${handoff.blocker}`;
      handoffText += '。';
      await insertDailyEntry(pool, dailyTable, {
        eventAt: now.toISOString(), source, tag: '[HANDOFF]',
        text: handoffText, agentId, sessionId,
        metadata: { ...handoff, proposed_by: source },
        dedupeKey: `daily:${date}:handoff:${source}`,
      });
      if (logger.info) logger.info(`[default-persona] handoff written: ${handoffText.slice(0, 80)}`);
    }
  }

  return { inserted, focusUpdated, todoUpdated };
}

module.exports = {
  taipeiDateString, textHash6,
  insertDailyEntry, getDailyEntries, fetchDailyContext, writeDailyEntries,
  UPSERT_TAGS,
};
|