@shadowforge0/aquifer-memory 1.7.0 → 1.8.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. package/.env.example +8 -0
  2. package/README.md +66 -0
  3. package/aquifer.config.example.json +19 -0
  4. package/consumers/cli.js +217 -14
  5. package/consumers/codex-active-checkpoint.js +186 -0
  6. package/consumers/codex-current-memory.js +106 -0
  7. package/consumers/codex-handoff.js +442 -3
  8. package/consumers/codex.js +164 -107
  9. package/consumers/mcp.js +144 -6
  10. package/consumers/shared/config.js +60 -1
  11. package/consumers/shared/factory.js +10 -3
  12. package/core/aquifer.js +351 -840
  13. package/core/backends/capabilities.js +89 -0
  14. package/core/backends/local.js +430 -0
  15. package/core/legacy-bootstrap.js +140 -0
  16. package/core/mcp-manifest.js +66 -2
  17. package/core/memory-promotion.js +157 -26
  18. package/core/memory-recall.js +341 -22
  19. package/core/memory-records.js +128 -8
  20. package/core/memory-serving.js +132 -0
  21. package/core/postgres-migrations.js +533 -0
  22. package/core/public-session-filter.js +40 -0
  23. package/core/recall-runtime.js +115 -0
  24. package/core/scope-attribution.js +279 -0
  25. package/core/session-checkpoint-producer.js +412 -0
  26. package/core/session-checkpoints.js +432 -0
  27. package/core/session-finalization.js +82 -1
  28. package/core/storage-checkpoints.js +546 -0
  29. package/core/storage.js +121 -8
  30. package/docs/setup.md +22 -0
  31. package/package.json +8 -4
  32. package/schema/014-v1-checkpoint-runs.sql +349 -0
  33. package/schema/015-v1-evidence-items.sql +92 -0
  34. package/schema/016-v1-evidence-ref-multi-item.sql +19 -0
  35. package/schema/017-v1-memory-record-embeddings.sql +25 -0
  36. package/schema/018-v1-finalization-candidate-envelope.sql +39 -0
  37. package/scripts/codex-checkpoint-commands.js +464 -0
  38. package/scripts/codex-checkpoint-runtime.js +520 -0
  39. package/scripts/codex-recovery.js +105 -0
@@ -0,0 +1,132 @@
1
+ 'use strict';
2
+
3
function splitScopePath(value) {
  // Accept either an array of path segments or a comma-delimited string.
  // Returns trimmed, non-empty segments; null when the input is not usable.
  if (Array.isArray(value)) {
    return value
      .map((segment) => String(segment).trim())
      .filter((segment) => segment.length > 0);
  }
  if (typeof value !== 'string') return null;
  const segments = [];
  for (const piece of value.split(',')) {
    const trimmed = piece.trim();
    if (trimmed) segments.push(trimmed);
  }
  return segments.length > 0 ? segments : null;
}
9
+
10
function hasEvidenceBoundary(opts = {}) {
  // True when the caller supplied any legacy evidence-plane filter
  // (agent/source/date/host/session) or an unsafe-debug flag.
  if (opts.allowUnsafeDebug === true || opts.unsafeDebug === true) return true;
  if (Array.isArray(opts.agentIds) && opts.agentIds.length > 0) return true;
  const scalarFilters = [
    opts.agentId,
    opts.source,
    opts.dateFrom,
    opts.dateTo,
    opts.host,
    opts.sessionId,
  ];
  return scalarFilters.some(Boolean);
}
23
+
24
function assertCuratedRecallOpts(opts = {}) {
  // Reject legacy evidence-plane filters on the curated recall path.
  // A key counts as "set" when it is neither undefined nor null.
  const legacyKeys = ['agentId', 'agentIds', 'source', 'dateFrom', 'dateTo', 'entities', 'entityMode', 'weights', 'rerank', 'allowUnsafeDebug', 'unsafeDebug'];
  const offending = legacyKeys.filter((key) => opts[key] !== undefined && opts[key] !== null);
  if (offending.length > 0) {
    throw new Error(`curated memory_recall does not support legacy filters: ${offending.join(', ')}. Use activeScopeKey/activeScopePath or historical_recall.`);
  }
}
33
+
34
function assertCuratedBootstrapOpts(opts = {}) {
  // Reject legacy evidence-plane filters on the curated bootstrap path.
  // A key counts as "set" when it is neither undefined nor null.
  const legacyKeys = ['agentId', 'source', 'lookbackDays', 'dateFrom', 'dateTo'];
  const offending = legacyKeys.filter((key) => opts[key] !== undefined && opts[key] !== null);
  if (offending.length > 0) {
    throw new Error(`curated session_bootstrap does not support legacy filters: ${offending.join(', ')}. Use activeScopeKey/activeScopePath.`);
  }
}
43
+
44
function curatedRecallTitle(row = {}) {
  // First truthy candidate wins, in descending preference order;
  // falls back to the literal 'memory' when nothing is set.
  const candidates = [row.title, row.summary, row.canonical_key, row.canonicalKey, row.memory_type, row.memoryType];
  const picked = candidates.find(Boolean) ?? 'memory';
  return String(picked).trim();
}
48
+
49
function curatedRecallSummary(row = {}) {
  // First truthy candidate wins, in descending preference order;
  // falls back to the empty string when nothing is set.
  const candidates = [row.summary, row.title, row.canonical_key, row.canonicalKey];
  const picked = candidates.find(Boolean) ?? '';
  return String(picked).trim();
}
53
+
54
function normalizeCuratedRecallRow(row = {}) {
  // Strip the raw embedding vector so it never leaks into the public payload.
  const { embedding: _embedding, ...publicRow } = row;
  void _embedding;

  // Canonical fields may arrive in camelCase (app) or snake_case (SQL);
  // first truthy spelling wins, mirroring the raw row's precedence.
  const memoryId = row.memoryId || row.memory_id || row.id || null;
  const memoryIdText = memoryId === null ? null : String(memoryId);
  const canonicalKey = row.canonicalKey || row.canonical_key || null;
  const memoryType = row.memoryType || row.memory_type || null;
  const scopeKey = row.scopeKey || row.scope_key || null;
  const scopeKind = row.scopeKind || row.scope_kind || null;
  const title = curatedRecallTitle(row) || null;
  const summaryText = curatedRecallSummary(row) || null;

  // Score precedence: recall_score, then score, then lexical_rank;
  // non-numeric values normalize to null.
  const rawScore = row.recall_score ?? row.score ?? row.lexical_rank ?? null;
  const numericScore = rawScore === null ? null : Number(rawScore);

  return {
    ...publicRow,
    memoryId: memoryIdText,
    canonicalKey,
    memoryType,
    scopeKey,
    scopeKind,
    title,
    summaryText,
    structuredSummary: {
      title,
      overview: summaryText,
    },
    startedAt: row.acceptedAt || row.accepted_at || row.observedAt || row.observed_at || null,
    score: Number.isFinite(numericScore) ? numericScore : null,
    // Stable handle consumers can echo back when submitting feedback.
    feedbackTarget: {
      kind: 'memory_feedback',
      memoryId: memoryIdText,
      canonicalKey,
    },
  };
}
88
+
89
function createMemoryServingRuntime(memoryCfg = {}, env = process.env) {
  // Serving-mode precedence: explicit config, then env var, then 'legacy'.
  const servingMode = memoryCfg.servingMode || env.AQUIFER_MEMORY_SERVING_MODE || 'legacy';
  const defaultActiveScopeKey = memoryCfg.activeScopeKey || null;
  const defaultActiveScopePath = splitScopePath(memoryCfg.activeScopePath || null);

  // Resolve the effective mode for one call; 'evidence' is accepted as an
  // alias for 'legacy'. Anything else is a hard configuration error.
  const resolveMode = (opts = {}) => {
    const requested = opts.memoryMode || opts.servingMode || servingMode;
    switch (requested) {
      case 'legacy':
      case 'evidence':
        return 'legacy';
      case 'curated':
        return 'curated';
      default:
        throw new Error(`Invalid memory serving mode: "${requested}". Must be one of: legacy, curated`);
    }
  };

  // Copy opts and fill in the configured default scope when the caller
  // supplied none. A non-empty activeScopePath implies its last segment
  // as the activeScopeKey.
  const withDefaultScope = (opts = {}) => {
    const merged = { ...opts };
    if (!merged.activeScopePath && defaultActiveScopePath) {
      merged.activeScopePath = defaultActiveScopePath;
    }
    if (Array.isArray(merged.activeScopePath) && merged.activeScopePath.length > 0) {
      if (!merged.activeScopeKey) {
        merged.activeScopeKey = merged.activeScopePath[merged.activeScopePath.length - 1];
      }
      return merged;
    }
    if (!merged.activeScopeKey && defaultActiveScopeKey) {
      merged.activeScopeKey = defaultActiveScopeKey;
    }
    return merged;
  };

  return {
    assertCuratedBootstrapOpts,
    assertCuratedRecallOpts,
    defaultActiveScopeKey,
    defaultActiveScopePath,
    hasEvidenceBoundary,
    normalizeCuratedRecallRow,
    resolveMode,
    servingMode,
    withDefaultScope,
  };
}
124
+
125
// Public surface: stateless curated-serving helpers plus the runtime factory.
module.exports = {
  assertCuratedBootstrapOpts,
  assertCuratedRecallOpts,
  createMemoryServingRuntime,
  hasEvidenceBoundary,
  normalizeCuratedRecallRow,
  splitScopePath,
};
@@ -0,0 +1,533 @@
1
+ 'use strict';
2
+
3
+ const fs = require('fs');
4
+ const path = require('path');
5
+
6
// PostgreSQL identifier shape: letter/underscore first, word characters
// after, 63 characters maximum (1 + up to 62).
const SCHEMA_RE = /^[a-zA-Z_]\w{0,62}$/;

function validateSchema(schema) {
  // Throws on any name that could not be used safely as a schema identifier.
  if (SCHEMA_RE.test(schema)) return;
  throw new Error(`Invalid schema name: "${schema}". Must match /^[a-zA-Z_]\\w{0,62}$/`);
}
13
+
14
function qi(identifier) {
  // Quote a PostgreSQL identifier for safe interpolation into DDL.
  // Fix: embedded double quotes are doubled per the SQL standard, so a
  // value containing `"` can no longer break out of the quoted identifier.
  // Quote-free inputs (the only ones validateSchema admits) are unchanged.
  return `"${String(identifier).replace(/"/g, '""')}"`;
}
17
+
18
function loadSql(filename, schema) {
  // Read a migration file from ../schema and substitute every ${schema}
  // placeholder with the quoted schema identifier. split/join performs a
  // literal global replacement (no regex, no `$` replacement patterns).
  const sqlPath = path.join(__dirname, '..', 'schema', filename);
  const template = fs.readFileSync(sqlPath, 'utf8');
  return template.split('${schema}').join(qi(schema));
}
23
+
24
// Ordered migration ledger. Each entry names the SQL file to execute and a
// "signature" used to probe whether the migration already ran:
//   string            -> a table the migration creates
//   { table, column } -> a column the migration adds to an existing table
//   { index }         -> an index the migration creates
//   array             -> every listed signature must be present
// Entries with `always: true` apply unconditionally; `gate` entries apply
// only when the matching feature flag (entities/facts) is enabled.
const MIGRATION_PLAN = [
  { id: '001-base', file: '001-base.sql', always: true, signature: 'sessions' },
  { id: '002-entities', file: '002-entities.sql', gate: 'entities', signature: 'entities' },
  { id: '003-trust-feedback', file: '003-trust-feedback.sql', always: true, signature: 'session_feedback' },
  { id: '004-facts', file: '004-facts.sql', gate: 'facts', signature: 'facts' },
  { id: '004-completion', file: '004-completion.sql', always: true, signature: 'narratives' },
  { id: '005-entity-state-history', file: '005-entity-state-history.sql', gate: 'entities', signature: 'entity_state_history' },
  { id: '006-insights', file: '006-insights.sql', always: true, signature: 'insights' },
  { id: '007-v1-foundation', file: '007-v1-foundation.sql', always: true, signature: 'memory_records' },
  { id: '008-session-finalizations', file: '008-session-finalizations.sql', always: true, signature: 'session_finalizations' },
  { id: '009-v1-assertion-plane', file: '009-v1-assertion-plane.sql', always: true, signature: 'fact_assertions_v1' },
  { id: '010-v1-finalization-review', file: '010-v1-finalization-review.sql', always: true, signature: 'finalization_candidates' },
  { id: '011-v1-compaction-claim', file: '011-v1-compaction-claim.sql', always: true, signature: { table: 'compaction_runs', column: 'apply_token' } },
  { id: '012-v1-compaction-lease', file: '012-v1-compaction-lease.sql', always: true, signature: { table: 'compaction_runs', column: 'lease_expires_at' } },
  { id: '013-v1-compaction-lineage', file: '013-v1-compaction-lineage.sql', always: true, signature: 'compaction_candidates' },
  {
    id: '014-v1-checkpoint-runs',
    file: '014-v1-checkpoint-runs.sql',
    always: true,
    signature: [
      { table: 'session_finalizations', column: 'scope_snapshot' },
      { table: 'checkpoint_runs', column: 'scope_id' },
      { table: 'checkpoint_run_sources', column: 'finalization_id' },
    ],
  },
  {
    id: '015-v1-evidence-items',
    file: '015-v1-evidence-items.sql',
    always: true,
    signature: [
      'evidence_items',
      { table: 'evidence_refs', column: 'evidence_item_id' },
    ],
  },
  {
    id: '016-v1-evidence-ref-multi-item',
    file: '016-v1-evidence-ref-multi-item.sql',
    always: true,
    signature: [
      { index: 'idx_evidence_refs_source_dedupe' },
      { index: 'idx_evidence_refs_evidence_item_dedupe' },
    ],
  },
  {
    id: '017-v1-memory-record-embeddings',
    file: '017-v1-memory-record-embeddings.sql',
    always: true,
    signature: [
      { table: 'memory_records', column: 'embedding' },
    ],
  },
  {
    id: '018-v1-finalization-candidate-envelope',
    file: '018-v1-finalization-candidate-envelope.sql',
    always: true,
    signature: [
      { table: 'session_finalizations', column: 'candidate_envelope' },
      { table: 'finalization_candidates', column: 'candidate_hash' },
    ],
  },
];
85
+
86
/**
 * Build the Postgres migration runtime for one schema.
 *
 * @param {object} opts
 * @param {object} opts.pool - pg Pool (or any object with a compatible
 *   `query`, and optionally `connect`, interface).
 * @param {string} opts.schema - target schema name (validated elsewhere).
 * @param {object} [opts.migrations] - { mode, lockTimeoutMs,
 *   startupTimeoutMs, onEvent } migration configuration.
 * @param {Function} [opts.getEntitiesEnabled] - feature-flag probe for
 *   entity-gated migrations.
 * @param {Function} [opts.getFactsEnabled] - feature-flag probe for
 *   fact-gated migrations.
 * @param {string|null} [opts.initialFtsConfig] - pre-resolved FTS config
 *   name; when null, migrate() probes for 'zhcfg' and falls back to 'simple'.
 * @returns {object} runtime with init/migrate/ensureMigrated/plan helpers.
 */
function createPostgresMigrationRuntime(opts = {}) {
  const {
    pool,
    schema,
    migrations = {},
    getEntitiesEnabled = () => false,
    getFactsEnabled = () => false,
    initialFtsConfig = null,
  } = opts;

  // Mutable runtime state shared by the closures below.
  let migrated = false;          // true once the schema is known up to date
  let migratePromise = null;     // in-flight migrate(), to dedupe callers
  let ftsConfig = initialFtsConfig;

  // Validate migrations.mode eagerly so misconfiguration fails at
  // construction time, not on first use. Unset means 'apply'.
  const migrationsMode = (() => {
    const raw = migrations.mode;
    if (raw === 'apply' || raw === 'check' || raw === 'off') return raw;
    if (raw === undefined || raw === null) return 'apply';
    throw new Error(`config.migrations.mode must be 'apply' | 'check' | 'off' (got ${JSON.stringify(raw)})`);
  })();
  // Non-finite timeout values silently fall back to the defaults.
  const migrationLockTimeoutMs = Number.isFinite(migrations.lockTimeoutMs)
    ? Math.max(0, migrations.lockTimeoutMs) : 30000;
  const migrationStartupTimeoutMs = Number.isFinite(migrations.startupTimeoutMs)
    ? Math.max(0, migrations.startupTimeoutMs) : 60000;
  const migrationOnEvent = typeof migrations.onEvent === 'function' ? migrations.onEvent : null;

  // Fire a lifecycle event at the optional observer; a throwing observer
  // must never break the migration itself, so errors are only logged.
  function emitMigrationEvent(name, payload) {
    if (!migrationOnEvent) return;
    try {
      migrationOnEvent({ name, schema, ...payload });
    } catch (err) {
      console.warn(`[aquifer] migrations.onEvent handler threw: ${err.message}`);
    }
  }

  // IDs of every migration that applies under the current feature flags.
  function requiredMigrations() {
    return MIGRATION_PLAN
      .filter(m => m.always
        || (m.gate === 'entities' && getEntitiesEnabled())
        || (m.gate === 'facts' && getFactsEnabled()))
      .map(m => m.id);
  }

  // Infer which migrations already ran by probing the database catalogs for
  // each migration's signature (tables, columns, indexes) — there is no
  // dedicated migrations bookkeeping table. A migration counts as applied
  // only when ALL of its signatures are present.
  async function readAppliedMigrations(queryRunner) {
    const required = MIGRATION_PLAN.filter(m => m.always
      || (m.gate === 'entities' && getEntitiesEnabled())
      || (m.gate === 'facts' && getFactsEnabled()));
    const normalizedSignatures = required.flatMap((m) => {
      if (Array.isArray(m.signature)) return m.signature;
      return [m.signature];
    });
    // Partition signatures by probe type: table names, table.column pairs,
    // and index names each need a different catalog query.
    const tableSignatures = normalizedSignatures
      .filter(signature => typeof signature === 'string');
    const columnSignatures = normalizedSignatures
      .filter(signature => signature && typeof signature === 'object' && signature.table && signature.column);
    const indexSignatures = normalizedSignatures
      .filter(signature => signature && typeof signature === 'object' && signature.index);
    const presentTables = new Set();
    const presentColumns = new Set();
    const presentIndexes = new Set();
    if (tableSignatures.length > 0) {
      const r = await queryRunner.query(
        `SELECT tablename FROM pg_tables
         WHERE schemaname = $1 AND tablename = ANY($2::text[])`,
        [schema, tableSignatures]
      );
      for (const row of r.rows) presentTables.add(row.tablename);
    }
    if (columnSignatures.length > 0) {
      const tables = [...new Set(columnSignatures.map(signature => signature.table))];
      const r = await queryRunner.query(
        `SELECT table_name, column_name
         FROM information_schema.columns
         WHERE table_schema = $1 AND table_name = ANY($2::text[])`,
        [schema, tables]
      );
      for (const row of r.rows) presentColumns.add(`${row.table_name}.${row.column_name}`);
    }
    if (indexSignatures.length > 0) {
      const indexes = indexSignatures.map(signature => signature.index);
      const r = await queryRunner.query(
        `SELECT indexname FROM pg_indexes
         WHERE schemaname = $1 AND indexname = ANY($2::text[])`,
        [schema, indexes]
      );
      for (const row of r.rows) presentIndexes.add(row.indexname);
    }
    return required
      .filter(m => {
        const signatures = Array.isArray(m.signature) ? m.signature : [m.signature];
        return signatures.every((signature) => {
          if (typeof signature === 'string') return presentTables.has(signature);
          if (signature && signature.index) return presentIndexes.has(signature.index);
          return presentColumns.has(`${signature.table}.${signature.column}`);
        });
      })
      .map(m => m.id);
  }

  // Snapshot of required / applied / pending migration IDs.
  async function buildMigrationPlan(queryRunner) {
    const required = requiredMigrations();
    const applied = await readAppliedMigrations(queryRunner);
    const appliedSet = new Set(applied);
    const pending = required.filter(id => !appliedSet.has(id));
    return { required, applied, pending };
  }

  // Lazy entry point: make sure migrations ran (per migrationsMode) before
  // serving a query. Concurrent callers share one in-flight migrate().
  // In 'check' mode this never applies DDL; plan-probe failures are
  // deliberately swallowed (best-effort) and simply leave `migrated` false.
  async function ensureMigrated() {
    if (migrated) return;
    if (migratePromise) return migratePromise;
    if (migrationsMode === 'off') {
      migrated = true;
      return;
    }
    if (migrationsMode === 'check') {
      const plan = await buildMigrationPlan(pool).catch(() => null);
      if (plan && plan.pending.length === 0) migrated = true;
      return;
    }
    migratePromise = migrate().finally(() => { migratePromise = null; });
    return migratePromise;
  }

  // Apply all required DDL under a Postgres advisory lock so that multiple
  // processes starting at once do not run migrations concurrently.
  async function migrate() {
    const t0 = Date.now();
    // Derive a stable 31-bit advisory-lock key from the schema name.
    const lockKey = Buffer.from(`aquifer:${schema}`).reduce((h, b) => (h * 31 + b) & 0x7fffffff, 0);

    emitMigrationEvent('init_started', { mode: migrationsMode });

    // Prefer a dedicated checked-out client: advisory locks are
    // session-scoped, so lock and unlock must happen on one connection.
    // NOTE(review): when `pool.connect` is absent, `pool` itself is used —
    // that only holds the session invariant if `pool` is a single
    // connection; verify against callers.
    const supportsCheckout = typeof pool.connect === 'function';
    const client = supportsCheckout ? await pool.connect() : pool;
    const releasesClient = supportsCheckout && typeof client.release === 'function';
    // Capture server NOTICE/WARNING messages emitted during DDL so they can
    // be surfaced after the run and attached to any failure.
    const notices = [];
    const onNotice = (n) => {
      notices.push({ severity: n.severity || 'NOTICE', message: n.message || String(n) });
    };
    const hasEvents = typeof client.on === 'function' && typeof client.off === 'function';
    if (hasEvents) client.on('notice', onNotice);

    const ddlExecuted = [];
    let lockAcquired = false;

    try {
      // Best-effort pre-check so observers see what was pending going in.
      const planBefore = await buildMigrationPlan(client).catch(() => null);
      emitMigrationEvent('check_completed', {
        required: planBefore ? planBefore.required : requiredMigrations(),
        applied: planBefore ? planBefore.applied : [],
        pending: planBefore ? planBefore.pending : requiredMigrations(),
      });

      // Poll pg_try_advisory_lock until acquired or the deadline passes.
      const lockDeadline = Date.now() + migrationLockTimeoutMs;
      const pollMs = 250;
      while (true) {
        const r = await client.query('SELECT pg_try_advisory_lock($1) AS ok', [lockKey]);
        const row = r && r.rows ? r.rows[0] : null;
        if (row && row.ok === false) {
          if (Date.now() >= lockDeadline) break;
          await new Promise(res => setTimeout(res, pollMs));
          continue;
        }
        lockAcquired = true;
        break;
      }
      if (!lockAcquired) {
        const err = new Error(`aquifer: failed to acquire migration advisory lock within ${migrationLockTimeoutMs}ms for schema "${schema}"`);
        err.code = 'AQ_MIGRATION_LOCK_TIMEOUT';
        err.failedAt = 'acquire_lock';
        throw err;
      }

      emitMigrationEvent('apply_started', {
        pending: planBefore ? planBefore.pending : requiredMigrations(),
      });

      try {
        // Migrations are idempotent at the SQL level (re-run safe), so the
        // whole plan is replayed in order; gated files run only when their
        // feature flag is on.
        await client.query(loadSql('001-base.sql', schema));
        ddlExecuted.push('001-base');

        if (getEntitiesEnabled()) {
          await client.query(loadSql('002-entities.sql', schema));
          ddlExecuted.push('002-entities');
        }

        await client.query(loadSql('003-trust-feedback.sql', schema));
        ddlExecuted.push('003-trust-feedback');

        if (getFactsEnabled()) {
          await client.query(loadSql('004-facts.sql', schema));
          ddlExecuted.push('004-facts');
        }

        await client.query(loadSql('004-completion.sql', schema));
        ddlExecuted.push('004-completion');

        if (getEntitiesEnabled()) {
          await client.query(loadSql('005-entity-state-history.sql', schema));
          ddlExecuted.push('005-entity-state-history');
        }

        // Unconditional tail migrations, applied strictly in order.
        for (const migration of [
          ['006-insights.sql', '006-insights'],
          ['007-v1-foundation.sql', '007-v1-foundation'],
          ['008-session-finalizations.sql', '008-session-finalizations'],
          ['009-v1-assertion-plane.sql', '009-v1-assertion-plane'],
          ['010-v1-finalization-review.sql', '010-v1-finalization-review'],
          ['011-v1-compaction-claim.sql', '011-v1-compaction-claim'],
          ['012-v1-compaction-lease.sql', '012-v1-compaction-lease'],
          ['013-v1-compaction-lineage.sql', '013-v1-compaction-lineage'],
          ['014-v1-checkpoint-runs.sql', '014-v1-checkpoint-runs'],
          ['015-v1-evidence-items.sql', '015-v1-evidence-items'],
          ['016-v1-evidence-ref-multi-item.sql', '016-v1-evidence-ref-multi-item'],
          ['017-v1-memory-record-embeddings.sql', '017-v1-memory-record-embeddings'],
          ['018-v1-finalization-candidate-envelope.sql', '018-v1-finalization-candidate-envelope'],
        ]) {
          await client.query(loadSql(migration[0], schema));
          ddlExecuted.push(migration[1]);
        }

        migrated = true;
      } finally {
        // Always release the advisory lock, even on DDL failure; a failed
        // unlock is logged but never masks the original error.
        await client.query('SELECT pg_advisory_unlock($1)', [lockKey]).catch((err) => {
          console.warn(`[aquifer] failed to release migration advisory lock for schema "${schema}": ${err.message}`);
        });
      }
    } catch (err) {
      // Enrich the error with captured notices and the failing phase,
      // preserving any failedAt set earlier (e.g. 'acquire_lock').
      err.notices = Array.isArray(err.notices) ? err.notices : notices.slice();
      err.failedAt = err.failedAt || 'apply_ddl';
      emitMigrationEvent('apply_failed', {
        error: { code: err.code || null, message: err.message },
        failedAt: err.failedAt,
        notices: err.notices,
        durationMs: Date.now() - t0,
      });
      throw err;
    } finally {
      if (hasEvents) client.off('notice', onNotice);
      if (releasesClient) client.release();
    }

    // Surface captured server messages: warnings/errors loudly, and only
    // aquifer-tagged notices otherwise (plain notices stay quiet).
    for (const n of notices) {
      const sev = (n.severity || 'NOTICE').toUpperCase();
      const msg = n.message || '';
      const line = `[aquifer] migration ${sev.toLowerCase()}: ${msg}`;
      if (sev === 'WARNING' || sev === 'ERROR') {
        console.warn(line);
      } else if (sev === 'NOTICE' && msg.startsWith('[aquifer]')) {
        process.stderr.write(line + '\n');
      }
    }

    // Resolve the FTS configuration once: prefer the 'zhcfg' text-search
    // config in the public namespace, otherwise fall back to 'simple'.
    if (!ftsConfig) {
      try {
        const r = await pool.query(
          `SELECT 1 FROM pg_ts_config
           WHERE cfgname = 'zhcfg' AND cfgnamespace = 'public'::regnamespace
           LIMIT 1`);
        ftsConfig = r.rowCount > 0 ? 'zhcfg' : 'simple';
      } catch {
        ftsConfig = 'simple';
      }
    }

    // Diagnostic post-flight: report which CJK tokenizer extensions exist,
    // which parser backs zhcfg, and pre-warm the tokenizer so the first
    // real query does not pay dictionary-load latency. Purely informative —
    // failures here never fail the migration.
    try {
      const f = await pool.query(`
        SELECT
          EXISTS(SELECT 1 FROM pg_extension WHERE extname='pg_jieba') AS have_jieba,
          EXISTS(SELECT 1 FROM pg_extension WHERE extname='zhparser') AS have_zhparser,
          (SELECT p.prsname FROM pg_ts_config c
           JOIN pg_ts_parser p ON c.cfgparser = p.oid
           WHERE c.cfgname='zhcfg' AND c.cfgnamespace='public'::regnamespace
           LIMIT 1) AS zhcfg_parser
      `);
      const row = f.rows[0] || {};
      const backend = row.zhcfg_parser
        ? `zhcfg(parser=${row.zhcfg_parser})`
        : `simple (no zhcfg in public namespace)`;

      let warmupMs = null;
      if (row.zhcfg_parser) {
        const t0Warmup = Date.now();
        await pool.query(`SELECT to_tsvector('zhcfg', $1)`, ['warmup 記憶系統 aquifer'])
          .catch(() => {});
        warmupMs = Date.now() - t0Warmup;
      }

      const warmupNote = warmupMs !== null ? ` warmup=${warmupMs}ms` : '';
      process.stderr.write(
        `[aquifer] FTS post-flight: backend=${backend} ` +
        `jieba=${row.have_jieba} zhparser=${row.have_zhparser} ` +
        `selected=${ftsConfig}${warmupNote}\n`
      );
      if (warmupMs !== null && warmupMs > 500) {
        process.stderr.write(
          `[aquifer] Note: first FTS call paid ~${warmupMs}ms for tokenizer init ` +
          `(dictionary mmap). Subsequent calls on the same backend are cached.\n`
        );
      }
    } catch (err) {
      console.warn(`[aquifer] FTS post-flight check failed: ${err.message}`);
    }

    const durationMs = Date.now() - t0;
    emitMigrationEvent('apply_succeeded', {
      ddlExecuted,
      durationMs,
      notices: notices.slice(),
    });
    return { ok: true, durationMs, notices: notices.slice(), ddlExecuted };
  }

  // Plan snapshot for operators; lastRunAt is always null because no
  // run-history bookkeeping is persisted (applied state is inferred).
  async function listPendingMigrations() {
    const plan = await buildMigrationPlan(pool);
    return { ...plan, lastRunAt: null };
  }

  // Startup entry point: probe the plan, then (per mode) apply pending
  // migrations. Never throws — returns a status object with ready flag,
  // resulting memoryMode ('rw' / 'ro' / 'off'), plan details, and error.
  // The whole sequence races against startupTimeoutMs when it is > 0.
  async function init() {
    const t0 = Date.now();
    const mode = migrationsMode;

    // Timer promise used only to bound awaited work; unref'd so it cannot
    // keep the process alive, and cleared in the outer finally.
    let deadlineTimer = null;
    const startupDeadline = migrationStartupTimeoutMs > 0
      ? new Promise((_, reject) => {
        deadlineTimer = setTimeout(() => {
          const err = new Error(`aquifer: init() exceeded startupTimeoutMs=${migrationStartupTimeoutMs}ms`);
          err.code = 'AQ_MIGRATION_STARTUP_TIMEOUT';
          reject(err);
        }, migrationStartupTimeoutMs);
        if (typeof deadlineTimer.unref === 'function') deadlineTimer.unref();
      })
      : null;
    const withDeadline = (p) => startupDeadline ? Promise.race([p, startupDeadline]) : p;
    const clearDeadline = () => {
      if (deadlineTimer) {
        clearTimeout(deadlineTimer);
        deadlineTimer = null;
      }
    };

    try {
      let plan;
      try {
        plan = await withDeadline(buildMigrationPlan(pool));
      } catch (err) {
        // Could not even probe the database: memory goes fully off.
        const durationMs = Date.now() - t0;
        emitMigrationEvent('apply_failed', {
          error: { code: err.code || null, message: err.message },
          failedAt: 'plan_probe',
          notices: [],
          durationMs,
        });
        return {
          ready: false,
          memoryMode: 'off',
          migrationMode: mode,
          pendingMigrations: [],
          appliedMigrations: [],
          error: { code: err.code || 'AQ_MIGRATION_PROBE_FAILED', message: err.message },
          durationMs,
        };
      }

      // 'off': trust the operator; report ready regardless of pending DDL.
      if (mode === 'off') {
        return {
          ready: true,
          memoryMode: 'rw',
          migrationMode: mode,
          pendingMigrations: plan.pending,
          appliedMigrations: plan.applied,
          error: null,
          durationMs: Date.now() - t0,
        };
      }

      // 'check': never apply; ready only if nothing is pending, otherwise
      // degrade to read-only.
      if (mode === 'check') {
        const ready = plan.pending.length === 0;
        if (ready) migrated = true;
        return {
          ready,
          memoryMode: ready ? 'rw' : 'ro',
          migrationMode: mode,
          pendingMigrations: plan.pending,
          appliedMigrations: plan.applied,
          error: null,
          durationMs: Date.now() - t0,
        };
      }

      // 'apply' with nothing pending: short-circuit without taking the lock.
      if (plan.pending.length === 0) {
        migrated = true;
        return {
          ready: true,
          memoryMode: 'rw',
          migrationMode: mode,
          pendingMigrations: [],
          appliedMigrations: plan.applied,
          error: null,
          durationMs: Date.now() - t0,
        };
      }

      try {
        const result = await withDeadline(migrate());
        // Re-probe after applying so the reported plan reflects reality.
        const planAfter = await buildMigrationPlan(pool).catch(() => null);
        return {
          ready: true,
          memoryMode: 'rw',
          migrationMode: mode,
          pendingMigrations: planAfter ? planAfter.pending : [],
          appliedMigrations: planAfter ? planAfter.applied : plan.required,
          error: null,
          durationMs: result.durationMs || (Date.now() - t0),
        };
      } catch (err) {
        // Migration failed: stay up in read-only mode with the error attached.
        return {
          ready: false,
          memoryMode: 'ro',
          migrationMode: mode,
          pendingMigrations: plan.pending,
          appliedMigrations: plan.applied,
          error: { code: err.code || 'AQ_MIGRATION_FAILED', message: err.message },
          durationMs: Date.now() - t0,
        };
      }
    } finally {
      clearDeadline();
    }
  }

  return {
    buildMigrationPlan,
    ensureMigrated,
    getFtsConfig: () => ftsConfig,
    init,
    isMigrated: () => migrated,
    listPendingMigrations,
    loadSql: filename => loadSql(filename, schema),
    migrate,
    requiredMigrations,
  };
}
526
+
527
// Public surface: the migration ledger, the runtime factory, and the
// low-level SQL-loading/identifier helpers.
module.exports = {
  MIGRATION_PLAN,
  createPostgresMigrationRuntime,
  loadSql,
  qi,
  validateSchema,
};