@shadowforge0/aquifer-memory 1.2.1 → 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -333,18 +333,17 @@ Built-in entity extraction and relationship tracking:
333
333
 
334
334
  ## Benchmark: LongMemEval
335
335
 
336
- We tested Aquifer's retrieval pipeline on [LongMemEval_S](https://github.com/xiaowu0162/LongMemEval) — 470 questions across 19,195 sessions (98,845 turn embeddings).
336
+ We tested Aquifer's retrieval pipeline on [LongMemEval_S](https://github.com/xiaowu0162/LongMemEval) — 470 questions across 19,195 sessions with 98,795 turn embeddings. Per-question haystack scoping (matching the official protocol), bge-m3 embeddings via OpenRouter.
337
337
 
338
- **Setup:** Per-question haystack scoping (matching official methodology), bge-m3 embeddings via OpenRouter, turn-level user-only embedding.
338
+ | Pipeline | R@1 | R@3 | R@5 | R@10 |
339
+ |----------|-----|-----|-----|------|
340
+ | Turn-only (cosine) | 89.5% | 96.6% | 98.1% | 98.9% |
341
+ | Three-way hybrid (FTS + session_emb + turn_emb → RRF) | 79.2% | 94.0% | 97.7% | 98.9% |
342
+ | **Hybrid + Cohere Rerank v3.5 (top-30)** | **96.0%** | **98.5%** | **99.3%** | **99.8%** |
339
343
 
340
- | Metric | Aquifer (bge-m3) |
341
- |--------|-----------------|
342
- | R@1 | 89.6% |
343
- | R@3 | 96.6% |
344
- | R@5 | 98.1% |
345
- | R@10 | 98.9% |
344
+ Measured 2026-04-19 on Aquifer 1.2.1.
346
345
 
347
- **Key finding:** Turn-level embedding is the main driver going from session-level (R@1=26.8%) to turn-level (R@1=89.6%) is a 3x improvement.
346
+ **Key findings.** Turn-level embedding alone beats session-level (26.8% → 89.5% R@1, a 3× improvement). Hybrid fusion adds robustness at R@3–R@10 but trades R@1 because FTS + session-level signals spread the top candidate across adjacent sessions. Re-ranking the hybrid top-30 with a cross-encoder (Cohere Rerank v3.5) wins back the top-1 precision and then some — +16.8pt R@1 over the hybrid baseline, and 6.5pt above pure turn-level cosine. That's the production pipeline Aquifer ships by default when a reranker is configured.
348
347
 
349
348
  ### Multi-Tenant
350
349
 
package/consumers/cli.js CHANGED
@@ -43,7 +43,7 @@ function parsePositiveInt(value, fallback) {
43
43
  function parseArgs(argv) {
44
44
  const args = { _: [], flags: {} };
45
45
  // Flags that take a value (not boolean)
46
- const VALUE_FLAGS = new Set(['limit', 'agent-id', 'source', 'date-from', 'date-to', 'output', 'format', 'config', 'status', 'concurrency', 'entities', 'entity-mode', 'session-id', 'verdict', 'note', 'db', 'since', 'min-messages', 'lookback-days', 'max-chars']);
46
+ const VALUE_FLAGS = new Set(['limit', 'agent-id', 'source', 'date-from', 'date-to', 'output', 'format', 'config', 'status', 'concurrency', 'entities', 'entity-mode', 'session-id', 'verdict', 'note', 'db', 'since', 'min-messages', 'lookback-days', 'max-chars', 'out']);
47
47
  for (let i = 0; i < argv.length; i++) {
48
48
  if (argv[i] === '--') { args._.push(...argv.slice(i + 1)); break; }
49
49
  if (argv[i].startsWith('--')) {
@@ -360,6 +360,16 @@ Options:
360
360
  return;
361
361
  }
362
362
 
363
+ // mcp-contract: write canonical MCP tool manifest to disk. No Aquifer
364
+ // instance needed — manifest is static. Default path /tmp/aquifer-mcp-contract.json.
365
+ if (command === 'mcp-contract') {
366
+ const { writeMcpManifestFile } = require('../index');
367
+ const outPath = args.flags.out || '/tmp/aquifer-mcp-contract.json';
368
+ const written = writeMcpManifestFile(outPath);
369
+ console.log(`Wrote MCP manifest to ${written}`);
370
+ return;
371
+ }
372
+
363
373
  // All other commands need an Aquifer instance
364
374
  const configOverrides = {};
365
375
  if (args.flags.config) {
@@ -0,0 +1,145 @@
1
+ {
2
+ "$schema": "https://aquifer.dev/schema/consumer-profile.v1.json",
3
+ "consumer_profile_id": "miranda",
4
+ "version": 1,
5
+ "description": "Miranda persona consumer profile — canonical shape for session state, handoff, decision log, timeline categories, and default artifact producers. Reference implementation shipped inside Aquifer; production deployments may register additional versions with schema changes.",
6
+ "defaults": {
7
+ "tenant_id": "default",
8
+ "agent_id": "main",
9
+ "time_zone": "Asia/Taipei"
10
+ },
11
+ "schemas": {
12
+ "default.session_state.v1": {
13
+ "kind": "default",
14
+ "target": "sessionState",
15
+ "description": "What Miranda is currently focused on — goal + active threads + affect tag.",
16
+ "json_schema": {
17
+ "type": "object",
18
+ "additionalProperties": true,
19
+ "properties": {
20
+ "goal": { "type": ["string", "null"] },
21
+ "active_work": { "type": "array", "items": { "type": "string" } },
22
+ "blockers": { "type": "array", "items": { "type": "string" } },
23
+ "affect": {
24
+ "type": "object",
25
+ "additionalProperties": true,
26
+ "properties": {
27
+ "mood": { "type": ["string", "null"] },
28
+ "energy": { "enum": ["low", "medium", "high", null] },
29
+ "confidence": { "enum": ["low", "medium", "high", null] },
30
+ "notes": { "type": ["string", "null"] }
31
+ }
32
+ }
33
+ },
34
+ "required": ["goal", "active_work", "blockers", "affect"]
35
+ }
36
+ },
37
+ "default.session_handoff.v1": {
38
+ "kind": "default",
39
+ "target": "sessionHandoff",
40
+ "description": "Session-end baton: what was the last step, where to pick up, what still blocks.",
41
+ "json_schema": {
42
+ "type": "object",
43
+ "additionalProperties": true,
44
+ "properties": {
45
+ "last_step": { "type": "string" },
46
+ "status": { "enum": ["in_progress", "completed", "blocked"] },
47
+ "next": { "type": ["string", "null"] },
48
+ "blockers": { "type": "array", "items": { "type": "string" } },
49
+ "decided": { "type": "array", "items": { "type": "string" } },
50
+ "open_loops": { "type": "array", "items": { "type": "string" } }
51
+ },
52
+ "required": ["last_step", "status", "next", "blockers", "decided", "open_loops"]
53
+ }
54
+ },
55
+ "default.decision_log.v1": {
56
+ "kind": "default",
57
+ "target": "decisionLog",
58
+ "description": "Committed / proposed / reversed decisions with optional fact linkage.",
59
+ "json_schema": {
60
+ "type": "object",
61
+ "additionalProperties": true,
62
+ "properties": {
63
+ "decision": { "type": "string" },
64
+ "reason": { "type": ["string", "null"] },
65
+ "status": { "enum": ["proposed", "committed", "reversed"] },
66
+ "related_fact_ids": {
67
+ "type": "array",
68
+ "items": { "type": "integer" }
69
+ }
70
+ },
71
+ "required": ["decision", "reason", "status"]
72
+ }
73
+ },
74
+ "default.timeline.v1": {
75
+ "kind": "default",
76
+ "target": "timeline",
77
+ "description": "Miranda daily timeline category vocabulary. Categories map to existing daily-log sections (focus/todo/mood/handoff) plus organisational tags Miranda uses in weekly/monthly rollups.",
78
+ "category_vocabulary": [
79
+ "cli",
80
+ "focus",
81
+ "todo",
82
+ "handoff",
83
+ "narrative",
84
+ "organized",
85
+ "weekly",
86
+ "monthly",
87
+ "stats",
88
+ "health",
89
+ "garmin",
90
+ "note"
91
+ ],
92
+ "json_schema": {
93
+ "type": "object",
94
+ "additionalProperties": false,
95
+ "properties": {
96
+ "occurred_at": { "type": "string", "format": "date-time" },
97
+ "source": { "type": "string" },
98
+ "session_ref": { "type": ["string", "null"] },
99
+ "category": { "type": "string" },
100
+ "text": { "type": "string" },
101
+ "metadata": { "type": "object" }
102
+ },
103
+ "required": ["occurred_at", "source", "session_ref", "category", "text", "metadata"]
104
+ }
105
+ }
106
+ },
107
+ "artifacts": {
108
+ "producers": [
109
+ {
110
+ "producer_id": "miranda.workspace.daily-log",
111
+ "type": "daily-log",
112
+ "trigger_phase": "timeline_write",
113
+ "format": "markdown",
114
+ "destination": "workspace://memory/{date}.md",
115
+ "description": "Renders the day's timeline events + state snapshot + handoff to a single markdown file under the Miranda workspace. Source of truth remains the DB; the .md is a rendered view."
116
+ },
117
+ {
118
+ "producer_id": "miranda.workspace.weekly-log",
119
+ "type": "weekly-log",
120
+ "trigger_phase": "artifact_dispatch",
121
+ "format": "markdown",
122
+ "destination": "workspace://memory/weekly/{week_start}.md",
123
+ "description": "Weekly rollup. Timeline events tagged [WEEKLY] plus cross-session narratives. Rendered from DB via rollupWeekly() helper."
124
+ },
125
+ {
126
+ "producer_id": "miranda.workspace.monthly-log",
127
+ "type": "monthly-log",
128
+ "trigger_phase": "artifact_dispatch",
129
+ "format": "markdown",
130
+ "destination": "workspace://memory/monthly/{month}.md",
131
+ "description": "Monthly rollup. Aggregates weekly entries + narrative supersede chain."
132
+ }
133
+ ]
134
+ },
135
+ "extraction_hints": {
136
+ "entities": {
137
+ "include_types": ["person", "project", "tool", "topic"],
138
+ "exclude_patterns": ["^/tmp/", "^/home/mingko/\\.", "^node_modules/"]
139
+ },
140
+ "facts": {
141
+ "prefer_subjects": ["MK", "Miranda", "Aquifer", "OpenClaw", "Jenny", "Evan", "Ivan"],
142
+ "avoid_ephemeral": true
143
+ }
144
+ }
145
+ }
@@ -0,0 +1,186 @@
1
+ 'use strict';
2
+
3
+ // Miranda daily log renderer — reference implementation for the artifact
4
+ // capability (spec §12).
5
+ //
6
+ // Pulls the canonical state for a date from Aquifer (timeline events + latest
7
+ // state + active narrative + latest handoff) and renders it into a single
8
+ // markdown file. Pure logic — does not write to disk; returns the rendered
9
+ // string plus an artifact record declaration the caller can persist via
10
+ // aq.artifacts.record().
11
+ //
12
+ // Shape deliberately mirrors the historical Miranda daily-log format so the
13
+ // downstream consumers (CC, Discord pushes, weekly rollup) see no regression
14
+ // during the cutover from render-daily-log.js to this reference impl.
15
+
16
+ const crypto = require('crypto');
17
+
18
+ function startOfDayIso(dateStr) {
19
+ return `${dateStr}T00:00:00Z`;
20
+ }
21
+
22
+ function endOfDayIso(dateStr) {
23
+ return `${dateStr}T23:59:59.999Z`;
24
+ }
25
+
26
+ function ensureDate(input) {
27
+ if (!input) throw new Error('date (YYYY-MM-DD) is required');
28
+ if (!/^\d{4}-\d{2}-\d{2}$/.test(input)) {
29
+ throw new Error(`date must match YYYY-MM-DD, got: ${input}`);
30
+ }
31
+ return input;
32
+ }
33
+
34
+ function renderSection(title, lines) {
35
+ if (!lines || lines.length === 0) return null;
36
+ return `## ${title}\n\n${lines.join('\n')}\n`;
37
+ }
38
+
39
+ function formatTimelineLine(evt) {
40
+ const ts = new Date(evt.occurredAt).toISOString().slice(11, 16);
41
+ const src = evt.sessionRef ? ` (${evt.sessionRef})` : '';
42
+ return `- \`${ts}\`${src} ${evt.text}`;
43
+ }
44
+
45
+ function formatHandoff(payload) {
46
+ if (!payload) return null;
47
+ const lines = [];
48
+ if (payload.last_step) lines.push(`**Last step.** ${payload.last_step}`);
49
+ if (payload.status) lines.push(`**Status.** ${payload.status}`);
50
+ if (payload.next) lines.push(`**Next.** ${payload.next}`);
51
+ if (Array.isArray(payload.blockers) && payload.blockers.length > 0) {
52
+ lines.push(`**Blockers.**`);
53
+ for (const b of payload.blockers) lines.push(`- ${b}`);
54
+ }
55
+ if (Array.isArray(payload.open_loops) && payload.open_loops.length > 0) {
56
+ lines.push(`**Open loops.**`);
57
+ for (const l of payload.open_loops) lines.push(`- ${l}`);
58
+ }
59
+ return lines.length > 0 ? lines.join('\n') + '\n' : null;
60
+ }
61
+
62
+ function formatState(state) {
63
+ if (!state) return null;
64
+ const lines = [];
65
+ if (state.goal) lines.push(`**Goal.** ${state.goal}`);
66
+ if (Array.isArray(state.active_work) && state.active_work.length > 0) {
67
+ lines.push(`**Active work.**`);
68
+ for (const w of state.active_work) lines.push(`- ${w}`);
69
+ }
70
+ if (state.affect && typeof state.affect === 'object') {
71
+ const bits = [];
72
+ if (state.affect.mood) bits.push(`mood: ${state.affect.mood}`);
73
+ if (state.affect.energy) bits.push(`energy: ${state.affect.energy}`);
74
+ if (state.affect.confidence) bits.push(`confidence: ${state.affect.confidence}`);
75
+ if (bits.length > 0) lines.push(`**Affect.** ${bits.join(', ')}`);
76
+ }
77
+ return lines.length > 0 ? lines.join('\n') + '\n' : null;
78
+ }
79
+
80
+ // -------------------------------------------------------------------------
81
+ // Public entry
82
+ // -------------------------------------------------------------------------
83
+ //
84
+ // renderDailyMd({ aquifer, date, agentId, tenantId?, categories? }) returns:
85
+ // {
86
+ // markdown: string,
87
+ // artifact: { producerId, type, format, destination, payload,
88
+ // idempotencyKey } // ready for aq.artifacts.record()
89
+ // }
90
+ //
91
+ // The caller decides whether to persist the artifact record and where the
92
+ // rendered file lands. Aquifer itself doesn't touch disk.
93
+
94
+ async function renderDailyMd({
95
+ aquifer, date, agentId, tenantId, categories,
96
+ destinationTemplate = 'workspace://memory/{date}.md',
97
+ producerId = 'miranda.workspace.daily-log',
98
+ }) {
99
+ if (!aquifer) throw new Error('aquifer instance is required');
100
+ if (!agentId) throw new Error('agentId is required');
101
+ const day = ensureDate(date);
102
+
103
+ const since = startOfDayIso(day);
104
+ const until = endOfDayIso(day);
105
+
106
+ const timelineResult = await aquifer.timeline.list({
107
+ tenantId, agentId, categories, since, until, limit: 500,
108
+ });
109
+ if (!timelineResult.ok) throw new Error(`timeline.list failed: ${timelineResult.error.message}`);
110
+
111
+ const stateResult = await aquifer.state.getLatest({ tenantId, agentId });
112
+ if (!stateResult.ok) throw new Error(`state.getLatest failed: ${stateResult.error.message}`);
113
+
114
+ const handoffResult = await aquifer.handoff.getLatest({ tenantId, agentId });
115
+ if (!handoffResult.ok) throw new Error(`handoff.getLatest failed: ${handoffResult.error.message}`);
116
+
117
+ const narrativeResult = await aquifer.narratives.getLatest({ tenantId, agentId });
118
+ if (!narrativeResult.ok) throw new Error(`narratives.getLatest failed: ${narrativeResult.error.message}`);
119
+
120
+ const events = (timelineResult.data.rows || []).slice().sort((a, b) =>
121
+ new Date(a.occurredAt).getTime() - new Date(b.occurredAt).getTime());
122
+
123
+ // Group timeline by category.
124
+ const grouped = new Map();
125
+ for (const evt of events) {
126
+ if (!grouped.has(evt.category)) grouped.set(evt.category, []);
127
+ grouped.get(evt.category).push(evt);
128
+ }
129
+
130
+ const sections = [];
131
+ sections.push(`# ${day}\n`);
132
+ const stateBlock = formatState(stateResult.data.state);
133
+ if (stateBlock) sections.push(`## State\n\n${stateBlock}`);
134
+
135
+ if (narrativeResult.data.narrative) {
136
+ sections.push(`## Narrative\n\n${narrativeResult.data.narrative.text}\n`);
137
+ }
138
+
139
+ const categoryOrder = ['focus', 'todo', 'mood', 'handoff', 'narrative',
140
+ 'organized', 'note', 'stats', 'health', 'garmin', 'weekly', 'monthly', 'cli'];
141
+ const emitted = new Set();
142
+ for (const cat of categoryOrder) {
143
+ if (!grouped.has(cat)) continue;
144
+ const lines = grouped.get(cat).map(formatTimelineLine);
145
+ const sec = renderSection(cat.charAt(0).toUpperCase() + cat.slice(1), lines);
146
+ if (sec) sections.push(sec);
147
+ emitted.add(cat);
148
+ }
149
+ for (const [cat, evts] of grouped) {
150
+ if (emitted.has(cat)) continue;
151
+ const lines = evts.map(formatTimelineLine);
152
+ const sec = renderSection(cat, lines);
153
+ if (sec) sections.push(sec);
154
+ }
155
+
156
+ const handoffBlock = formatHandoff(handoffResult.data.handoff);
157
+ if (handoffBlock) sections.push(`## Handoff\n\n${handoffBlock}`);
158
+
159
+ const markdown = sections.join('\n').replace(/\n{3,}/g, '\n\n').trim() + '\n';
160
+
161
+ const destination = destinationTemplate.replace('{date}', day);
162
+ const idempotencyKey = crypto.createHash('sha256')
163
+ .update(`miranda:daily:${tenantId || 'default'}:${agentId}:${day}`)
164
+ .digest('hex');
165
+
166
+ return {
167
+ markdown,
168
+ artifact: {
169
+ producerId,
170
+ type: 'daily-log',
171
+ format: 'markdown',
172
+ destination,
173
+ triggerPhase: 'artifact_dispatch',
174
+ payload: {
175
+ date: day,
176
+ event_count: events.length,
177
+ has_state: !!stateResult.data.state,
178
+ has_handoff: !!handoffResult.data.handoff,
179
+ has_narrative: !!narrativeResult.data.narrative,
180
+ },
181
+ idempotencyKey,
182
+ },
183
+ };
184
+ }
185
+
186
+ module.exports = { renderDailyMd };
package/core/aquifer.js CHANGED
@@ -270,6 +270,12 @@ function createAquifer(config = {}) {
270
270
  await pool.query(factsSql);
271
271
  }
272
272
 
273
+ // 5. Completion foundation (always, additive): narratives,
274
+ // consumer_profiles, sessions.consolidation_phases. Pure additive DDL
275
+ // with IF NOT EXISTS guards — safe on every migrate() call.
276
+ const completionSql = loadSql('004-completion.sql', schema);
277
+ await pool.query(completionSql);
278
+
273
279
  migrated = true;
274
280
  } finally {
275
281
  await pool.query('SELECT pg_advisory_unlock($1)', [lockKey]).catch((err) => {
@@ -1233,6 +1239,29 @@ function createAquifer(config = {}) {
1233
1239
  },
1234
1240
  };
1235
1241
 
1242
+ // Completion-capability surfaces (P2). All methods return AqResult envelope;
1243
+ // DDL materialised in schema/004-completion.sql (migrated unconditionally,
1244
+ // additive only). See core/errors.js for envelope shape.
1245
+ const { createNarratives } = require('./narratives');
1246
+ const { createTimeline } = require('./timeline');
1247
+ const { createState } = require('./state');
1248
+ const { createHandoff } = require('./handoff');
1249
+ const { createProfiles } = require('./profiles');
1250
+ const { createDecisions } = require('./decisions');
1251
+ const { createArtifacts } = require('./artifacts');
1252
+ const { createConsolidation } = require('./consolidation');
1253
+ const { createBundles } = require('./bundles');
1254
+ const qSchema = qi(schema);
1255
+ aquifer.narratives = createNarratives({ pool, schema: qSchema, defaultTenantId: tenantId });
1256
+ aquifer.timeline = createTimeline({ pool, schema: qSchema, defaultTenantId: tenantId });
1257
+ aquifer.state = createState({ pool, schema: qSchema, defaultTenantId: tenantId });
1258
+ aquifer.handoff = createHandoff({ pool, schema: qSchema, defaultTenantId: tenantId });
1259
+ aquifer.profiles = createProfiles({ pool, schema: qSchema, defaultTenantId: tenantId });
1260
+ aquifer.decisions = createDecisions({ pool, schema: qSchema, defaultTenantId: tenantId });
1261
+ aquifer.artifacts = createArtifacts({ pool, schema: qSchema, defaultTenantId: tenantId });
1262
+ aquifer.consolidation = createConsolidation({ pool, schema: qSchema, defaultTenantId: tenantId });
1263
+ aquifer.bundles = createBundles({ pool, schema: qSchema, defaultTenantId: tenantId });
1264
+
1236
1265
  return aquifer;
1237
1266
  }
1238
1267
 
@@ -0,0 +1,174 @@
1
+ 'use strict';
2
+
3
+ // aq.artifacts.* — producer-declared output record capability.
4
+ //
5
+ // Spec: aquifer-completion §12 artifact. Aquifer stores the declaration +
6
+ // lifecycle status but never interprets the payload. Producers own shape.
7
+ // Typical flow: record with status='pending', produce content externally,
8
+ // then upsert same idempotency_key with status='produced' + contentRef.
9
+
10
+ const crypto = require('crypto');
11
+ const { AqError, ok, err } = require('./errors');
12
+
13
+ const DEFAULT_PROFILE = Object.freeze({
14
+ id: 'anon',
15
+ version: 0,
16
+ schemaHash: 'pending',
17
+ });
18
+
19
+ const VALID_STATUSES = new Set(['pending', 'produced', 'failed', 'discarded']);
20
+
21
+ function resolveProfile(profile) {
22
+ if (!profile) return DEFAULT_PROFILE;
23
+ return {
24
+ id: profile.id || DEFAULT_PROFILE.id,
25
+ version: Number.isInteger(profile.version) ? profile.version : DEFAULT_PROFILE.version,
26
+ schemaHash: profile.schemaHash || DEFAULT_PROFILE.schemaHash,
27
+ };
28
+ }
29
+
30
+ function toNumber(v) {
31
+ if (v === null || v === undefined) return null;
32
+ const n = Number(v);
33
+ return Number.isFinite(n) ? n : null;
34
+ }
35
+
36
+ function defaultIdempotencyKey({ tenantId, producerId, sessionId, artifactType, destination }) {
37
+ return crypto.createHash('sha256')
38
+ .update(`${tenantId}:${producerId}:${sessionId || ''}:${artifactType}:${destination}`)
39
+ .digest('hex');
40
+ }
41
+
42
+ function mapRow(row) {
43
+ if (!row) return null;
44
+ return {
45
+ artifactId: toNumber(row.id),
46
+ agentId: row.agent_id,
47
+ sessionId: row.source_session_id,
48
+ producerId: row.producer_id,
49
+ type: row.artifact_type,
50
+ triggerPhase: row.trigger_phase,
51
+ format: row.format,
52
+ destination: row.destination,
53
+ status: row.status,
54
+ contentRef: row.content_ref,
55
+ payload: row.payload || {},
56
+ metadata: row.metadata || {},
57
+ producedAt: row.produced_at,
58
+ createdAt: row.created_at,
59
+ updatedAt: row.updated_at,
60
+ };
61
+ }
62
+
63
+ function createArtifacts({ pool, schema, defaultTenantId }) {
64
+ async function record(input) {
65
+ try {
66
+ if (!input || typeof input !== 'object') {
67
+ return err('AQ_INVALID_INPUT', 'record requires an input object');
68
+ }
69
+ if (!input.agentId) return err('AQ_INVALID_INPUT', 'agentId is required');
70
+ if (!input.producerId) return err('AQ_INVALID_INPUT', 'producerId is required');
71
+ if (!input.type) return err('AQ_INVALID_INPUT', 'type is required');
72
+ if (!input.format) return err('AQ_INVALID_INPUT', 'format is required');
73
+ if (!input.destination) return err('AQ_INVALID_INPUT', 'destination is required');
74
+
75
+ const status = input.status || 'pending';
76
+ if (!VALID_STATUSES.has(status)) {
77
+ return err('AQ_INVALID_INPUT',
78
+ `status must be one of ${Array.from(VALID_STATUSES).join(', ')}`);
79
+ }
80
+
81
+ const tenantId = input.tenantId || defaultTenantId || 'default';
82
+ const profile = resolveProfile(input.profile);
83
+ const idempotencyKey = input.idempotencyKey
84
+ || defaultIdempotencyKey({
85
+ tenantId,
86
+ producerId: input.producerId,
87
+ sessionId: input.sessionId,
88
+ artifactType: input.type,
89
+ destination: input.destination,
90
+ });
91
+
92
+ // Upsert semantics: producer may re-record the same artifact with
93
+ // updated status ('pending' → 'produced'), so DO UPDATE on matching
94
+ // idempotency_key, allowing lifecycle transitions.
95
+ const { rows } = await pool.query(
96
+ `INSERT INTO ${schema}.artifacts (
97
+ tenant_id, agent_id, source_session_id,
98
+ consumer_profile_id, consumer_profile_version, consumer_schema_hash,
99
+ idempotency_key, producer_id, artifact_type, trigger_phase,
100
+ format, destination, status, content_ref, payload, metadata,
101
+ produced_at
102
+ ) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16,
103
+ CASE WHEN $13 = 'produced' THEN COALESCE($17::timestamptz, now()) ELSE $17::timestamptz END)
104
+ ON CONFLICT (idempotency_key) DO UPDATE SET
105
+ status = EXCLUDED.status,
106
+ content_ref = COALESCE(EXCLUDED.content_ref, ${schema}.artifacts.content_ref),
107
+ payload = EXCLUDED.payload,
108
+ metadata = EXCLUDED.metadata,
109
+ produced_at = CASE
110
+ WHEN EXCLUDED.status = 'produced' AND ${schema}.artifacts.produced_at IS NULL
111
+ THEN now()
112
+ ELSE ${schema}.artifacts.produced_at
113
+ END
114
+ RETURNING *`,
115
+ [
116
+ tenantId, input.agentId, input.sessionId || null,
117
+ profile.id, profile.version, profile.schemaHash,
118
+ idempotencyKey, input.producerId, input.type,
119
+ input.triggerPhase || null, input.format, input.destination,
120
+ status, input.contentRef || null,
121
+ JSON.stringify(input.payload || {}),
122
+ JSON.stringify(input.metadata || {}),
123
+ input.producedAt || null,
124
+ ],
125
+ );
126
+ const mapped = mapRow(rows[0]);
127
+ return ok({ artifactId: mapped.artifactId });
128
+ } catch (e) {
129
+ if (e instanceof AqError) return err(e);
130
+ return err('AQ_INTERNAL', e.message, { cause: e });
131
+ }
132
+ }
133
+
134
+ async function list(input = {}) {
135
+ try {
136
+ const tenantId = input.tenantId || defaultTenantId || 'default';
137
+ const limit = Math.min(Math.max(input.limit || 50, 1), 500);
138
+ const params = [tenantId];
139
+ let where = 'tenant_id = $1';
140
+ if (input.agentId) {
141
+ params.push(input.agentId);
142
+ where += ` AND agent_id = $${params.length}`;
143
+ }
144
+ if (input.sessionId) {
145
+ params.push(input.sessionId);
146
+ where += ` AND source_session_id = $${params.length}`;
147
+ }
148
+ if (input.producerId) {
149
+ params.push(input.producerId);
150
+ where += ` AND producer_id = $${params.length}`;
151
+ }
152
+ if (Array.isArray(input.statuses) && input.statuses.length > 0) {
153
+ params.push(input.statuses);
154
+ where += ` AND status = ANY($${params.length})`;
155
+ }
156
+ params.push(limit);
157
+
158
+ const { rows } = await pool.query(
159
+ `SELECT * FROM ${schema}.artifacts
160
+ WHERE ${where}
161
+ ORDER BY created_at DESC, id DESC
162
+ LIMIT $${params.length}`,
163
+ params,
164
+ );
165
+ return ok({ rows: rows.map(mapRow) });
166
+ } catch (e) {
167
+ return err('AQ_INTERNAL', e.message, { cause: e });
168
+ }
169
+ }
170
+
171
+ return { record, list };
172
+ }
173
+
174
+ module.exports = { createArtifacts };