@memoryrelay/plugin-memoryrelay-ai 0.16.3 → 0.17.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,199 @@
1
+ import type { LocalCache } from "./local-cache.js";
2
+ import type { MemoryRelayClient } from "../client/memoryrelay-client.js";
3
+ import type { Memory } from "../pipelines/types.js";
4
+ import type { LocalCacheConfig } from "./types.js";
5
+
6
+ // Backoff schedule: consecutiveErrors → interval ms
7
+ const BACKOFF_SCHEDULE: Record<number, number> = {
8
+ 0: 0, // use base interval
9
+ 1: 60_000, // 1 minute
10
+ 2: 300_000, // 5 minutes
11
+ };
12
+ const MAX_BACKOFF_MS = 1_800_000; // 30 minutes
13
+
14
+ const PULL_PAGE_SIZE = 100;
15
+
16
+ export class SyncDaemon {
17
+ private readonly cache: LocalCache;
18
+ private readonly client: MemoryRelayClient;
19
+ private readonly config: LocalCacheConfig;
20
+
21
+ private intervalId: ReturnType<typeof setInterval> | null = null;
22
+ private consecutiveErrors = 0;
23
+ private _lastError: string | null = null;
24
+
25
+ constructor(cache: LocalCache, client: MemoryRelayClient, config: LocalCacheConfig) {
26
+ this.cache = cache;
27
+ this.client = client;
28
+ this.config = config;
29
+ }
30
+
31
+ start(): void {
32
+ if (this.intervalId !== null) return;
33
+
34
+ const baseMs = this.config.syncIntervalMinutes * 60_000;
35
+ this.scheduleNext(baseMs);
36
+ }
37
+
38
+ stop(): void {
39
+ if (this.intervalId !== null) {
40
+ clearInterval(this.intervalId);
41
+ this.intervalId = null;
42
+ }
43
+ }
44
+
45
+ isRunning(): boolean {
46
+ return this.intervalId !== null;
47
+ }
48
+
49
+ lastError(): string | null {
50
+ return this._lastError;
51
+ }
52
+
53
+ getConsecutiveErrors(): number {
54
+ return this.consecutiveErrors;
55
+ }
56
+
57
+ async pull(): Promise<{ added: number; updated: number }> {
58
+ let added = 0;
59
+ let updated = 0;
60
+
61
+ try {
62
+ const syncState = this.cache.getSyncState();
63
+ let offset = syncState.cursor ? parseInt(syncState.cursor, 10) : 0;
64
+ let hasMore = true;
65
+
66
+ while (hasMore) {
67
+ const memories: Memory[] = await this.client.list(PULL_PAGE_SIZE, offset);
68
+
69
+ if (memories.length === 0) {
70
+ hasMore = false;
71
+ break;
72
+ }
73
+
74
+ for (const memory of memories) {
75
+ const existed = this.cache.get(memory.id) !== null;
76
+ this.cache.upsert({
77
+ id: memory.id,
78
+ remote_id: memory.id,
79
+ content: memory.content,
80
+ agent_id: memory.agent_id,
81
+ user_id: memory.user_id ?? "",
82
+ metadata: memory.metadata ?? {},
83
+ entities: memory.entities ?? [],
84
+ importance: memory.importance ?? 0.5,
85
+ tier: memory.tier ?? "warm",
86
+ scope: "long-term",
87
+ synced_at: new Date().toISOString(),
88
+ updated_at: memory.updated_at,
89
+ created_at: memory.created_at,
90
+ });
91
+ if (existed) {
92
+ updated++;
93
+ } else {
94
+ added++;
95
+ }
96
+ }
97
+
98
+ offset += memories.length;
99
+ hasMore = memories.length >= PULL_PAGE_SIZE;
100
+ }
101
+
102
+ // Update sync state
103
+ this.cache.setSyncState({
104
+ cursor: String(offset),
105
+ lastPull: new Date().toISOString(),
106
+ });
107
+
108
+ this.onSuccess();
109
+ return { added, updated };
110
+ } catch (err) {
111
+ this.onError(err);
112
+ throw err;
113
+ }
114
+ }
115
+
116
+ async push(): Promise<{ flushed: number; failed: number }> {
117
+ let flushed = 0;
118
+ let failed = 0;
119
+
120
+ try {
121
+ const entries = this.cache.bufferReadPending();
122
+ if (entries.length === 0) {
123
+ return { flushed: 0, failed: 0 };
124
+ }
125
+
126
+ const flushedIds: string[] = [];
127
+
128
+ for (const entry of entries) {
129
+ try {
130
+ await this.client.store(entry.content, entry.metadata as Record<string, string>, {
131
+ scope: entry.scope,
132
+ });
133
+ flushedIds.push(String(entry.id));
134
+ flushed++;
135
+ } catch {
136
+ failed++;
137
+ }
138
+ }
139
+
140
+ if (flushedIds.length > 0) {
141
+ this.cache.bufferMarkFlushed(flushedIds);
142
+ this.cache.setSyncState({ lastPush: new Date().toISOString() });
143
+ }
144
+
145
+ if (failed === 0) {
146
+ this.onSuccess();
147
+ } else if (flushed === 0) {
148
+ this.onError(new Error(`All ${failed} buffer entries failed to push`));
149
+ }
150
+
151
+ return { flushed, failed };
152
+ } catch (err) {
153
+ this.onError(err);
154
+ throw err;
155
+ }
156
+ }
157
+
158
+ // --- Internal ---
159
+
160
+ private scheduleNext(delayMs: number): void {
161
+ this.intervalId = setInterval(async () => {
162
+ try {
163
+ await this.pull();
164
+ await this.push();
165
+ } catch {
166
+ // errors already handled in pull/push via onError
167
+ }
168
+ }, delayMs);
169
+ }
170
+
171
+ private getBackoffMs(): number {
172
+ if (this.consecutiveErrors === 0) {
173
+ return this.config.syncIntervalMinutes * 60_000;
174
+ }
175
+ return BACKOFF_SCHEDULE[this.consecutiveErrors] ?? MAX_BACKOFF_MS;
176
+ }
177
+
178
+ private onSuccess(): void {
179
+ if (this.consecutiveErrors > 0) {
180
+ this.consecutiveErrors = 0;
181
+ this._lastError = null;
182
+ this.reschedule();
183
+ }
184
+ }
185
+
186
+ private onError(err: unknown): void {
187
+ this.consecutiveErrors++;
188
+ this._lastError = err instanceof Error ? err.message : String(err);
189
+ this.reschedule();
190
+ }
191
+
192
+ private reschedule(): void {
193
+ if (this.intervalId === null) return;
194
+ clearInterval(this.intervalId);
195
+ this.intervalId = null;
196
+ const nextMs = this.getBackoffMs();
197
+ this.scheduleNext(nextMs);
198
+ }
199
+ }
@@ -0,0 +1,53 @@
1
/** Configuration for the local SQLite cache and its background sync. */
export interface LocalCacheConfig {
  enabled: boolean;
  // Filesystem path of the SQLite database file.
  dbPath: string;
  // Base polling interval for the sync daemon, in minutes.
  syncIntervalMinutes: number;
  // Cap on locally cached memories (enforcement lives in the cache impl).
  maxLocalMemories: number;
  // Vector-search toggle plus the embedding provider name.
  vectorSearch: { enabled: boolean; provider: string };
  // Per-tier time-to-live values — units are not visible here; TODO confirm.
  ttl: { hot: number; warm: number; cold: number };
}

/** A locally buffered write awaiting push to the remote API. */
export interface BufferEntry {
  // Numeric local buffer key (stringified before bufferMarkFlushed).
  id: number;
  content: string;
  metadata: Record<string, string>;
  scope: "session" | "long-term";
  session_id?: string;
  namespace: string;
  created_at: string; // ISO-8601 timestamp
  // True once the entry has been successfully pushed upstream.
  flushed: boolean;
}

/** Persisted progress of the sync daemon. */
export interface SyncState {
  lastPull: string | null; // ISO-8601 timestamp of the last successful pull
  lastPush: string | null; // ISO-8601 timestamp of the last successful push
  cursor: string | null; // stringified pull offset; null before first pull
}

/** Aggregate statistics describing the state of the local cache. */
export interface CacheStats {
  totalMemories: number;
  tierBreakdown: { hot: number; warm: number; cold: number };
  // Number of buffered writes not yet pushed to the remote API.
  bufferDepth: number;
  lastSync: string | null;
  dbSizeBytes: number;
}

/** A memory row as stored in the local cache (JSON columns decoded). */
export interface LocalMemory {
  id: string;
  // Remote service ID; null until the memory has been synced upstream.
  remote_id: string | null;
  content: string;
  agent_id: string;
  user_id: string;
  metadata: Record<string, unknown>;
  entities: unknown[];
  importance: number;
  tier: "hot" | "warm" | "cold";
  scope: "session" | "long-term";
  session_id: string | null;
  namespace: string;
  created_at: string;
  updated_at: string;
  // When this row was last reconciled with the remote service.
  synced_at: string | null;
  expires_at: string | null;
  // Raw embedding bytes when vector search is enabled; otherwise null.
  embedding: Buffer | null;
}
@@ -0,0 +1,162 @@
1
+ import type Database from "better-sqlite3";
2
+ import type { LocalMemory } from "./types.js";
3
+
4
+ /**
5
+ * Attempt to load the sqlite-vec extension into the database.
6
+ * Returns true if loaded successfully, false otherwise.
7
+ */
8
+ export async function loadVectorExtension(db: Database.Database): Promise<boolean> {
9
+ try {
10
+ const sqliteVec = await import("sqlite-vec");
11
+ sqliteVec.load(db);
12
+ return true;
13
+ } catch {
14
+ return false;
15
+ }
16
+ }
17
+
18
+ /**
19
+ * Create the vec0 virtual table for vector search.
20
+ * Only call after loadVectorExtension returns true.
21
+ */
22
+ export function createVecTable(db: Database.Database): void {
23
+ const fn = (db as unknown as { exec: (sql: string) => void }).exec.bind(db);
24
+ fn(
25
+ "CREATE VIRTUAL TABLE IF NOT EXISTS memories_vec USING vec0(memory_id TEXT PRIMARY KEY, embedding float[768])",
26
+ );
27
+ }
28
+
29
+ /**
30
+ * Store an embedding for a memory. Upserts into the vec0 table.
31
+ */
32
+ export function storeEmbedding(
33
+ db: Database.Database,
34
+ memoryId: string,
35
+ embedding: Float32Array,
36
+ ): void {
37
+ db.prepare(
38
+ "INSERT OR REPLACE INTO memories_vec (memory_id, embedding) VALUES (?, ?)",
39
+ ).run(memoryId, Buffer.from(embedding.buffer, embedding.byteOffset, embedding.byteLength));
40
+ }
41
+
42
+ /**
43
+ * Search for similar vectors. Returns memory IDs ordered by similarity.
44
+ */
45
+ export function searchVector(
46
+ db: Database.Database,
47
+ queryEmbedding: Float32Array,
48
+ limit: number,
49
+ ): string[] {
50
+ const rows = db
51
+ .prepare(
52
+ "SELECT memory_id FROM memories_vec WHERE embedding MATCH ? ORDER BY distance LIMIT ?",
53
+ )
54
+ .all(
55
+ Buffer.from(queryEmbedding.buffer, queryEmbedding.byteOffset, queryEmbedding.byteLength),
56
+ limit,
57
+ ) as { memory_id: string }[];
58
+ return rows.map((r) => r.memory_id);
59
+ }
60
+
61
/**
 * Hybrid search combining FTS5 text search with vector similarity.
 * Falls back to FTS5-only when queryEmbedding is null.
 *
 * Ranking: each method assigns a rank-based score in (0, 1]
 * (1 - index / resultCount); a memory found by both methods receives the
 * sum of both scores, so cross-method agreement boosts its final position.
 *
 * @param db better-sqlite3 handle with the memories/memories_fts tables
 *           (and memories_vec when vector search is in use).
 * @param queryText free-text query; quote characters are stripped before
 *        FTS matching.
 * @param queryEmbedding optional query vector; vector branch is skipped
 *        when null.
 * @param limit maximum number of memories returned.
 * @param vectorAvailable pass false when the sqlite-vec extension failed
 *        to load so the vector branch is skipped entirely.
 */
export function searchHybrid(
  db: Database.Database,
  queryText: string,
  queryEmbedding: Float32Array | null,
  limit: number,
  vectorAvailable: boolean = true,
): LocalMemory[] {
  const resultMap = new Map<string, { memory: LocalMemory; score: number }>();

  // FTS5 search
  if (queryText.trim()) {
    // Strip quote characters and re-quote each term individually so user
    // input cannot inject FTS5 query operators.
    const safeQuery = queryText
      .replace(/['"]/g, " ")
      .split(/\s+/)
      .filter(Boolean)
      .map((term) => `"${term}"`)
      .join(" ");

    if (safeQuery) {
      // Over-fetch (limit * 2) so the merge step below has candidates
      // from both methods to re-rank.
      const ftsRows = db
        .prepare(
          `SELECT m.*, fts.rank
           FROM memories_fts fts
           JOIN memories m ON m.rowid = fts.rowid
           WHERE memories_fts MATCH ?
           ORDER BY fts.rank
           LIMIT ?`,
        )
        .all(safeQuery, limit * 2) as (MemoryRow & { rank: number })[];

      for (let i = 0; i < ftsRows.length; i++) {
        const row = ftsRows[i];
        const ftsScore = 1.0 - i / ftsRows.length; // normalize to 0-1
        resultMap.set(row.id, { memory: rowToMemory(row), score: ftsScore });
      }
    }
  }

  // Vector search (only if extension available and embedding provided)
  if (vectorAvailable && queryEmbedding) {
    try {
      const vecIds = searchVector(db, queryEmbedding, limit * 2);
      for (let i = 0; i < vecIds.length; i++) {
        const vecScore = 1.0 - i / vecIds.length;
        const existing = resultMap.get(vecIds[i]);
        if (existing) {
          // Boost items found by both methods
          existing.score += vecScore;
        } else {
          // Vector-only hit: load the full row to materialize the memory.
          const row = db
            .prepare("SELECT * FROM memories WHERE id = ?")
            .get(vecIds[i]) as MemoryRow | undefined;
          if (row) {
            resultMap.set(vecIds[i], { memory: rowToMemory(row), score: vecScore });
          }
        }
      }
    } catch {
      // Vector search failed — continue with FTS results only
    }
  }

  // Sort by combined score descending, return top N
  return Array.from(resultMap.values())
    .sort((a, b) => b.score - a.score)
    .slice(0, limit)
    .map((r) => r.memory);
}
133
+
134
// --- Internal helpers (duplicated from local-cache.ts to keep module self-contained) ---

/**
 * Shape of a raw row from the `memories` table, before JSON columns are
 * decoded. Mirrors LocalMemory except that metadata/entities are still
 * JSON-encoded strings (see rowToMemory).
 */
interface MemoryRow {
  id: string;
  remote_id: string | null;
  content: string;
  agent_id: string;
  user_id: string;
  metadata: string; // JSON-encoded object
  entities: string; // JSON-encoded array
  importance: number;
  tier: "hot" | "warm" | "cold";
  scope: "session" | "long-term";
  session_id: string | null;
  namespace: string;
  created_at: string;
  updated_at: string;
  synced_at: string | null;
  expires_at: string | null;
  embedding: Buffer | null;
}
155
+
156
+ function rowToMemory(row: MemoryRow): LocalMemory {
157
+ return {
158
+ ...row,
159
+ metadata: JSON.parse(row.metadata),
160
+ entities: JSON.parse(row.entities),
161
+ };
162
+ }
@@ -153,7 +153,7 @@ export class MemoryRelayClient implements IMemoryRelayClient {
153
153
  headers: {
154
154
  "Content-Type": "application/json",
155
155
  Authorization: `Bearer ${this.apiKey}`,
156
- "User-Agent": "openclaw-memory-memoryrelay/0.16.3",
156
+ "User-Agent": "openclaw-memory-memoryrelay/0.17.1",
157
157
  },
158
158
  body: body ? JSON.stringify(body) : undefined,
159
159
  },
@@ -1,6 +1,6 @@
1
1
  // src/hooks/agent-end.ts
2
2
  import type { OpenClawPluginApi } from "openclaw/plugin-sdk";
3
- import type { PluginConfig, MemoryRelayClient, ConversationMessage, SessionResolverLike } from "../pipelines/types.js";
3
+ import type { PluginConfig, MemoryRelayClient, ConversationMessage, SessionResolverLike, LocalCacheLike, SyncDaemonLike } from "../pipelines/types.js";
4
4
  import { buildRequestContext } from "../context/request-context.js";
5
5
  import { runPipeline } from "../pipelines/runner.js";
6
6
  import { capturePipeline } from "../pipelines/capture/index.js";
@@ -10,6 +10,8 @@ export function registerAgentEnd(
10
10
  config: PluginConfig,
11
11
  client: MemoryRelayClient,
12
12
  sessionResolver?: SessionResolverLike,
13
+ localCache?: LocalCacheLike,
14
+ syncDaemon?: SyncDaemonLike,
13
15
  ): void {
14
16
  if (!config.autoCapture?.enabled) return;
15
17
 
@@ -39,7 +41,7 @@ export function registerAgentEnd(
39
41
  if (messages.length === 0) return;
40
42
 
41
43
  const requestCtx = buildRequestContext(event, config);
42
- const pipelineCtx = { requestCtx, config, client, sessionResolver };
44
+ const pipelineCtx = { requestCtx, config, client, sessionResolver, localCache, syncDaemon };
43
45
  await runPipeline(capturePipeline, { messages }, pipelineCtx);
44
46
  } catch (err) {
45
47
  api.logger.warn?.(`memory-memoryrelay: capture failed: ${String(err)}`);
@@ -1,6 +1,6 @@
1
1
  // src/hooks/before-prompt-build.ts
2
2
  import type { OpenClawPluginApi } from "openclaw/plugin-sdk";
3
- import type { PluginConfig, MemoryRelayClient, SessionResolverLike } from "../pipelines/types.js";
3
+ import type { PluginConfig, MemoryRelayClient, SessionResolverLike, LocalCacheLike, SyncDaemonLike } from "../pipelines/types.js";
4
4
  import { buildRequestContext } from "../context/request-context.js";
5
5
  import { runPipeline } from "../pipelines/runner.js";
6
6
  import { recallPipeline } from "../pipelines/recall/index.js";
@@ -10,6 +10,8 @@ export function registerBeforePromptBuild(
10
10
  config: PluginConfig,
11
11
  client: MemoryRelayClient,
12
12
  sessionResolver?: SessionResolverLike,
13
+ localCache?: LocalCacheLike,
14
+ syncDaemon?: SyncDaemonLike,
13
15
  ): void {
14
16
  api.on("before_prompt_build", async (event) => {
15
17
  if (!config.autoRecall) return;
@@ -29,7 +31,7 @@ export function registerBeforePromptBuild(
29
31
 
30
32
  try {
31
33
  const requestCtx = buildRequestContext(event, config);
32
- const pipelineCtx = { requestCtx, config, client, sessionResolver };
34
+ const pipelineCtx = { requestCtx, config, client, sessionResolver, localCache, syncDaemon };
33
35
  const result = await runPipeline(recallPipeline, {
34
36
  prompt: requestCtx.prompt, memories: [], scope: "all" as const,
35
37
  }, pipelineCtx);
@@ -20,14 +20,38 @@ export const captureStore: CaptureStage = {
20
20
  }
21
21
  }
22
22
 
23
+ let buffered = false;
24
+
23
25
  for (const msg of toStore) {
24
26
  const scope = resolveScope(msg.content);
25
- const opts: Record<string, unknown> = { scope };
27
+ const metadata: Record<string, unknown> = { source: "auto-capture", scope };
26
28
  if (scope === "session" && sessionId) {
27
- opts.session_id = sessionId;
29
+ metadata.session_id = sessionId;
30
+ }
31
+ metadata.namespace = ctx.requestCtx.namespace;
32
+
33
+ if (ctx.localCache) {
34
+ try {
35
+ ctx.localCache.bufferWrite(msg.content, metadata);
36
+ buffered = true;
37
+ } catch {
38
+ // Buffer write failed — fall back to direct API call
39
+ const opts: Record<string, unknown> = { scope };
40
+ if (scope === "session" && sessionId) {
41
+ opts.session_id = sessionId;
42
+ }
43
+ await ctx.client.store(msg.content, { source: "auto-capture", scope }, opts);
44
+ }
45
+ } else {
46
+ // No local cache — direct API call (existing behavior)
47
+ const opts: Record<string, unknown> = { scope };
48
+ if (scope === "session" && sessionId) {
49
+ opts.session_id = sessionId;
50
+ }
51
+ await ctx.client.store(msg.content, { source: "auto-capture", scope }, opts);
28
52
  }
29
- await ctx.client.store(msg.content, { source: "auto-capture", scope }, opts);
30
53
  }
31
- return { action: "continue", data: input };
54
+
55
+ return { action: "continue", data: input, buffered };
32
56
  },
33
57
  };
@@ -1,4 +1,12 @@
1
- import type { RecallStage } from "../types.js";
1
+ import type { RecallStage, ScoredMemory } from "../types.js";
2
+
3
+ function isCacheStale(lastPull: string | null, syncIntervalMinutes: number): boolean {
4
+ if (!lastPull) return true;
5
+ const lastPullTime = new Date(lastPull).getTime();
6
+ if (isNaN(lastPullTime)) return true;
7
+ const staleAfterMs = syncIntervalMinutes * 60 * 1000;
8
+ return Date.now() - lastPullTime > staleAfterMs;
9
+ }
2
10
 
3
11
  export const recallSearch: RecallStage = {
4
12
  name: "search",
@@ -20,13 +28,84 @@ export const recallSearch: RecallStage = {
20
28
  });
21
29
  sessionId = entry.sessionId;
22
30
  } catch {
23
- // Fall back to raw session key if resolution fails
24
31
  sessionId = resolvedSessionKey;
25
32
  }
26
33
  } else {
27
34
  sessionId = resolvedSessionKey;
28
35
  }
29
36
 
37
+ // Local-first search: try local cache before API
38
+ if (ctx.localCache) {
39
+ try {
40
+ const localCount = ctx.localCache.count();
41
+ if (localCount > 0) {
42
+ const localLongTerm = ctx.localCache.search(input.prompt, {
43
+ limit,
44
+ scope: "long-term",
45
+ namespace,
46
+ });
47
+ const localSession = ctx.localCache.search(input.prompt, {
48
+ limit,
49
+ scope: "session",
50
+ sessionId,
51
+ namespace,
52
+ });
53
+
54
+ if (localLongTerm.length > 0 || localSession.length > 0) {
55
+ // Trigger background refresh if stale
56
+ if (ctx.syncDaemon) {
57
+ const syncIntervalMinutes = ctx.config.syncIntervalMinutes ?? 5;
58
+ const syncState = ctx.localCache.getSyncState();
59
+ if (isCacheStale(syncState.lastPull, syncIntervalMinutes)) {
60
+ ctx.syncDaemon.pull().catch(() => {});
61
+ }
62
+ }
63
+
64
+ return {
65
+ action: "continue",
66
+ data: {
67
+ ...input,
68
+ longTerm: localLongTerm.map((m) => ({
69
+ memory: {
70
+ id: m.id,
71
+ content: m.content,
72
+ agent_id: m.agent_id,
73
+ user_id: m.user_id,
74
+ metadata: m.metadata as Record<string, string>,
75
+ entities: m.entities as string[],
76
+ created_at: m.created_at,
77
+ updated_at: m.updated_at,
78
+ importance: m.importance,
79
+ tier: m.tier,
80
+ },
81
+ finalScore: m.importance ?? 0.5,
82
+ })) as ScoredMemory[],
83
+ session: localSession.map((m) => ({
84
+ memory: {
85
+ id: m.id,
86
+ content: m.content,
87
+ agent_id: m.agent_id,
88
+ user_id: m.user_id,
89
+ metadata: m.metadata as Record<string, string>,
90
+ entities: m.entities as string[],
91
+ created_at: m.created_at,
92
+ updated_at: m.updated_at,
93
+ importance: m.importance,
94
+ tier: m.tier,
95
+ },
96
+ finalScore: m.importance ?? 0.5,
97
+ })) as ScoredMemory[],
98
+ source: "local" as const,
99
+ },
100
+ };
101
+ }
102
+ }
103
+ } catch {
104
+ // Graceful degradation: fall through to API search
105
+ }
106
+ }
107
+
108
+ // Fallback: API search
30
109
  const [longTerm, session] = await Promise.all([
31
110
  client.search(input.prompt, limit, threshold, { scope: "long-term", namespace }),
32
111
  client.search(input.prompt, limit, threshold, { scope: "session", session_id: sessionId, namespace }),
@@ -37,6 +116,7 @@ export const recallSearch: RecallStage = {
37
116
  ...input,
38
117
  longTerm: longTerm.map(r => ({ memory: r.memory, finalScore: r.score })),
39
118
  session: session.map(r => ({ memory: r.memory, finalScore: r.score })),
119
+ source: "api" as const,
40
120
  },
41
121
  };
42
122
  },
@@ -66,6 +66,7 @@ export interface PluginConfig {
66
66
  importanceBoost?: boolean;
67
67
  tierBoost?: boolean;
68
68
  };
69
+ syncIntervalMinutes?: number;
69
70
  sessionTimeoutMinutes?: number;
70
71
  sessionCleanupIntervalMinutes?: number;
71
72
  debug?: boolean;
@@ -115,11 +116,34 @@ export interface SessionResolverLike {
115
116
  resolve(requestCtx: RequestContext): Promise<{ sessionId: string; externalId: string }>;
116
117
  }
117
118
 
119
+ export interface LocalCacheLike {
120
+ bufferWrite(content: string, metadata: Record<string, unknown>): string;
121
+ bufferDepth(): number;
122
+ count(): number;
123
+ search(query: string, opts?: { limit?: number; scope?: string; sessionId?: string; namespace?: string }): Array<{
124
+ id: string; content: string; agent_id: string; user_id: string;
125
+ metadata: Record<string, unknown>; entities: unknown[];
126
+ importance: number; tier: "hot" | "warm" | "cold";
127
+ created_at: string; updated_at: string;
128
+ }>;
129
+ getSyncState(): { lastPull: string | null; lastPush: string | null; cursor: string | null };
130
+ close(): void;
131
+ }
132
+
133
+ export interface SyncDaemonLike {
134
+ start(): void;
135
+ stop(): void;
136
+ pull(): Promise<{ added: number; updated: number }>;
137
+ isRunning(): boolean;
138
+ }
139
+
118
140
  export interface PipelineContext {
119
141
  readonly requestCtx: RequestContext;
120
142
  readonly config: PluginConfig;
121
143
  readonly client: MemoryRelayClient;
122
144
  readonly sessionResolver?: SessionResolverLike;
145
+ readonly localCache?: LocalCacheLike;
146
+ readonly syncDaemon?: SyncDaemonLike;
123
147
  }
124
148
 
125
149
  export interface RecallInput {
@@ -129,6 +153,7 @@ export interface RecallInput {
129
153
  resolvedSessionKey?: string;
130
154
  longTerm?: ScoredMemory[];
131
155
  session?: ScoredMemory[];
156
+ source?: "local" | "api";
132
157
  formatted?: string;
133
158
  }
134
159
 
@@ -147,7 +172,7 @@ export interface CaptureInput {
147
172
  }
148
173
 
149
174
  export type CaptureResult =
150
- | { action: "continue"; data: CaptureInput }
175
+ | { action: "continue"; data: CaptureInput; buffered?: boolean }
151
176
  | { action: "skip" };
152
177
 
153
178
  export interface CaptureStage {
@@ -280,7 +280,7 @@ export class StatusReporter {
280
280
  /**
281
281
  * Format time ago string
282
282
  */
283
- private static formatTimeAgo(date: Date): string {
283
+ static formatTimeAgo(date: Date): string {
284
284
  const seconds = Math.floor((Date.now() - date.getTime()) / 1000);
285
285
 
286
286
  if (seconds < 60) return `${seconds} seconds ago`;