@xfabric/memory 0.1.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (69)
  1. package/dist/index.d.ts +10 -3
  2. package/dist/index.d.ts.map +1 -1
  3. package/dist/index.js +14 -2
  4. package/dist/index.js.map +1 -1
  5. package/dist/memory-manager.d.ts +83 -1
  6. package/dist/memory-manager.d.ts.map +1 -1
  7. package/dist/memory-manager.js +491 -8
  8. package/dist/memory-manager.js.map +1 -1
  9. package/dist/plugin/index.d.ts +8 -0
  10. package/dist/plugin/index.d.ts.map +1 -0
  11. package/dist/plugin/index.js +130 -0
  12. package/dist/plugin/index.js.map +1 -0
  13. package/dist/providers/batch-gemini.d.ts +20 -0
  14. package/dist/providers/batch-gemini.d.ts.map +1 -0
  15. package/dist/providers/batch-gemini.js +279 -0
  16. package/dist/providers/batch-gemini.js.map +1 -0
  17. package/dist/providers/batch-openai.d.ts +20 -0
  18. package/dist/providers/batch-openai.d.ts.map +1 -0
  19. package/dist/providers/batch-openai.js +267 -0
  20. package/dist/providers/batch-openai.js.map +1 -0
  21. package/dist/providers/batch-runner.d.ts +62 -0
  22. package/dist/providers/batch-runner.d.ts.map +1 -0
  23. package/dist/providers/batch-runner.js +141 -0
  24. package/dist/providers/batch-runner.js.map +1 -0
  25. package/dist/providers/index.d.ts +3 -0
  26. package/dist/providers/index.d.ts.map +1 -1
  27. package/dist/providers/index.js +4 -0
  28. package/dist/providers/index.js.map +1 -1
  29. package/dist/sync/index.d.ts +1 -0
  30. package/dist/sync/index.d.ts.map +1 -1
  31. package/dist/sync/index.js +1 -0
  32. package/dist/sync/index.js.map +1 -1
  33. package/dist/sync/session-events.d.ts +23 -0
  34. package/dist/sync/session-events.d.ts.map +1 -0
  35. package/dist/sync/session-events.js +35 -0
  36. package/dist/sync/session-events.js.map +1 -0
  37. package/dist/types.d.ts +125 -0
  38. package/dist/types.d.ts.map +1 -1
  39. package/dist/types.js +21 -0
  40. package/dist/types.js.map +1 -1
  41. package/dist/utils/cache-key.d.ts +73 -0
  42. package/dist/utils/cache-key.d.ts.map +1 -0
  43. package/dist/utils/cache-key.js +62 -0
  44. package/dist/utils/cache-key.js.map +1 -0
  45. package/dist/utils/error-detection.d.ts +20 -0
  46. package/dist/utils/error-detection.d.ts.map +1 -0
  47. package/dist/utils/error-detection.js +28 -0
  48. package/dist/utils/error-detection.js.map +1 -0
  49. package/dist/utils/fingerprint.d.ts +9 -0
  50. package/dist/utils/fingerprint.d.ts.map +1 -0
  51. package/dist/utils/fingerprint.js +27 -0
  52. package/dist/utils/fingerprint.js.map +1 -0
  53. package/dist/utils/index.d.ts +6 -0
  54. package/dist/utils/index.d.ts.map +1 -1
  55. package/dist/utils/index.js +7 -0
  56. package/dist/utils/index.js.map +1 -1
  57. package/dist/utils/lazy-manager.d.ts +26 -0
  58. package/dist/utils/lazy-manager.d.ts.map +1 -0
  59. package/dist/utils/lazy-manager.js +50 -0
  60. package/dist/utils/lazy-manager.js.map +1 -0
  61. package/dist/utils/provider-key.d.ts +22 -0
  62. package/dist/utils/provider-key.d.ts.map +1 -0
  63. package/dist/utils/provider-key.js +31 -0
  64. package/dist/utils/provider-key.js.map +1 -0
  65. package/dist/utils/status-format.d.ts +44 -0
  66. package/dist/utils/status-format.d.ts.map +1 -0
  67. package/dist/utils/status-format.js +39 -0
  68. package/dist/utils/status-format.js.map +1 -0
  69. package/package.json +22 -12
@@ -1,15 +1,18 @@
1
1
  import { join, relative, resolve, dirname, basename } from "node:path";
2
- import { readFile, stat, readdir, copyFile, rename, unlink } from "node:fs/promises";
2
+ import { readFile, stat, readdir, copyFile, rename, unlink, open as fsOpen } from "node:fs/promises";
3
3
  import { existsSync, mkdirSync } from "node:fs";
4
+ import { SESSION_DIRTY_DEBOUNCE_MS, EMBEDDING_RETRY_MAX_ATTEMPTS, EMBEDDING_RETRY_BASE_DELAY_MS, EMBEDDING_RETRY_MAX_DELAY_MS, OPENAI_BATCH_ENDPOINT, } from "./types.js";
4
5
  import { hashText } from "./utils/hash.js";
5
6
  import { retry } from "./utils/retry.js";
6
7
  import { Semaphore } from "./utils/concurrency.js";
7
- import { createEmbeddingProvider, } from "./providers/index.js";
8
+ import { isRetryableEmbeddingError, isBatchTimeoutError, isBatchUnavailableError, } from "./utils/error-detection.js";
9
+ import { createEmbeddingProvider, runOpenAiEmbeddingBatches, runGeminiEmbeddingBatches, BatchFailureTracker, } from "./providers/index.js";
8
10
  import { MemoryStorage } from "./storage/sqlite.js";
9
11
  import { loadSqliteVec } from "./storage/sqlite-vec.js";
10
12
  import { chunkMarkdown } from "./chunking/markdown.js";
11
13
  import { chunkSession } from "./chunking/session.js";
12
14
  import { FileWatcher } from "./sync/watcher.js";
15
+ import { onSessionTranscriptUpdate } from "./sync/session-events.js";
13
16
  import { searchVector, searchVectorWithSqliteVec } from "./search/vector.js";
14
17
  import { searchFts } from "./search/fts.js";
15
18
  import { mergeHybridResults } from "./search/hybrid.js";
@@ -24,6 +27,10 @@ const DEFAULT_BATCH_CONFIG = {
24
27
  timeoutMs: 60000,
25
28
  maxConsecutiveFailures: 3,
26
29
  };
30
+ // Additional constants for extended features
31
+ const SESSION_DELTA_READ_CHUNK_BYTES = 64 * 1024;
32
+ const EMBEDDING_BATCH_MAX_TOKENS = 8000;
33
+ const EMBEDDING_APPROX_CHARS_PER_TOKEN = 1;
27
34
  /**
28
35
  * Memory manager for indexing and searching workspace content
29
36
  */
@@ -47,6 +54,10 @@ export class MemoryManager {
47
54
  // Batch processing state
48
55
  batchDisabled = false;
49
56
  batchDisableReason;
57
+ batchFailureTracker = new BatchFailureTracker();
58
+ // OpenAI/Gemini client references for batch API
59
+ openAiClient;
60
+ geminiClient;
50
61
  // sqlite-vec availability
51
62
  sqliteVecLoaded = false;
52
63
  sqliteVecError;
@@ -54,6 +65,15 @@ export class MemoryManager {
54
65
  syncIntervalTimer = null;
55
66
  // Last sync timestamp
56
67
  lastSyncAt;
68
+ // Session delta tracking for incremental sync
69
+ sessionDeltas = new Map();
70
+ // Session warm tracking (dedupe warmSession calls)
71
+ sessionWarm = new Set();
72
+ // Session pending files for debounced processing
73
+ sessionPendingFiles = new Set();
74
+ sessionWatchTimer = null;
75
+ // Session event listener unsubscribe function
76
+ sessionUnsubscribe = null;
57
77
  constructor(config, storage) {
58
78
  this.config = config;
59
79
  this.storage = storage;
@@ -85,10 +105,21 @@ export class MemoryManager {
85
105
  enabled: config.sync?.enabled ?? true,
86
106
  intervalMs: extConfig.sync?.intervalMs ?? 0,
87
107
  syncSessions: extConfig.sync?.syncSessions ?? false,
108
+ onSearch: extConfig.sync?.onSearch ?? false,
109
+ onSessionStart: extConfig.sync?.onSessionStart ?? false,
110
+ intervalMinutes: extConfig.sync?.intervalMinutes ?? 0,
111
+ sessions: extConfig.sync?.sessions
112
+ ? {
113
+ deltaBytes: extConfig.sync.sessions.deltaBytes ?? 0,
114
+ deltaMessages: extConfig.sync.sessions.deltaMessages ?? 0,
115
+ }
116
+ : undefined,
88
117
  },
89
118
  batch: {
90
119
  ...DEFAULT_BATCH_CONFIG,
91
120
  ...extConfig.batch,
121
+ wait: extConfig.batch?.wait ?? true,
122
+ pollIntervalMs: extConfig.batch?.pollIntervalMs ?? 2000,
92
123
  },
93
124
  sessionsDir: extConfig.sessionsDir,
94
125
  extraPaths: extConfig.extraPaths,
@@ -117,9 +148,11 @@ export class MemoryManager {
117
148
  await manager.startWatching();
118
149
  }
119
150
  // Start interval sync if configured
120
- if (normalizedConfig.sync.intervalMs > 0) {
151
+ if (normalizedConfig.sync.intervalMs > 0 || normalizedConfig.sync.intervalMinutes > 0) {
121
152
  manager.startIntervalSync();
122
153
  }
154
+ // Set up session event listener for push-based notifications
155
+ manager.setupSessionListener();
123
156
  // Cache instance
124
157
  INDEX_CACHE.set(cacheKey, manager);
125
158
  return manager;
@@ -168,10 +201,29 @@ export class MemoryManager {
168
201
  get providerKey() {
169
202
  return computeProviderKey(this.provider.id, this.config.remote?.baseUrl, this.config.remote?.model);
170
203
  }
204
+ /**
205
+ * Pre-sync on session start with deduplication
206
+ */
207
+ async warmSession(sessionKey) {
208
+ if (!this.config.sync.onSessionStart)
209
+ return;
210
+ const key = sessionKey?.trim() || "";
211
+ if (key && this.sessionWarm.has(key))
212
+ return;
213
+ void this.sync().catch(() => { });
214
+ if (key)
215
+ this.sessionWarm.add(key);
216
+ }
171
217
  /**
172
218
  * Search memory for relevant content
173
219
  */
174
220
  async search(query, options = {}) {
221
+ // Warm session if configured
222
+ void this.warmSession(options.sessionKey);
223
+ // Sync before search if configured and dirty
224
+ if (this.config.sync.onSearch && (this.memoryDirty || this.sessionsDirty)) {
225
+ void this.sync().catch(() => { });
226
+ }
175
227
  const maxResults = options.maxResults ?? this.config.query.maxResults;
176
228
  const minScore = options.minScore ?? this.config.query.minScore;
177
229
  // Get query embedding
@@ -827,6 +879,252 @@ export class MemoryManager {
827
879
  }
828
880
  return results;
829
881
  }
882
+ /**
883
+ * Embed batch with exponential backoff retry (500ms-8s, 3 attempts)
884
+ */
885
+ async embedBatchWithRetry(texts) {
886
+ if (texts.length === 0)
887
+ return [];
888
+ let attempt = 0;
889
+ let delayMs = EMBEDDING_RETRY_BASE_DELAY_MS;
890
+ while (true) {
891
+ try {
892
+ return await this.provider.embedBatch(texts);
893
+ }
894
+ catch (err) {
895
+ const message = err instanceof Error ? err.message : String(err);
896
+ if (!isRetryableEmbeddingError(message) || attempt >= EMBEDDING_RETRY_MAX_ATTEMPTS) {
897
+ throw err;
898
+ }
899
+ const waitMs = Math.min(EMBEDDING_RETRY_MAX_DELAY_MS, Math.round(delayMs * (1 + Math.random() * 0.2)));
900
+ await new Promise((resolve) => setTimeout(resolve, waitMs));
901
+ delayMs *= 2;
902
+ attempt += 1;
903
+ }
904
+ }
905
+ }
906
+ /**
907
+ * Build embedding batches respecting token limits
908
+ */
909
+ buildEmbeddingBatches(texts) {
910
+ const batches = [];
911
+ let current = [];
912
+ let currentTokens = 0;
913
+ for (const text of texts) {
914
+ const estimate = Math.ceil(text.length / EMBEDDING_APPROX_CHARS_PER_TOKEN);
915
+ const wouldExceed = current.length > 0 && currentTokens + estimate > EMBEDDING_BATCH_MAX_TOKENS;
916
+ if (wouldExceed) {
917
+ batches.push(current);
918
+ current = [];
919
+ currentTokens = 0;
920
+ }
921
+ if (current.length === 0 && estimate > EMBEDDING_BATCH_MAX_TOKENS) {
922
+ batches.push([text]);
923
+ continue;
924
+ }
925
+ current.push(text);
926
+ currentTokens += estimate;
927
+ }
928
+ if (current.length > 0) {
929
+ batches.push(current);
930
+ }
931
+ return batches;
932
+ }
933
+ /**
934
+ * Run batch with timeout retry (single retry on timeout)
935
+ */
936
+ async runBatchWithTimeoutRetry(params) {
937
+ try {
938
+ return await params.run();
939
+ }
940
+ catch (err) {
941
+ const message = err instanceof Error ? err.message : String(err);
942
+ if (isBatchTimeoutError(message)) {
943
+ try {
944
+ return await params.run();
945
+ }
946
+ catch (retryErr) {
947
+ retryErr.batchAttempts = 2;
948
+ throw retryErr;
949
+ }
950
+ }
951
+ throw err;
952
+ }
953
+ }
954
+ /**
955
+ * Run batch with fallback to inline embedding on failure
956
+ */
957
+ async runBatchWithFallback(params) {
958
+ if (this.batchDisabled || this.batchFailureTracker.disabled) {
959
+ return await params.fallback();
960
+ }
961
+ try {
962
+ const result = await this.runBatchWithTimeoutRetry({
963
+ provider: params.provider,
964
+ run: params.run,
965
+ });
966
+ await this.batchFailureTracker.reset();
967
+ return result;
968
+ }
969
+ catch (err) {
970
+ const message = err instanceof Error ? err.message : String(err);
971
+ const attempts = err.batchAttempts ?? 1;
972
+ const forceDisable = isBatchUnavailableError(message);
973
+ await this.batchFailureTracker.recordFailure({
974
+ provider: params.provider,
975
+ message,
976
+ attempts,
977
+ forceDisable,
978
+ });
979
+ return await params.fallback();
980
+ }
981
+ }
982
+ /**
983
+ * Embed chunks using async batch API (OpenAI or Gemini)
984
+ * Exposed for advanced use cases where caller controls chunking
985
+ */
986
+ async embedChunksWithAsyncBatch(chunks, entry, source) {
987
+ const provider = this.provider;
988
+ // Check cache for existing embeddings
989
+ const embeddings = Array.from({ length: chunks.length }, () => []);
990
+ const missing = [];
991
+ for (let i = 0; i < chunks.length; i++) {
992
+ const chunk = chunks[i];
993
+ const cached = this.storage.getCachedEmbedding(provider.id, provider.model, this.providerKey, chunk.hash);
994
+ if (cached && cached.length > 0) {
995
+ embeddings[i] = cached;
996
+ }
997
+ else {
998
+ missing.push({ index: i, chunk });
999
+ }
1000
+ }
1001
+ if (missing.length === 0)
1002
+ return embeddings;
1003
+ // Use async batch API if available
1004
+ if (provider.id === "openai" && this.openAiClient && this.config.batch.enabled) {
1005
+ return this.embedChunksWithOpenAiBatch(missing, embeddings, entry, source);
1006
+ }
1007
+ if (provider.id === "gemini" && this.geminiClient && this.config.batch.enabled) {
1008
+ return this.embedChunksWithGeminiBatch(missing, embeddings, entry, source);
1009
+ }
1010
+ // Fallback to inline batch
1011
+ return this.embedChunksInline(missing, embeddings);
1012
+ }
1013
+ /**
1014
+ * Embed chunks using OpenAI async batch API
1015
+ */
1016
+ async embedChunksWithOpenAiBatch(missing, embeddings, entry, source) {
1017
+ const openAi = this.openAiClient;
1018
+ if (!openAi)
1019
+ return this.embedChunksInline(missing, embeddings);
1020
+ const requests = [];
1021
+ const mapping = new Map();
1022
+ for (const item of missing) {
1023
+ const chunk = item.chunk;
1024
+ const customId = hashText(`${source}:${entry.path}:${chunk.startLine}:${chunk.endLine}:${chunk.hash}:${item.index}`);
1025
+ mapping.set(customId, { index: item.index, hash: chunk.hash });
1026
+ requests.push({
1027
+ custom_id: customId,
1028
+ method: "POST",
1029
+ url: OPENAI_BATCH_ENDPOINT,
1030
+ body: {
1031
+ model: openAi.model,
1032
+ input: chunk.text,
1033
+ },
1034
+ });
1035
+ }
1036
+ const batchResult = await this.runBatchWithFallback({
1037
+ provider: "openai",
1038
+ run: async () => await runOpenAiEmbeddingBatches({
1039
+ openAi,
1040
+ agentId: this.config.agentId,
1041
+ requests,
1042
+ wait: this.config.batch.wait,
1043
+ concurrency: this.config.batch.concurrency,
1044
+ pollIntervalMs: this.config.batch.pollIntervalMs,
1045
+ timeoutMs: this.config.batch.timeoutMs,
1046
+ }),
1047
+ fallback: async () => await this.embedChunksInline(missing, embeddings),
1048
+ });
1049
+ if (Array.isArray(batchResult))
1050
+ return batchResult;
1051
+ const byCustomId = batchResult;
1052
+ // Map results back and cache
1053
+ for (const [customId, embedding] of byCustomId.entries()) {
1054
+ const mapped = mapping.get(customId);
1055
+ if (!mapped)
1056
+ continue;
1057
+ embeddings[mapped.index] = embedding;
1058
+ this.storage.cacheEmbedding(this.provider.id, this.provider.model, this.providerKey, mapped.hash, embedding);
1059
+ }
1060
+ return embeddings;
1061
+ }
1062
+ /**
1063
+ * Embed chunks using Gemini async batch API
1064
+ */
1065
+ async embedChunksWithGeminiBatch(missing, embeddings, entry, source) {
1066
+ const gemini = this.geminiClient;
1067
+ if (!gemini)
1068
+ return this.embedChunksInline(missing, embeddings);
1069
+ const requests = [];
1070
+ const mapping = new Map();
1071
+ for (const item of missing) {
1072
+ const chunk = item.chunk;
1073
+ const customId = hashText(`${source}:${entry.path}:${chunk.startLine}:${chunk.endLine}:${chunk.hash}:${item.index}`);
1074
+ mapping.set(customId, { index: item.index, hash: chunk.hash });
1075
+ requests.push({
1076
+ custom_id: customId,
1077
+ content: { parts: [{ text: chunk.text }] },
1078
+ taskType: "RETRIEVAL_DOCUMENT",
1079
+ });
1080
+ }
1081
+ const batchResult = await this.runBatchWithFallback({
1082
+ provider: "gemini",
1083
+ run: async () => await runGeminiEmbeddingBatches({
1084
+ gemini,
1085
+ agentId: this.config.agentId,
1086
+ requests,
1087
+ wait: this.config.batch.wait,
1088
+ concurrency: this.config.batch.concurrency,
1089
+ pollIntervalMs: this.config.batch.pollIntervalMs,
1090
+ timeoutMs: this.config.batch.timeoutMs,
1091
+ }),
1092
+ fallback: async () => await this.embedChunksInline(missing, embeddings),
1093
+ });
1094
+ if (Array.isArray(batchResult))
1095
+ return batchResult;
1096
+ const byCustomId = batchResult;
1097
+ // Map results back and cache
1098
+ for (const [customId, embedding] of byCustomId.entries()) {
1099
+ const mapped = mapping.get(customId);
1100
+ if (!mapped)
1101
+ continue;
1102
+ embeddings[mapped.index] = embedding;
1103
+ this.storage.cacheEmbedding(this.provider.id, this.provider.model, this.providerKey, mapped.hash, embedding);
1104
+ }
1105
+ return embeddings;
1106
+ }
1107
+ /**
1108
+ * Embed chunks using inline batch API (fallback)
1109
+ */
1110
+ async embedChunksInline(missing, embeddings) {
1111
+ const texts = missing.map((m) => m.chunk.text);
1112
+ const batches = this.buildEmbeddingBatches(texts);
1113
+ let cursor = 0;
1114
+ for (const batch of batches) {
1115
+ const batchEmbeddings = await this.embedBatchWithRetry(batch);
1116
+ for (let i = 0; i < batch.length; i++) {
1117
+ const item = missing[cursor + i];
1118
+ const embedding = batchEmbeddings[i] ?? [];
1119
+ if (item) {
1120
+ embeddings[item.index] = embedding;
1121
+ this.storage.cacheEmbedding(this.provider.id, this.provider.model, this.providerKey, item.chunk.hash, embedding);
1122
+ }
1123
+ }
1124
+ cursor += batch.length;
1125
+ }
1126
+ return embeddings;
1127
+ }
830
1128
  /**
831
1129
  * Start watching for file changes
832
1130
  */
@@ -847,6 +1145,175 @@ export class MemoryManager {
847
1145
  });
848
1146
  await this.watcher.start();
849
1147
  }
1148
+ /**
1149
+ * Schedule session dirty check with debouncing
1150
+ * Call this when a session file is modified externally
1151
+ */
1152
+ scheduleSessionDirty(sessionFile) {
1153
+ this.sessionPendingFiles.add(sessionFile);
1154
+ if (this.sessionWatchTimer)
1155
+ return;
1156
+ this.sessionWatchTimer = setTimeout(() => {
1157
+ this.sessionWatchTimer = null;
1158
+ void this.processSessionDeltaBatch().catch(() => { });
1159
+ }, SESSION_DIRTY_DEBOUNCE_MS);
1160
+ }
1161
+ /**
1162
+ * Process pending session files and check delta thresholds
1163
+ */
1164
+ async processSessionDeltaBatch() {
1165
+ if (this.sessionPendingFiles.size === 0)
1166
+ return;
1167
+ const pending = Array.from(this.sessionPendingFiles);
1168
+ this.sessionPendingFiles.clear();
1169
+ let shouldSync = false;
1170
+ for (const sessionFile of pending) {
1171
+ const delta = await this.updateSessionDelta(sessionFile);
1172
+ if (!delta)
1173
+ continue;
1174
+ const bytesThreshold = delta.deltaBytes;
1175
+ const messagesThreshold = delta.deltaMessages;
1176
+ const bytesHit = bytesThreshold <= 0 ? delta.pendingBytes > 0 : delta.pendingBytes >= bytesThreshold;
1177
+ const messagesHit = messagesThreshold <= 0
1178
+ ? delta.pendingMessages > 0
1179
+ : delta.pendingMessages >= messagesThreshold;
1180
+ if (!bytesHit && !messagesHit)
1181
+ continue;
1182
+ this.sessionsDirtyFiles.add(sessionFile);
1183
+ this.sessionsDirty = true;
1184
+ delta.pendingBytes =
1185
+ bytesThreshold > 0 ? Math.max(0, delta.pendingBytes - bytesThreshold) : 0;
1186
+ delta.pendingMessages =
1187
+ messagesThreshold > 0 ? Math.max(0, delta.pendingMessages - messagesThreshold) : 0;
1188
+ shouldSync = true;
1189
+ }
1190
+ if (shouldSync) {
1191
+ void this.sync().catch(() => { });
1192
+ }
1193
+ }
1194
+ /**
1195
+ * Update session delta tracking for a file
1196
+ */
1197
+ async updateSessionDelta(sessionFile) {
1198
+ const thresholds = this.config.sync.sessions;
1199
+ if (!thresholds)
1200
+ return null;
1201
+ let fileStat;
1202
+ try {
1203
+ fileStat = await stat(sessionFile);
1204
+ }
1205
+ catch {
1206
+ return null;
1207
+ }
1208
+ const size = fileStat.size;
1209
+ let state = this.sessionDeltas.get(sessionFile);
1210
+ if (!state) {
1211
+ state = { lastSize: 0, pendingBytes: 0, pendingMessages: 0 };
1212
+ this.sessionDeltas.set(sessionFile, state);
1213
+ }
1214
+ const deltaBytes = Math.max(0, size - state.lastSize);
1215
+ if (deltaBytes === 0 && size === state.lastSize) {
1216
+ return {
1217
+ deltaBytes: thresholds.deltaBytes,
1218
+ deltaMessages: thresholds.deltaMessages,
1219
+ pendingBytes: state.pendingBytes,
1220
+ pendingMessages: state.pendingMessages,
1221
+ };
1222
+ }
1223
+ if (size < state.lastSize) {
1224
+ // File was truncated, reset
1225
+ state.lastSize = size;
1226
+ state.pendingBytes += size;
1227
+ const shouldCountMessages = thresholds.deltaMessages > 0 &&
1228
+ (thresholds.deltaBytes <= 0 || state.pendingBytes < thresholds.deltaBytes);
1229
+ if (shouldCountMessages) {
1230
+ state.pendingMessages += await this.countNewlines(sessionFile, 0, size);
1231
+ }
1232
+ }
1233
+ else {
1234
+ state.pendingBytes += deltaBytes;
1235
+ const shouldCountMessages = thresholds.deltaMessages > 0 &&
1236
+ (thresholds.deltaBytes <= 0 || state.pendingBytes < thresholds.deltaBytes);
1237
+ if (shouldCountMessages) {
1238
+ state.pendingMessages += await this.countNewlines(sessionFile, state.lastSize, size);
1239
+ }
1240
+ state.lastSize = size;
1241
+ }
1242
+ this.sessionDeltas.set(sessionFile, state);
1243
+ return {
1244
+ deltaBytes: thresholds.deltaBytes,
1245
+ deltaMessages: thresholds.deltaMessages,
1246
+ pendingBytes: state.pendingBytes,
1247
+ pendingMessages: state.pendingMessages,
1248
+ };
1249
+ }
1250
+ /**
1251
+ * Count newlines in a file range (for message counting)
1252
+ */
1253
+ async countNewlines(absPath, start, end) {
1254
+ if (end <= start)
1255
+ return 0;
1256
+ const handle = await fsOpen(absPath, "r");
1257
+ try {
1258
+ let offset = start;
1259
+ let count = 0;
1260
+ const buffer = Buffer.alloc(SESSION_DELTA_READ_CHUNK_BYTES);
1261
+ while (offset < end) {
1262
+ const toRead = Math.min(buffer.length, end - offset);
1263
+ const { bytesRead } = await handle.read(buffer, 0, toRead, offset);
1264
+ if (bytesRead <= 0)
1265
+ break;
1266
+ for (let i = 0; i < bytesRead; i += 1) {
1267
+ if (buffer[i] === 10)
1268
+ count += 1;
1269
+ }
1270
+ offset += bytesRead;
1271
+ }
1272
+ return count;
1273
+ }
1274
+ finally {
1275
+ await handle.close();
1276
+ }
1277
+ }
1278
+ /**
1279
+ * Reset session delta tracking for a file
1280
+ * Call this after successfully processing a session file
1281
+ */
1282
+ resetSessionDelta(absPath, size) {
1283
+ const state = this.sessionDeltas.get(absPath);
1284
+ if (!state)
1285
+ return;
1286
+ state.lastSize = size;
1287
+ state.pendingBytes = 0;
1288
+ state.pendingMessages = 0;
1289
+ }
1290
+ /**
1291
+ * Check if a session file belongs to this agent's sessions directory
1292
+ */
1293
+ isSessionFileForAgent(sessionFile) {
1294
+ if (!sessionFile || !this.config.sessionsDir)
1295
+ return false;
1296
+ const resolvedFile = resolve(sessionFile);
1297
+ const resolvedDir = resolve(this.config.sessionsDir);
1298
+ return resolvedFile.startsWith(`${resolvedDir}/`) || resolvedFile.startsWith(`${resolvedDir}\\`);
1299
+ }
1300
+ /**
1301
+ * Set up session transcript event listener
1302
+ * This provides push-based notifications when session files change
1303
+ * Called automatically during create(), but can be called manually if needed
1304
+ */
1305
+ setupSessionListener() {
1306
+ if (!this.config.sync.syncSessions || this.sessionUnsubscribe)
1307
+ return;
1308
+ this.sessionUnsubscribe = onSessionTranscriptUpdate((update) => {
1309
+ if (this.closed)
1310
+ return;
1311
+ const sessionFile = update.sessionFile;
1312
+ if (!this.isSessionFileForAgent(sessionFile))
1313
+ return;
1314
+ this.scheduleSessionDirty(sessionFile);
1315
+ });
1316
+ }
850
1317
  /**
851
1318
  * Start interval-based sync
852
1319
  */
@@ -854,11 +1321,17 @@ export class MemoryManager {
854
1321
  if (this.syncIntervalTimer) {
855
1322
  return;
856
1323
  }
1324
+ // Support both intervalMs and intervalMinutes
1325
+ const ms = this.config.sync.intervalMinutes > 0
1326
+ ? this.config.sync.intervalMinutes * 60 * 1000
1327
+ : this.config.sync.intervalMs;
1328
+ if (ms <= 0)
1329
+ return;
857
1330
  this.syncIntervalTimer = setInterval(() => {
858
1331
  if (this.memoryDirty || this.sessionsDirty) {
859
1332
  this.sync().catch(() => { });
860
1333
  }
861
- }, this.config.sync.intervalMs);
1334
+ }, ms);
862
1335
  }
863
1336
  /**
864
1337
  * Stop interval-based sync
@@ -884,6 +1357,16 @@ export class MemoryManager {
884
1357
  async close() {
885
1358
  this.stopIntervalSync();
886
1359
  await this.stopWatching();
1360
+ // Clean up session watch timer
1361
+ if (this.sessionWatchTimer) {
1362
+ clearTimeout(this.sessionWatchTimer);
1363
+ this.sessionWatchTimer = null;
1364
+ }
1365
+ // Unsubscribe from session events
1366
+ if (this.sessionUnsubscribe) {
1367
+ this.sessionUnsubscribe();
1368
+ this.sessionUnsubscribe = null;
1369
+ }
887
1370
  this.storage.close();
888
1371
  this.closed = true;
889
1372
  // Remove from cache
@@ -951,10 +1434,10 @@ export class MemoryManager {
951
1434
  consecutiveFailures: this.consecutiveEmbeddingFailures,
952
1435
  },
953
1436
  batch: {
954
- enabled: this.config.batch.enabled && !this.batchDisabled,
955
- pendingJobs: 0, // TODO: track pending batch jobs
956
- autoDisabled: this.batchDisabled,
957
- disableReason: this.batchDisableReason,
1437
+ enabled: this.config.batch.enabled && !this.batchDisabled && !this.batchFailureTracker.disabled,
1438
+ pendingJobs: 0,
1439
+ autoDisabled: this.batchDisabled || this.batchFailureTracker.disabled,
1440
+ disableReason: this.batchDisableReason ?? this.batchFailureTracker.error,
958
1441
  },
959
1442
  sync: {
960
1443
  watching: this.watcher !== null,