@danielsimonjr/memory-mcp 0.48.0 → 9.8.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (209)
  1. package/LICENSE +22 -0
  2. package/README.md +2000 -194
  3. package/dist/__tests__/file-path.test.js +7 -11
  4. package/dist/__tests__/knowledge-graph.test.js +3 -8
  5. package/dist/core/EntityManager.d.ts +266 -0
  6. package/dist/core/EntityManager.d.ts.map +1 -0
  7. package/dist/core/EntityManager.js +85 -133
  8. package/dist/core/GraphEventEmitter.d.ts +202 -0
  9. package/dist/core/GraphEventEmitter.d.ts.map +1 -0
  10. package/dist/core/GraphEventEmitter.js +346 -0
  11. package/dist/core/GraphStorage.d.ts +395 -0
  12. package/dist/core/GraphStorage.d.ts.map +1 -0
  13. package/dist/core/GraphStorage.js +643 -31
  14. package/dist/core/GraphTraversal.d.ts +141 -0
  15. package/dist/core/GraphTraversal.d.ts.map +1 -0
  16. package/dist/core/GraphTraversal.js +573 -0
  17. package/dist/core/HierarchyManager.d.ts +111 -0
  18. package/dist/core/HierarchyManager.d.ts.map +1 -0
  19. package/dist/{features → core}/HierarchyManager.js +14 -9
  20. package/dist/core/ManagerContext.d.ts +72 -0
  21. package/dist/core/ManagerContext.d.ts.map +1 -0
  22. package/dist/core/ManagerContext.js +118 -0
  23. package/dist/core/ObservationManager.d.ts +85 -0
  24. package/dist/core/ObservationManager.d.ts.map +1 -0
  25. package/dist/core/ObservationManager.js +51 -57
  26. package/dist/core/RelationManager.d.ts +131 -0
  27. package/dist/core/RelationManager.d.ts.map +1 -0
  28. package/dist/core/RelationManager.js +31 -7
  29. package/dist/core/SQLiteStorage.d.ts +354 -0
  30. package/dist/core/SQLiteStorage.d.ts.map +1 -0
  31. package/dist/core/SQLiteStorage.js +917 -0
  32. package/dist/core/StorageFactory.d.ts +45 -0
  33. package/dist/core/StorageFactory.d.ts.map +1 -0
  34. package/dist/core/StorageFactory.js +64 -0
  35. package/dist/core/TransactionManager.d.ts +464 -0
  36. package/dist/core/TransactionManager.d.ts.map +1 -0
  37. package/dist/core/TransactionManager.js +490 -13
  38. package/dist/core/index.d.ts +17 -0
  39. package/dist/core/index.d.ts.map +1 -0
  40. package/dist/core/index.js +12 -2
  41. package/dist/features/AnalyticsManager.d.ts +44 -0
  42. package/dist/features/AnalyticsManager.d.ts.map +1 -0
  43. package/dist/features/AnalyticsManager.js +3 -2
  44. package/dist/features/ArchiveManager.d.ts +133 -0
  45. package/dist/features/ArchiveManager.d.ts.map +1 -0
  46. package/dist/features/ArchiveManager.js +221 -14
  47. package/dist/features/CompressionManager.d.ts +117 -0
  48. package/dist/features/CompressionManager.d.ts.map +1 -0
  49. package/dist/features/CompressionManager.js +189 -20
  50. package/dist/features/IOManager.d.ts +225 -0
  51. package/dist/features/IOManager.d.ts.map +1 -0
  52. package/dist/features/IOManager.js +1041 -0
  53. package/dist/features/StreamingExporter.d.ts +123 -0
  54. package/dist/features/StreamingExporter.d.ts.map +1 -0
  55. package/dist/features/StreamingExporter.js +203 -0
  56. package/dist/features/TagManager.d.ts +147 -0
  57. package/dist/features/TagManager.d.ts.map +1 -0
  58. package/dist/features/index.d.ts +12 -0
  59. package/dist/features/index.d.ts.map +1 -0
  60. package/dist/features/index.js +5 -6
  61. package/dist/index.d.ts +9 -0
  62. package/dist/index.d.ts.map +1 -0
  63. package/dist/index.js +10 -10
  64. package/dist/memory.jsonl +1 -26
  65. package/dist/search/BasicSearch.d.ts +51 -0
  66. package/dist/search/BasicSearch.d.ts.map +1 -0
  67. package/dist/search/BasicSearch.js +9 -3
  68. package/dist/search/BooleanSearch.d.ts +98 -0
  69. package/dist/search/BooleanSearch.d.ts.map +1 -0
  70. package/dist/search/BooleanSearch.js +156 -9
  71. package/dist/search/EmbeddingService.d.ts +178 -0
  72. package/dist/search/EmbeddingService.d.ts.map +1 -0
  73. package/dist/search/EmbeddingService.js +358 -0
  74. package/dist/search/FuzzySearch.d.ts +118 -0
  75. package/dist/search/FuzzySearch.d.ts.map +1 -0
  76. package/dist/search/FuzzySearch.js +241 -25
  77. package/dist/search/QueryCostEstimator.d.ts +111 -0
  78. package/dist/search/QueryCostEstimator.d.ts.map +1 -0
  79. package/dist/search/QueryCostEstimator.js +355 -0
  80. package/dist/search/RankedSearch.d.ts +71 -0
  81. package/dist/search/RankedSearch.d.ts.map +1 -0
  82. package/dist/search/RankedSearch.js +54 -6
  83. package/dist/search/SavedSearchManager.d.ts +79 -0
  84. package/dist/search/SavedSearchManager.d.ts.map +1 -0
  85. package/dist/search/SearchFilterChain.d.ts +120 -0
  86. package/dist/search/SearchFilterChain.d.ts.map +1 -0
  87. package/dist/search/SearchFilterChain.js +2 -4
  88. package/dist/search/SearchManager.d.ts +326 -0
  89. package/dist/search/SearchManager.d.ts.map +1 -0
  90. package/dist/search/SearchManager.js +148 -0
  91. package/dist/search/SearchSuggestions.d.ts +27 -0
  92. package/dist/search/SearchSuggestions.d.ts.map +1 -0
  93. package/dist/search/SearchSuggestions.js +1 -1
  94. package/dist/search/SemanticSearch.d.ts +149 -0
  95. package/dist/search/SemanticSearch.d.ts.map +1 -0
  96. package/dist/search/SemanticSearch.js +323 -0
  97. package/dist/search/TFIDFEventSync.d.ts +85 -0
  98. package/dist/search/TFIDFEventSync.d.ts.map +1 -0
  99. package/dist/search/TFIDFEventSync.js +133 -0
  100. package/dist/search/TFIDFIndexManager.d.ts +151 -0
  101. package/dist/search/TFIDFIndexManager.d.ts.map +1 -0
  102. package/dist/search/TFIDFIndexManager.js +232 -17
  103. package/dist/search/VectorStore.d.ts +235 -0
  104. package/dist/search/VectorStore.d.ts.map +1 -0
  105. package/dist/search/VectorStore.js +311 -0
  106. package/dist/search/index.d.ts +21 -0
  107. package/dist/search/index.d.ts.map +1 -0
  108. package/dist/search/index.js +12 -0
  109. package/dist/server/MCPServer.d.ts +21 -0
  110. package/dist/server/MCPServer.d.ts.map +1 -0
  111. package/dist/server/MCPServer.js +4 -4
  112. package/dist/server/responseCompressor.d.ts +94 -0
  113. package/dist/server/responseCompressor.d.ts.map +1 -0
  114. package/dist/server/responseCompressor.js +127 -0
  115. package/dist/server/toolDefinitions.d.ts +27 -0
  116. package/dist/server/toolDefinitions.d.ts.map +1 -0
  117. package/dist/server/toolDefinitions.js +188 -17
  118. package/dist/server/toolHandlers.d.ts +41 -0
  119. package/dist/server/toolHandlers.d.ts.map +1 -0
  120. package/dist/server/toolHandlers.js +467 -75
  121. package/dist/types/index.d.ts +13 -0
  122. package/dist/types/index.d.ts.map +1 -0
  123. package/dist/types/index.js +1 -1
  124. package/dist/types/types.d.ts +1654 -0
  125. package/dist/types/types.d.ts.map +1 -0
  126. package/dist/types/types.js +9 -0
  127. package/dist/utils/compressedCache.d.ts +192 -0
  128. package/dist/utils/compressedCache.d.ts.map +1 -0
  129. package/dist/utils/compressedCache.js +309 -0
  130. package/dist/utils/compressionUtil.d.ts +214 -0
  131. package/dist/utils/compressionUtil.d.ts.map +1 -0
  132. package/dist/utils/compressionUtil.js +247 -0
  133. package/dist/utils/constants.d.ts +245 -0
  134. package/dist/utils/constants.d.ts.map +1 -0
  135. package/dist/utils/constants.js +124 -0
  136. package/dist/utils/entityUtils.d.ts +321 -0
  137. package/dist/utils/entityUtils.d.ts.map +1 -0
  138. package/dist/utils/entityUtils.js +434 -4
  139. package/dist/utils/errors.d.ts +95 -0
  140. package/dist/utils/errors.d.ts.map +1 -0
  141. package/dist/utils/errors.js +24 -0
  142. package/dist/utils/formatters.d.ts +145 -0
  143. package/dist/utils/formatters.d.ts.map +1 -0
  144. package/dist/utils/{paginationUtils.js → formatters.js} +54 -3
  145. package/dist/utils/index.d.ts +23 -0
  146. package/dist/utils/index.d.ts.map +1 -0
  147. package/dist/utils/index.js +69 -31
  148. package/dist/utils/indexes.d.ts +270 -0
  149. package/dist/utils/indexes.d.ts.map +1 -0
  150. package/dist/utils/indexes.js +526 -0
  151. package/dist/utils/logger.d.ts +24 -0
  152. package/dist/utils/logger.d.ts.map +1 -0
  153. package/dist/utils/operationUtils.d.ts +124 -0
  154. package/dist/utils/operationUtils.d.ts.map +1 -0
  155. package/dist/utils/operationUtils.js +175 -0
  156. package/dist/utils/parallelUtils.d.ts +72 -0
  157. package/dist/utils/parallelUtils.d.ts.map +1 -0
  158. package/dist/utils/parallelUtils.js +169 -0
  159. package/dist/utils/schemas.d.ts +374 -0
  160. package/dist/utils/schemas.d.ts.map +1 -0
  161. package/dist/utils/schemas.js +302 -2
  162. package/dist/utils/searchAlgorithms.d.ts +99 -0
  163. package/dist/utils/searchAlgorithms.d.ts.map +1 -0
  164. package/dist/utils/searchAlgorithms.js +167 -0
  165. package/dist/utils/searchCache.d.ts +108 -0
  166. package/dist/utils/searchCache.d.ts.map +1 -0
  167. package/dist/utils/taskScheduler.d.ts +290 -0
  168. package/dist/utils/taskScheduler.d.ts.map +1 -0
  169. package/dist/utils/taskScheduler.js +466 -0
  170. package/dist/workers/index.d.ts +12 -0
  171. package/dist/workers/index.d.ts.map +1 -0
  172. package/dist/workers/index.js +9 -0
  173. package/dist/workers/levenshteinWorker.d.ts +60 -0
  174. package/dist/workers/levenshteinWorker.d.ts.map +1 -0
  175. package/dist/workers/levenshteinWorker.js +98 -0
  176. package/package.json +17 -4
  177. package/dist/__tests__/edge-cases/edge-cases.test.js +0 -406
  178. package/dist/__tests__/integration/workflows.test.js +0 -449
  179. package/dist/__tests__/performance/benchmarks.test.js +0 -413
  180. package/dist/__tests__/unit/core/EntityManager.test.js +0 -334
  181. package/dist/__tests__/unit/core/GraphStorage.test.js +0 -205
  182. package/dist/__tests__/unit/core/RelationManager.test.js +0 -274
  183. package/dist/__tests__/unit/features/CompressionManager.test.js +0 -350
  184. package/dist/__tests__/unit/search/BasicSearch.test.js +0 -311
  185. package/dist/__tests__/unit/search/BooleanSearch.test.js +0 -432
  186. package/dist/__tests__/unit/search/FuzzySearch.test.js +0 -448
  187. package/dist/__tests__/unit/search/RankedSearch.test.js +0 -379
  188. package/dist/__tests__/unit/utils/levenshtein.test.js +0 -77
  189. package/dist/core/KnowledgeGraphManager.js +0 -423
  190. package/dist/features/BackupManager.js +0 -311
  191. package/dist/features/ExportManager.js +0 -305
  192. package/dist/features/ImportExportManager.js +0 -50
  193. package/dist/features/ImportManager.js +0 -328
  194. package/dist/memory-saved-searches.jsonl +0 -0
  195. package/dist/memory-tag-aliases.jsonl +0 -0
  196. package/dist/types/analytics.types.js +0 -6
  197. package/dist/types/entity.types.js +0 -7
  198. package/dist/types/import-export.types.js +0 -7
  199. package/dist/types/search.types.js +0 -7
  200. package/dist/types/tag.types.js +0 -6
  201. package/dist/utils/dateUtils.js +0 -89
  202. package/dist/utils/filterUtils.js +0 -155
  203. package/dist/utils/levenshtein.js +0 -62
  204. package/dist/utils/pathUtils.js +0 -115
  205. package/dist/utils/responseFormatter.js +0 -55
  206. package/dist/utils/tagUtils.js +0 -107
  207. package/dist/utils/tfidf.js +0 -90
  208. package/dist/utils/validationHelper.js +0 -99
  209. package/dist/utils/validationUtils.js +0 -109
package/dist/features/IOManager.js
@@ -0,0 +1,1041 @@
+ /**
+  * IO Manager
+  *
+  * Unified manager for import, export, and backup operations.
+  * Consolidates BackupManager, ExportManager, and ImportManager (Sprint 11.4).
+  *
+  * @module features/IOManager
+  */
+ import { promises as fs } from 'fs';
+ import { dirname, join } from 'path';
+ import { FileOperationError } from '../utils/errors.js';
+ import { compress, decompress, hasBrotliExtension, COMPRESSION_CONFIG, STREAMING_CONFIG, checkCancellation, createProgressReporter, createProgress, } from '../utils/index.js';
+ import { StreamingExporter } from './StreamingExporter.js';
+ // ============================================================
+ // IO MANAGER CLASS
+ // ============================================================
+ /**
+  * Unified manager for import, export, and backup operations.
+  *
+  * Combines functionality from:
+  * - ExportManager: Graph export to various formats
+  * - ImportManager: Graph import from various formats
+  * - BackupManager: Point-in-time backup and restore
+  */
+ export class IOManager {
+     storage;
+     backupDir;
+     constructor(storage) {
+         this.storage = storage;
+         const filePath = this.storage.getFilePath();
+         const dir = dirname(filePath);
+         this.backupDir = join(dir, '.backups');
+     }
+     // ============================================================
+     // EXPORT OPERATIONS
+     // ============================================================
+     /**
+      * Export graph to specified format.
+      *
+      * @param graph - Knowledge graph to export
+      * @param format - Export format
+      * @returns Formatted export string
+      */
+     exportGraph(graph, format) {
+         switch (format) {
+             case 'json':
+                 return this.exportAsJson(graph);
+             case 'csv':
+                 return this.exportAsCsv(graph);
+             case 'graphml':
+                 return this.exportAsGraphML(graph);
+             case 'gexf':
+                 return this.exportAsGEXF(graph);
+             case 'dot':
+                 return this.exportAsDOT(graph);
+             case 'markdown':
+                 return this.exportAsMarkdown(graph);
+             case 'mermaid':
+                 return this.exportAsMermaid(graph);
+             default:
+                 throw new Error(`Unsupported export format: ${format}`);
+         }
+     }
+     /**
+      * Export graph with optional brotli compression.
+      *
+      * Compression is applied when:
+      * - `options.compress` is explicitly set to `true`
+      * - The exported content exceeds 100KB (auto-compress threshold)
+      *
+      * Compressed content is returned as base64-encoded string.
+      * Uncompressed content is returned as UTF-8 string.
+      *
+      * @param graph - Knowledge graph to export
+      * @param format - Export format
+      * @param options - Export options including compression settings
+      * @returns Export result with content and compression metadata
+      *
+      * @example
+      * ```typescript
+      * // Export with explicit compression
+      * const result = await manager.exportGraphWithCompression(graph, 'json', {
+      *   compress: true,
+      *   compressionQuality: 11
+      * });
+      *
+      * // Export with auto-compression for large graphs
+      * const result = await manager.exportGraphWithCompression(graph, 'json');
+      * // Compresses automatically if content > 100KB
+      * ```
+      */
+     async exportGraphWithCompression(graph, format, options) {
+         // Check if streaming should be used
+         const shouldStream = options?.streaming ||
+             (options?.outputPath && graph.entities.length >= STREAMING_CONFIG.STREAMING_THRESHOLD);
+         if (shouldStream && options?.outputPath) {
+             return this.streamExport(format, graph, options);
+         }
+         // Generate export content using existing method
+         const content = this.exportGraph(graph, format);
+         const originalSize = Buffer.byteLength(content, 'utf-8');
+         // Determine if compression should be applied
+         const shouldCompress = options?.compress === true ||
+             (options?.compress !== false &&
+                 originalSize > COMPRESSION_CONFIG.AUTO_COMPRESS_EXPORT_SIZE);
+         if (shouldCompress) {
+             const quality = options?.compressionQuality ?? COMPRESSION_CONFIG.BROTLI_QUALITY_BATCH;
+             const compressionResult = await compress(content, {
+                 quality,
+                 mode: 'text',
+             });
+             return {
+                 format,
+                 content: compressionResult.compressed.toString('base64'),
+                 entityCount: graph.entities.length,
+                 relationCount: graph.relations.length,
+                 compressed: true,
+                 encoding: 'base64',
+                 originalSize,
+                 compressedSize: compressionResult.compressedSize,
+                 compressionRatio: compressionResult.ratio,
+             };
+         }
+         // Return uncompressed content
+         return {
+             format,
+             content,
+             entityCount: graph.entities.length,
+             relationCount: graph.relations.length,
+             compressed: false,
+             encoding: 'utf-8',
+             originalSize,
+             compressedSize: originalSize,
+             compressionRatio: 1,
+         };
+     }
+     /**
+      * Stream export to a file for large graphs.
+      *
+      * Uses StreamingExporter to write entities and relations incrementally
+      * to avoid loading the entire export content into memory.
+      *
+      * @param format - Export format
+      * @param graph - Knowledge graph to export
+      * @param options - Export options with required outputPath
+      * @returns Export result with streaming metadata
+      * @private
+      */
+     async streamExport(format, graph, options) {
+         const exporter = new StreamingExporter(options.outputPath);
+         let result;
+         switch (format) {
+             case 'json':
+                 // Use JSONL format for streaming (line-delimited JSON)
+                 result = await exporter.streamJSONL(graph);
+                 break;
+             case 'csv':
+                 result = await exporter.streamCSV(graph);
+                 break;
+             default:
+                 // Fallback to in-memory export for unsupported streaming formats
+                 const content = this.exportGraph(graph, format);
+                 await fs.writeFile(options.outputPath, content);
+                 result = {
+                     bytesWritten: Buffer.byteLength(content, 'utf-8'),
+                     entitiesWritten: graph.entities.length,
+                     relationsWritten: graph.relations.length,
+                     durationMs: 0,
+                 };
+         }
+         return {
+             format,
+             content: `Streamed to ${options.outputPath}`,
+             entityCount: result.entitiesWritten,
+             relationCount: result.relationsWritten,
+             compressed: false,
+             encoding: 'utf-8',
+             originalSize: result.bytesWritten,
+             compressedSize: result.bytesWritten,
+             compressionRatio: 1,
+             streamed: true,
+             outputPath: options.outputPath,
+         };
+     }
+     exportAsJson(graph) {
+         return JSON.stringify(graph, null, 2);
+     }
+     exportAsCsv(graph) {
+         const lines = [];
+         const escapeCsvField = (field) => {
+             if (field === undefined || field === null)
+                 return '';
+             const str = String(field);
+             if (str.includes(',') || str.includes('"') || str.includes('\n')) {
+                 return `"${str.replace(/"/g, '""')}"`;
+             }
+             return str;
+         };
+         lines.push('# ENTITIES');
+         lines.push('name,entityType,observations,createdAt,lastModified,tags,importance');
+         for (const entity of graph.entities) {
+             const observationsStr = entity.observations.join('; ');
+             const tagsStr = entity.tags ? entity.tags.join('; ') : '';
+             const importanceStr = entity.importance !== undefined ? String(entity.importance) : '';
+             lines.push([
+                 escapeCsvField(entity.name),
+                 escapeCsvField(entity.entityType),
+                 escapeCsvField(observationsStr),
+                 escapeCsvField(entity.createdAt),
+                 escapeCsvField(entity.lastModified),
+                 escapeCsvField(tagsStr),
+                 escapeCsvField(importanceStr),
+             ].join(','));
+         }
+         lines.push('');
+         lines.push('# RELATIONS');
+         lines.push('from,to,relationType,createdAt,lastModified');
+         for (const relation of graph.relations) {
+             lines.push([
+                 escapeCsvField(relation.from),
+                 escapeCsvField(relation.to),
+                 escapeCsvField(relation.relationType),
+                 escapeCsvField(relation.createdAt),
+                 escapeCsvField(relation.lastModified),
+             ].join(','));
+         }
+         return lines.join('\n');
+     }
+     exportAsGraphML(graph) {
+         const lines = [];
+         const escapeXml = (str) => {
+             if (str === undefined || str === null)
+                 return '';
+             return String(str)
+                 .replace(/&/g, '&amp;')
+                 .replace(/</g, '&lt;')
+                 .replace(/>/g, '&gt;')
+                 .replace(/"/g, '&quot;')
+                 .replace(/'/g, '&apos;');
+         };
+         lines.push('<?xml version="1.0" encoding="UTF-8"?>');
+         lines.push('<graphml xmlns="http://graphml.graphdrawing.org/xmlns">');
+         lines.push(' <key id="d0" for="node" attr.name="entityType" attr.type="string"/>');
+         lines.push(' <key id="d1" for="node" attr.name="observations" attr.type="string"/>');
+         lines.push(' <key id="d2" for="node" attr.name="createdAt" attr.type="string"/>');
+         lines.push(' <key id="d3" for="node" attr.name="lastModified" attr.type="string"/>');
+         lines.push(' <key id="d4" for="node" attr.name="tags" attr.type="string"/>');
+         lines.push(' <key id="d5" for="node" attr.name="importance" attr.type="double"/>');
+         lines.push(' <key id="e0" for="edge" attr.name="relationType" attr.type="string"/>');
+         lines.push(' <key id="e1" for="edge" attr.name="createdAt" attr.type="string"/>');
+         lines.push(' <key id="e2" for="edge" attr.name="lastModified" attr.type="string"/>');
+         lines.push(' <graph id="G" edgedefault="directed">');
+         for (const entity of graph.entities) {
+             const nodeId = escapeXml(entity.name);
+             lines.push(` <node id="${nodeId}">`);
+             lines.push(` <data key="d0">${escapeXml(entity.entityType)}</data>`);
+             lines.push(` <data key="d1">${escapeXml(entity.observations.join('; '))}</data>`);
+             if (entity.createdAt)
+                 lines.push(` <data key="d2">${escapeXml(entity.createdAt)}</data>`);
+             if (entity.lastModified)
+                 lines.push(` <data key="d3">${escapeXml(entity.lastModified)}</data>`);
+             if (entity.tags?.length)
+                 lines.push(` <data key="d4">${escapeXml(entity.tags.join('; '))}</data>`);
+             if (entity.importance !== undefined)
+                 lines.push(` <data key="d5">${entity.importance}</data>`);
+             lines.push(' </node>');
+         }
+         let edgeId = 0;
+         for (const relation of graph.relations) {
+             const sourceId = escapeXml(relation.from);
+             const targetId = escapeXml(relation.to);
+             lines.push(` <edge id="e${edgeId}" source="${sourceId}" target="${targetId}">`);
+             lines.push(` <data key="e0">${escapeXml(relation.relationType)}</data>`);
+             if (relation.createdAt)
+                 lines.push(` <data key="e1">${escapeXml(relation.createdAt)}</data>`);
+             if (relation.lastModified)
+                 lines.push(` <data key="e2">${escapeXml(relation.lastModified)}</data>`);
+             lines.push(' </edge>');
+             edgeId++;
+         }
+         lines.push(' </graph>');
+         lines.push('</graphml>');
+         return lines.join('\n');
+     }
+     exportAsGEXF(graph) {
+         const lines = [];
+         const escapeXml = (str) => {
+             if (str === undefined || str === null)
+                 return '';
+             return String(str)
+                 .replace(/&/g, '&amp;')
+                 .replace(/</g, '&lt;')
+                 .replace(/>/g, '&gt;')
+                 .replace(/"/g, '&quot;')
+                 .replace(/'/g, '&apos;');
+         };
+         lines.push('<?xml version="1.0" encoding="UTF-8"?>');
+         lines.push('<gexf xmlns="http://www.gexf.net/1.2draft" version="1.2">');
+         lines.push(' <meta>');
+         lines.push(' <creator>Memory MCP Server</creator>');
+         lines.push(' </meta>');
+         lines.push(' <graph mode="static" defaultedgetype="directed">');
+         lines.push(' <attributes class="node">');
+         lines.push(' <attribute id="0" title="entityType" type="string"/>');
+         lines.push(' <attribute id="1" title="observations" type="string"/>');
+         lines.push(' </attributes>');
+         lines.push(' <nodes>');
+         for (const entity of graph.entities) {
+             const nodeId = escapeXml(entity.name);
+             lines.push(` <node id="${nodeId}" label="${nodeId}">`);
+             lines.push(' <attvalues>');
+             lines.push(` <attvalue for="0" value="${escapeXml(entity.entityType)}"/>`);
+             lines.push(` <attvalue for="1" value="${escapeXml(entity.observations.join('; '))}"/>`);
+             lines.push(' </attvalues>');
+             lines.push(' </node>');
+         }
+         lines.push(' </nodes>');
+         lines.push(' <edges>');
+         let edgeId = 0;
+         for (const relation of graph.relations) {
+             const sourceId = escapeXml(relation.from);
+             const targetId = escapeXml(relation.to);
+             const label = escapeXml(relation.relationType);
+             lines.push(` <edge id="${edgeId}" source="${sourceId}" target="${targetId}" label="${label}"/>`);
+             edgeId++;
+         }
+         lines.push(' </edges>');
+         lines.push(' </graph>');
+         lines.push('</gexf>');
+         return lines.join('\n');
+     }
+     exportAsDOT(graph) {
+         const lines = [];
+         const escapeDot = (str) => {
+             return '"' + str.replace(/\\/g, '\\\\').replace(/"/g, '\\"').replace(/\n/g, '\\n') + '"';
+         };
+         lines.push('digraph KnowledgeGraph {');
+         lines.push(' rankdir=LR;');
+         lines.push(' node [shape=box, style=rounded];');
+         lines.push('');
+         for (const entity of graph.entities) {
+             const nodeId = escapeDot(entity.name);
+             const label = [`${entity.name}`, `Type: ${entity.entityType}`];
+             if (entity.tags?.length)
+                 label.push(`Tags: ${entity.tags.join(', ')}`);
+             const labelStr = escapeDot(label.join('\\n'));
+             lines.push(` ${nodeId} [label=${labelStr}];`);
+         }
+         lines.push('');
+         for (const relation of graph.relations) {
+             const fromId = escapeDot(relation.from);
+             const toId = escapeDot(relation.to);
+             const label = escapeDot(relation.relationType);
+             lines.push(` ${fromId} -> ${toId} [label=${label}];`);
+         }
+         lines.push('}');
+         return lines.join('\n');
+     }
+     exportAsMarkdown(graph) {
+         const lines = [];
+         lines.push('# Knowledge Graph Export');
+         lines.push('');
+         lines.push(`**Exported:** ${new Date().toISOString()}`);
+         lines.push(`**Entities:** ${graph.entities.length}`);
+         lines.push(`**Relations:** ${graph.relations.length}`);
+         lines.push('');
+         lines.push('## Entities');
+         lines.push('');
+         for (const entity of graph.entities) {
+             lines.push(`### ${entity.name}`);
+             lines.push('');
+             lines.push(`- **Type:** ${entity.entityType}`);
+             if (entity.tags?.length)
+                 lines.push(`- **Tags:** ${entity.tags.map(t => `\`${t}\``).join(', ')}`);
+             if (entity.importance !== undefined)
+                 lines.push(`- **Importance:** ${entity.importance}/10`);
+             if (entity.observations.length > 0) {
+                 lines.push('');
+                 lines.push('**Observations:**');
+                 for (const obs of entity.observations) {
+                     lines.push(`- ${obs}`);
+                 }
+             }
+             lines.push('');
+         }
+         if (graph.relations.length > 0) {
+             lines.push('## Relations');
+             lines.push('');
+             for (const relation of graph.relations) {
+                 lines.push(`- **${relation.from}** → *${relation.relationType}* → **${relation.to}**`);
+             }
+             lines.push('');
+         }
+         return lines.join('\n');
+     }
+     exportAsMermaid(graph) {
+         const lines = [];
+         const sanitizeId = (str) => str.replace(/[^a-zA-Z0-9_]/g, '_');
+         const escapeLabel = (str) => str.replace(/"/g, '#quot;');
+         lines.push('graph LR');
+         lines.push(' %% Knowledge Graph');
+         lines.push('');
+         const nodeIds = new Map();
+         for (const entity of graph.entities) {
+             nodeIds.set(entity.name, sanitizeId(entity.name));
+         }
+         for (const entity of graph.entities) {
+             const nodeId = nodeIds.get(entity.name);
+             const labelParts = [entity.name, `Type: ${entity.entityType}`];
+             if (entity.tags?.length)
+                 labelParts.push(`Tags: ${entity.tags.join(', ')}`);
+             const label = escapeLabel(labelParts.join('<br/>'));
+             lines.push(` ${nodeId}["${label}"]`);
+         }
+         lines.push('');
+         for (const relation of graph.relations) {
+             const fromId = nodeIds.get(relation.from);
+             const toId = nodeIds.get(relation.to);
+             if (fromId && toId) {
+                 const label = escapeLabel(relation.relationType);
+                 lines.push(` ${fromId} -->|"${label}"| ${toId}`);
+             }
+         }
+         return lines.join('\n');
+     }
+     // ============================================================
+     // IMPORT OPERATIONS
+     // ============================================================
+     /**
+      * Import graph from formatted data.
+      *
+      * Phase 9B: Supports progress tracking and cancellation via LongRunningOperationOptions.
+      *
+      * @param format - Import format
+      * @param data - Import data string
+      * @param mergeStrategy - How to handle conflicts
+      * @param dryRun - If true, preview changes without applying
+      * @param options - Optional progress/cancellation options (Phase 9B)
+      * @returns Import result with statistics
+      * @throws {OperationCancelledError} If operation is cancelled via signal (Phase 9B)
+      */
+     async importGraph(format, data, mergeStrategy = 'skip', dryRun = false, options) {
+         // Check for early cancellation
+         checkCancellation(options?.signal, 'importGraph');
+         // Setup progress reporter
+         const reportProgress = createProgressReporter(options?.onProgress);
+         reportProgress?.(createProgress(0, 100, 'importGraph'));
+         let importedGraph;
+         try {
+             // Parsing phase (0-20% progress)
+             reportProgress?.(createProgress(5, 100, 'parsing data'));
+             checkCancellation(options?.signal, 'importGraph');
+             switch (format) {
+                 case 'json':
+                     importedGraph = this.parseJsonImport(data);
+                     break;
+                 case 'csv':
+                     importedGraph = this.parseCsvImport(data);
+                     break;
+                 case 'graphml':
+                     importedGraph = this.parseGraphMLImport(data);
+                     break;
+                 default:
+                     throw new Error(`Unsupported import format: ${format}`);
+             }
+             reportProgress?.(createProgress(20, 100, 'parsing complete'));
+         }
+         catch (error) {
+             return {
+                 entitiesAdded: 0,
+                 entitiesSkipped: 0,
+                 entitiesUpdated: 0,
+                 relationsAdded: 0,
+                 relationsSkipped: 0,
+                 errors: [`Failed to parse ${format} data: ${error instanceof Error ? error.message : String(error)}`],
+             };
+         }
+         // Merging phase (20-100% progress)
+         return await this.mergeImportedGraph(importedGraph, mergeStrategy, dryRun, options);
+     }
+     parseJsonImport(data) {
+         const parsed = JSON.parse(data);
+         if (!parsed.entities || !Array.isArray(parsed.entities)) {
+             throw new Error('Invalid JSON: missing or invalid entities array');
+         }
+         if (!parsed.relations || !Array.isArray(parsed.relations)) {
+             throw new Error('Invalid JSON: missing or invalid relations array');
+         }
+         return {
+             entities: parsed.entities,
+             relations: parsed.relations,
+         };
+     }
+     parseCsvImport(data) {
+         const lines = data
+             .split('\n')
+             .map(line => line.trim())
+             .filter(line => line);
+         const entities = [];
+         const relations = [];
+         let section = null;
+         let headerParsed = false;
+         const parseCsvLine = (line) => {
+             const fields = [];
+             let current = '';
+             let inQuotes = false;
+             for (let i = 0; i < line.length; i++) {
+                 const char = line[i];
+                 if (char === '"') {
+                     if (inQuotes && line[i + 1] === '"') {
+                         current += '"';
+                         i++;
+                     }
+                     else {
+                         inQuotes = !inQuotes;
+                     }
+                 }
+                 else if (char === ',' && !inQuotes) {
+                     fields.push(current);
+                     current = '';
+                 }
+                 else {
+                     current += char;
+                 }
+             }
+             fields.push(current);
+             return fields;
+         };
+         for (const line of lines) {
+             if (line.startsWith('# ENTITIES')) {
+                 section = 'entities';
+                 headerParsed = false;
+                 continue;
+             }
+             else if (line.startsWith('# RELATIONS')) {
+                 section = 'relations';
+                 headerParsed = false;
+                 continue;
+             }
+             if (line.startsWith('#'))
+                 continue;
+             if (section === 'entities') {
+                 if (!headerParsed) {
+                     headerParsed = true;
+                     continue;
+                 }
+                 const fields = parseCsvLine(line);
+                 if (fields.length >= 2) {
+                     const entity = {
+                         name: fields[0],
+                         entityType: fields[1],
+                         observations: fields[2]
+                             ? fields[2]
+                                 .split(';')
+                                 .map(s => s.trim())
+                                 .filter(s => s)
+                             : [],
+                         createdAt: fields[3] || undefined,
+                         lastModified: fields[4] || undefined,
+                         tags: fields[5]
+                             ? fields[5]
+                                 .split(';')
+                                 .map(s => s.trim().toLowerCase())
+                                 .filter(s => s)
+                             : undefined,
+                         importance: fields[6] ? parseFloat(fields[6]) : undefined,
+                     };
+                     entities.push(entity);
+                 }
+             }
+             else if (section === 'relations') {
+                 if (!headerParsed) {
+                     headerParsed = true;
+                     continue;
+                 }
+                 const fields = parseCsvLine(line);
+                 if (fields.length >= 3) {
+                     const relation = {
+                         from: fields[0],
+                         to: fields[1],
+                         relationType: fields[2],
+                         createdAt: fields[3] || undefined,
+                         lastModified: fields[4] || undefined,
+                     };
+                     relations.push(relation);
+                 }
+             }
+         }
+         return { entities, relations };
+     }
+     parseGraphMLImport(data) {
+         const entities = [];
+         const relations = [];
+         const nodeRegex = /<node\s+id="([^"]+)"[^>]*>([\s\S]*?)<\/node>/g;
+         let nodeMatch;
+         while ((nodeMatch = nodeRegex.exec(data)) !== null) {
+             const nodeId = nodeMatch[1];
+             const nodeContent = nodeMatch[2];
+             const getDataValue = (key) => {
+                 const dataRegex = new RegExp(`<data\\s+key="${key}">([^<]*)<\/data>`);
+                 const match = dataRegex.exec(nodeContent);
+                 return match ? match[1] : undefined;
+             };
+             const entity = {
+                 name: nodeId,
+                 entityType: getDataValue('d0') || getDataValue('entityType') || 'unknown',
+                 observations: (getDataValue('d1') || getDataValue('observations') || '')
+                     .split(';')
+                     .map(s => s.trim())
+                     .filter(s => s),
+                 createdAt: getDataValue('d2') || getDataValue('createdAt'),
+                 lastModified: getDataValue('d3') || getDataValue('lastModified'),
+                 tags: (getDataValue('d4') || getDataValue('tags') || '')
+                     .split(';')
+                     .map(s => s.trim().toLowerCase())
+                     .filter(s => s),
+                 importance: getDataValue('d5') || getDataValue('importance') ? parseFloat(getDataValue('d5') || getDataValue('importance') || '0') : undefined,
+             };
+             entities.push(entity);
+         }
+         const edgeRegex = /<edge\s+[^>]*source="([^"]+)"\s+target="([^"]+)"[^>]*>([\s\S]*?)<\/edge>/g;
+         let edgeMatch;
+         while ((edgeMatch = edgeRegex.exec(data)) !== null) {
+             const source = edgeMatch[1];
+             const target = edgeMatch[2];
+             const edgeContent = edgeMatch[3];
+             const getDataValue = (key) => {
+                 const dataRegex = new RegExp(`<data\\s+key="${key}">([^<]*)<\/data>`);
+                 const match = dataRegex.exec(edgeContent);
+                 return match ? match[1] : undefined;
+             };
+             const relation = {
+                 from: source,
+                 to: target,
+                 relationType: getDataValue('e0') || getDataValue('relationType') || 'related_to',
+                 createdAt: getDataValue('e1') || getDataValue('createdAt'),
+                 lastModified: getDataValue('e2') || getDataValue('lastModified'),
+             };
+             relations.push(relation);
+         }
+         return { entities, relations };
+     }
+     async mergeImportedGraph(importedGraph, mergeStrategy, dryRun, options) {
+         // Check for cancellation
+         checkCancellation(options?.signal, 'importGraph');
+         // Setup progress reporter (we're at 20% from parsing, need to go to 100%)
+         const reportProgress = createProgressReporter(options?.onProgress);
+         const existingGraph = await this.storage.getGraphForMutation();
+         const result = {
+             entitiesAdded: 0,
+             entitiesSkipped: 0,
+             entitiesUpdated: 0,
+             relationsAdded: 0,
+             relationsSkipped: 0,
+             errors: [],
+         };
+         const existingEntitiesMap = new Map();
+         for (const entity of existingGraph.entities) {
+             existingEntitiesMap.set(entity.name, entity);
+         }
+         const existingRelationsSet = new Set();
+         for (const relation of existingGraph.relations) {
+             existingRelationsSet.add(`${relation.from}|${relation.to}|${relation.relationType}`);
+         }
+         // Process entities (20-60% progress)
+         const totalEntities = importedGraph.entities.length;
+         const totalRelations = importedGraph.relations.length;
+         let processedEntities = 0;
+         for (const importedEntity of importedGraph.entities) {
+             // Check for cancellation periodically
+             checkCancellation(options?.signal, 'importGraph');
+             const existing = existingEntitiesMap.get(importedEntity.name);
+             if (!existing) {
+                 result.entitiesAdded++;
+                 if (!dryRun) {
+                     existingGraph.entities.push(importedEntity);
+                     existingEntitiesMap.set(importedEntity.name, importedEntity);
+                 }
+             }
+             else {
+                 switch (mergeStrategy) {
+                     case 'replace':
+                         result.entitiesUpdated++;
+                         if (!dryRun) {
+                             Object.assign(existing, importedEntity);
+                         }
+                         break;
+                     case 'skip':
+                         result.entitiesSkipped++;
+                         break;
+                     case 'merge':
+                         result.entitiesUpdated++;
+                         if (!dryRun) {
+                             existing.observations = [
+                                 ...new Set([...existing.observations, ...importedEntity.observations]),
+                             ];
+                             if (importedEntity.tags) {
+                                 existing.tags = existing.tags || [];
+                                 existing.tags = [...new Set([...existing.tags, ...importedEntity.tags])];
+                             }
+                             if (importedEntity.importance !== undefined) {
+                                 existing.importance = importedEntity.importance;
+                             }
+                             existing.lastModified = new Date().toISOString();
+                         }
+                         break;
+                     case 'fail':
+                         result.errors.push(`Entity "${importedEntity.name}" already exists`);
+                         break;
+                 }
+             }
+             processedEntities++;
+             // Map entity progress (0-100%) to overall progress (20-60%)
+             const entityProgress = totalEntities > 0 ? Math.round(20 + (processedEntities / totalEntities) * 40) : 60;
+             reportProgress?.(createProgress(entityProgress, 100, 'importing entities'));
+         }
+         reportProgress?.(createProgress(60, 100, 'importing relations'));
+         // Process relations (60-95% progress)
+         let processedRelations = 0;
+         for (const importedRelation of importedGraph.relations) {
+             // Check for cancellation periodically
+             checkCancellation(options?.signal, 'importGraph');
+             const relationKey = `${importedRelation.from}|${importedRelation.to}|${importedRelation.relationType}`;
+             if (!existingEntitiesMap.has(importedRelation.from)) {
+                 result.errors.push(`Relation source entity "${importedRelation.from}" does not exist`);
+                 processedRelations++;
+                 continue;
+             }
+             if (!existingEntitiesMap.has(importedRelation.to)) {
+                 result.errors.push(`Relation target entity "${importedRelation.to}" does not exist`);
+                 processedRelations++;
+                 continue;
+             }
+             if (!existingRelationsSet.has(relationKey)) {
+                 result.relationsAdded++;
+                 if (!dryRun) {
+                     existingGraph.relations.push(importedRelation);
+                     existingRelationsSet.add(relationKey);
+                 }
+             }
+             else {
+                 if (mergeStrategy === 'fail') {
+                     result.errors.push(`Relation "${relationKey}" already exists`);
+                 }
+                 else {
+                     result.relationsSkipped++;
+                 }
+             }
+             processedRelations++;
+             // Map relation progress (0-100%) to overall progress (60-95%)
+             const relationProgress = totalRelations > 0 ? Math.round(60 + (processedRelations / totalRelations) * 35) : 95;
+             reportProgress?.(createProgress(relationProgress, 100, 'importing relations'));
+         }
+         // Check for cancellation before final save
+         checkCancellation(options?.signal, 'importGraph');
+         reportProgress?.(createProgress(95, 100, 'saving graph'));
+         if (!dryRun && (mergeStrategy !== 'fail' || result.errors.length === 0)) {
+             await this.storage.saveGraph(existingGraph);
+         }
+         // Report completion
+         reportProgress?.(createProgress(100, 100, 'importGraph'));
+         return result;
+     }
+     // ============================================================
+     // BACKUP OPERATIONS
+     // ============================================================
+     /**
+      * Ensure backup directory exists.
+      */
+     async ensureBackupDir() {
+         try {
+             await fs.mkdir(this.backupDir, { recursive: true });
+         }
+         catch (error) {
+             throw new FileOperationError('create backup directory', this.backupDir, error);
+         }
+     }
+     /**
+      * Generate backup file name with timestamp.
+      * @param compressed - Whether the backup will be compressed (affects extension)
+      */
+     generateBackupFileName(compressed = true) {
+         const now = new Date();
+         const timestamp = now.toISOString()
+             .replace(/:/g, '-')
+             .replace(/\./g, '-')
+             .replace('T', '_')
+             .replace('Z', '');
+         const extension = compressed ? '.jsonl.br' : '.jsonl';
+         return `backup_${timestamp}${extension}`;
+     }
+     /**
+      * Create a backup of the current knowledge graph.
+      *
+      * By default, backups are compressed with brotli for 50-70% space reduction.
+      * Use `options.compress = false` to create uncompressed backups.
+      *
+      * @param options - Backup options (compress, description) or legacy description string
+      * @returns Promise resolving to BackupResult with compression statistics
+      *
+      * @example
+      * ```typescript
+      * // Compressed backup (default)
+      * const result = await manager.createBackup({ description: 'Pre-migration backup' });
+      * console.log(`Compressed from ${result.originalSize} to ${result.compressedSize} bytes`);
+      *
+      * // Uncompressed backup
+      * const result = await manager.createBackup({ compress: false });
+      * ```
+      */
+     async createBackup(options) {
+         await this.ensureBackupDir();
+         // Handle legacy string argument (backward compatibility)
+         const opts = typeof options === 'string'
+             ? { description: options, compress: COMPRESSION_CONFIG.AUTO_COMPRESS_BACKUP }
+             : { compress: COMPRESSION_CONFIG.AUTO_COMPRESS_BACKUP, ...options };
+         const shouldCompress = opts.compress ?? COMPRESSION_CONFIG.AUTO_COMPRESS_BACKUP;
+         const graph = await this.storage.loadGraph();
+         const timestamp = new Date().toISOString();
+         const fileName = this.generateBackupFileName(shouldCompress);
+         const backupPath = join(this.backupDir, fileName);
+         try {
+             const originalPath = this.storage.getFilePath();
+             let fileContent;
+             try {
+                 fileContent = await fs.readFile(originalPath, 'utf-8');
+             }
+             catch {
+                 // If file doesn't exist, generate content from graph
+                 const lines = [
+                     ...graph.entities.map(e => JSON.stringify({ type: 'entity', ...e })),
+                     ...graph.relations.map(r => JSON.stringify({ type: 'relation', ...r })),
+                 ];
+                 fileContent = lines.join('\n');
+             }
+             const originalSize = Buffer.byteLength(fileContent, 'utf-8');
+             let compressedSize = originalSize;
+             let compressionRatio = 1;
+             if (shouldCompress) {
+                 // Compress with maximum quality for backups (archive quality)
+                 const compressionResult = await compress(fileContent, {
+                     quality: COMPRESSION_CONFIG.BROTLI_QUALITY_ARCHIVE,
+                     mode: 'text',
+                 });
+                 await fs.writeFile(backupPath, compressionResult.compressed);
+                 compressedSize = compressionResult.compressedSize;
+                 compressionRatio = compressionResult.ratio;
+             }
+             else {
+                 // Write uncompressed backup
+                 await fs.writeFile(backupPath, fileContent);
+             }
+             const stats = await fs.stat(backupPath);
+             const metadata = {
+                 timestamp,
+                 entityCount: graph.entities.length,
+                 relationCount: graph.relations.length,
+                 fileSize: stats.size,
+                 description: opts.description,
+                 compressed: shouldCompress,
+                 originalSize,
+                 compressionRatio: shouldCompress ? compressionRatio : undefined,
+                 compressionFormat: shouldCompress ? 'brotli' : 'none',
+             };
+             const metadataPath = `${backupPath}.meta.json`;
+             await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
+             return {
+                 path: backupPath,
+                 timestamp,
+                 entityCount: graph.entities.length,
+                 relationCount: graph.relations.length,
+                 compressed: shouldCompress,
+                 originalSize,
+                 compressedSize,
+                 compressionRatio,
+                 description: opts.description,
+             };
+         }
+         catch (error) {
+             throw new FileOperationError('create backup', backupPath, error);
+         }
+     }
+     /**
+      * List all available backups, sorted by timestamp (newest first).
+      *
+      * Detects both compressed (.jsonl.br) and uncompressed (.jsonl) backups.
+      *
+      * @returns Promise resolving to array of backup information with compression details
+      */
+     async listBackups() {
+         try {
+             try {
+                 await fs.access(this.backupDir);
+             }
+             catch {
+                 return [];
+             }
+             const files = await fs.readdir(this.backupDir);
+             // Match both .jsonl and .jsonl.br backup files, exclude metadata files
+             const backupFiles = files.filter(f => f.startsWith('backup_') &&
+                 (f.endsWith('.jsonl') || f.endsWith('.jsonl.br')) &&
+                 !f.endsWith('.meta.json'));
+             const backups = [];
+             for (const fileName of backupFiles) {
+                 const filePath = join(this.backupDir, fileName);
+                 const isCompressed = hasBrotliExtension(fileName);
+                 // Try to read metadata file (handles both .jsonl.meta.json and .jsonl.br.meta.json)
+                 const metadataPath = `${filePath}.meta.json`;
+                 try {
+                     const [metadataContent, stats] = await Promise.all([
+                         fs.readFile(metadataPath, 'utf-8'),
+                         fs.stat(filePath),
+                     ]);
+                     const metadata = JSON.parse(metadataContent);
+                     // Ensure compression fields are present (backward compatibility)
+                     if (metadata.compressed === undefined) {
+                         metadata.compressed = isCompressed;
+                     }
+                     if (metadata.compressionFormat === undefined) {
+                         metadata.compressionFormat = isCompressed ? 'brotli' : 'none';
+                     }
+                     backups.push({
+                         fileName,
+                         filePath,
+                         metadata,
+                         compressed: isCompressed,
+                         size: stats.size,
+                     });
+                 }
+                 catch {
+                     // Skip backups without valid metadata
+                     continue;
+                 }
+             }
+             backups.sort((a, b) => new Date(b.metadata.timestamp).getTime() - new Date(a.metadata.timestamp).getTime());
+             return backups;
+         }
+         catch (error) {
+             throw new FileOperationError('list backups', this.backupDir, error);
+         }
+     }
+     /**
+      * Restore the knowledge graph from a backup file.
+      *
+      * Automatically detects and decompresses brotli-compressed backups (.br extension).
+      * Maintains backward compatibility with uncompressed backups.
+      *
+      * @param backupPath - Path to the backup file to restore from
+      * @returns Promise resolving to RestoreResult with restoration details
+      *
+      * @example
+      * ```typescript
+      * // Restore from compressed backup
+      * const result = await manager.restoreFromBackup('/path/to/backup.jsonl.br');
+      * console.log(`Restored ${result.entityCount} entities from compressed backup`);
+      *
+      * // Restore from uncompressed backup (legacy)
+      * const result = await manager.restoreFromBackup('/path/to/backup.jsonl');
+      * ```
+      */
+     async restoreFromBackup(backupPath) {
+         try {
+             await fs.access(backupPath);
+             const isCompressed = hasBrotliExtension(backupPath);
+             const backupBuffer = await fs.readFile(backupPath);
+             let backupContent;
+             if (isCompressed) {
+                 // Decompress the backup
+                 const decompressedBuffer = await decompress(backupBuffer);
+                 backupContent = decompressedBuffer.toString('utf-8');
+             }
+             else {
+                 // Read as plain text
+                 backupContent = backupBuffer.toString('utf-8');
+             }
+             const mainPath = this.storage.getFilePath();
+             await fs.writeFile(mainPath, backupContent);
+             this.storage.clearCache();
+             // Load the restored graph to get counts
+             const graph = await this.storage.loadGraph();
+             return {
+                 entityCount: graph.entities.length,
+                 relationCount: graph.relations.length,
+                 restoredFrom: backupPath,
+                 wasCompressed: isCompressed,
+             };
+         }
+         catch (error) {
+             throw new FileOperationError('restore from backup', backupPath, error);
+         }
+     }
+     /**
+      * Delete a specific backup file.
+      *
+      * @param backupPath - Path to the backup file to delete
+      */
+     async deleteBackup(backupPath) {
+         try {
+             await fs.unlink(backupPath);
+             try {
+                 await fs.unlink(`${backupPath}.meta.json`);
+             }
+             catch {
+                 // Metadata file doesn't exist - that's ok
+             }
+         }
+         catch (error) {
+             throw new FileOperationError('delete backup', backupPath, error);
+         }
+     }
+     /**
+      * Clean old backups, keeping only the most recent N backups.
+      *
+      * @param keepCount - Number of recent backups to keep (default: 10)
+      * @returns Promise resolving to number of backups deleted
+      */
+     async cleanOldBackups(keepCount = 10) {
+         const backups = await this.listBackups();
+         if (backups.length <= keepCount) {
+             return 0;
+         }
+         const backupsToDelete = backups.slice(keepCount);
+         let deletedCount = 0;
+         for (const backup of backupsToDelete) {
+             try {
+                 await this.deleteBackup(backup.filePath);
+                 deletedCount++;
+             }
+             catch {
+                 continue;
+             }
+         }
+         return deletedCount;
+     }
+     /**
+      * Get the path to the backup directory.
+      */
+     getBackupDir() {
+         return this.backupDir;
+     }
+ }
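
For orientation, here is a minimal usage sketch of the consolidated IOManager shown in the diff above. The import paths, the GraphStorage constructor signature, and the file locations are assumptions for illustration only; the IOManager constructor, method names, and result fields are taken from the diff.

```typescript
// Hypothetical wiring — module paths and the GraphStorage constructor are assumed;
// only the IOManager API (createBackup, exportGraph, cleanOldBackups) comes from the diff.
import { GraphStorage } from './core/GraphStorage.js';
import { IOManager } from './features/IOManager.js';

async function nightlyMaintenance(): Promise<void> {
  const storage = new GraphStorage('/data/memory.jsonl'); // assumed constructor signature
  const io = new IOManager(storage);

  // Brotli-compressed backup (the default, per createBackup above)
  const backup = await io.createBackup({ description: 'nightly' });
  console.log(`backup ${backup.path}: ${backup.originalSize} -> ${backup.compressedSize} bytes`);

  // Full graph export as Markdown (one of the seven formats exportGraph dispatches on)
  const graph = await storage.loadGraph();
  const markdown = io.exportGraph(graph, 'markdown');
  console.log(`exported ${markdown.length} characters of markdown`);

  // Keep only the ten most recent backups
  const deleted = await io.cleanOldBackups(10);
  console.log(`pruned ${deleted} old backup(s)`);
}

nightlyMaintenance().catch(console.error);
```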