@danielsimonjr/memory-mcp 0.48.0 → 9.8.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (210)
  1. package/LICENSE +22 -0
  2. package/README.md +2000 -194
  3. package/dist/__tests__/file-path.test.js +7 -11
  4. package/dist/__tests__/knowledge-graph.test.js +3 -8
  5. package/dist/core/EntityManager.d.ts +266 -0
  6. package/dist/core/EntityManager.d.ts.map +1 -0
  7. package/dist/core/EntityManager.js +89 -137
  8. package/dist/core/GraphEventEmitter.d.ts +202 -0
  9. package/dist/core/GraphEventEmitter.d.ts.map +1 -0
  10. package/dist/core/GraphEventEmitter.js +346 -0
  11. package/dist/core/GraphStorage.d.ts +395 -0
  12. package/dist/core/GraphStorage.d.ts.map +1 -0
  13. package/dist/core/GraphStorage.js +644 -31
  14. package/dist/core/GraphTraversal.d.ts +141 -0
  15. package/dist/core/GraphTraversal.d.ts.map +1 -0
  16. package/dist/core/GraphTraversal.js +573 -0
  17. package/dist/core/HierarchyManager.d.ts +111 -0
  18. package/dist/core/HierarchyManager.d.ts.map +1 -0
  19. package/dist/{features → core}/HierarchyManager.js +14 -9
  20. package/dist/core/ManagerContext.d.ts +72 -0
  21. package/dist/core/ManagerContext.d.ts.map +1 -0
  22. package/dist/core/ManagerContext.js +118 -0
  23. package/dist/core/ObservationManager.d.ts +85 -0
  24. package/dist/core/ObservationManager.d.ts.map +1 -0
  25. package/dist/core/ObservationManager.js +51 -57
  26. package/dist/core/RelationManager.d.ts +131 -0
  27. package/dist/core/RelationManager.d.ts.map +1 -0
  28. package/dist/core/RelationManager.js +31 -7
  29. package/dist/core/SQLiteStorage.d.ts +354 -0
  30. package/dist/core/SQLiteStorage.d.ts.map +1 -0
  31. package/dist/core/SQLiteStorage.js +918 -0
  32. package/dist/core/StorageFactory.d.ts +45 -0
  33. package/dist/core/StorageFactory.d.ts.map +1 -0
  34. package/dist/core/StorageFactory.js +64 -0
  35. package/dist/core/TransactionManager.d.ts +464 -0
  36. package/dist/core/TransactionManager.d.ts.map +1 -0
  37. package/dist/core/TransactionManager.js +493 -14
  38. package/dist/core/index.d.ts +17 -0
  39. package/dist/core/index.d.ts.map +1 -0
  40. package/dist/core/index.js +12 -2
  41. package/dist/features/AnalyticsManager.d.ts +44 -0
  42. package/dist/features/AnalyticsManager.d.ts.map +1 -0
  43. package/dist/features/AnalyticsManager.js +3 -2
  44. package/dist/features/ArchiveManager.d.ts +133 -0
  45. package/dist/features/ArchiveManager.d.ts.map +1 -0
  46. package/dist/features/ArchiveManager.js +221 -14
  47. package/dist/features/CompressionManager.d.ts +117 -0
  48. package/dist/features/CompressionManager.d.ts.map +1 -0
  49. package/dist/features/CompressionManager.js +189 -20
  50. package/dist/features/IOManager.d.ts +225 -0
  51. package/dist/features/IOManager.d.ts.map +1 -0
  52. package/dist/features/IOManager.js +1092 -0
  53. package/dist/features/StreamingExporter.d.ts +128 -0
  54. package/dist/features/StreamingExporter.d.ts.map +1 -0
  55. package/dist/features/StreamingExporter.js +211 -0
  56. package/dist/features/TagManager.d.ts +147 -0
  57. package/dist/features/TagManager.d.ts.map +1 -0
  58. package/dist/features/index.d.ts +12 -0
  59. package/dist/features/index.d.ts.map +1 -0
  60. package/dist/features/index.js +5 -6
  61. package/dist/index.d.ts +9 -0
  62. package/dist/index.d.ts.map +1 -0
  63. package/dist/index.js +10 -10
  64. package/dist/memory.jsonl +1 -26
  65. package/dist/search/BasicSearch.d.ts +51 -0
  66. package/dist/search/BasicSearch.d.ts.map +1 -0
  67. package/dist/search/BasicSearch.js +9 -3
  68. package/dist/search/BooleanSearch.d.ts +98 -0
  69. package/dist/search/BooleanSearch.d.ts.map +1 -0
  70. package/dist/search/BooleanSearch.js +156 -9
  71. package/dist/search/EmbeddingService.d.ts +178 -0
  72. package/dist/search/EmbeddingService.d.ts.map +1 -0
  73. package/dist/search/EmbeddingService.js +358 -0
  74. package/dist/search/FuzzySearch.d.ts +118 -0
  75. package/dist/search/FuzzySearch.d.ts.map +1 -0
  76. package/dist/search/FuzzySearch.js +241 -25
  77. package/dist/search/QueryCostEstimator.d.ts +111 -0
  78. package/dist/search/QueryCostEstimator.d.ts.map +1 -0
  79. package/dist/search/QueryCostEstimator.js +355 -0
  80. package/dist/search/RankedSearch.d.ts +71 -0
  81. package/dist/search/RankedSearch.d.ts.map +1 -0
  82. package/dist/search/RankedSearch.js +54 -6
  83. package/dist/search/SavedSearchManager.d.ts +79 -0
  84. package/dist/search/SavedSearchManager.d.ts.map +1 -0
  85. package/dist/search/SavedSearchManager.js +3 -2
  86. package/dist/search/SearchFilterChain.d.ts +120 -0
  87. package/dist/search/SearchFilterChain.d.ts.map +1 -0
  88. package/dist/search/SearchFilterChain.js +2 -4
  89. package/dist/search/SearchManager.d.ts +326 -0
  90. package/dist/search/SearchManager.d.ts.map +1 -0
  91. package/dist/search/SearchManager.js +148 -0
  92. package/dist/search/SearchSuggestions.d.ts +27 -0
  93. package/dist/search/SearchSuggestions.d.ts.map +1 -0
  94. package/dist/search/SearchSuggestions.js +1 -1
  95. package/dist/search/SemanticSearch.d.ts +149 -0
  96. package/dist/search/SemanticSearch.d.ts.map +1 -0
  97. package/dist/search/SemanticSearch.js +323 -0
  98. package/dist/search/TFIDFEventSync.d.ts +85 -0
  99. package/dist/search/TFIDFEventSync.d.ts.map +1 -0
  100. package/dist/search/TFIDFEventSync.js +133 -0
  101. package/dist/search/TFIDFIndexManager.d.ts +151 -0
  102. package/dist/search/TFIDFIndexManager.d.ts.map +1 -0
  103. package/dist/search/TFIDFIndexManager.js +232 -17
  104. package/dist/search/VectorStore.d.ts +235 -0
  105. package/dist/search/VectorStore.d.ts.map +1 -0
  106. package/dist/search/VectorStore.js +311 -0
  107. package/dist/search/index.d.ts +21 -0
  108. package/dist/search/index.d.ts.map +1 -0
  109. package/dist/search/index.js +12 -0
  110. package/dist/server/MCPServer.d.ts +21 -0
  111. package/dist/server/MCPServer.d.ts.map +1 -0
  112. package/dist/server/MCPServer.js +4 -4
  113. package/dist/server/responseCompressor.d.ts +94 -0
  114. package/dist/server/responseCompressor.d.ts.map +1 -0
  115. package/dist/server/responseCompressor.js +127 -0
  116. package/dist/server/toolDefinitions.d.ts +27 -0
  117. package/dist/server/toolDefinitions.d.ts.map +1 -0
  118. package/dist/server/toolDefinitions.js +188 -17
  119. package/dist/server/toolHandlers.d.ts +41 -0
  120. package/dist/server/toolHandlers.d.ts.map +1 -0
  121. package/dist/server/toolHandlers.js +469 -75
  122. package/dist/types/index.d.ts +13 -0
  123. package/dist/types/index.d.ts.map +1 -0
  124. package/dist/types/index.js +1 -1
  125. package/dist/types/types.d.ts +1654 -0
  126. package/dist/types/types.d.ts.map +1 -0
  127. package/dist/types/types.js +9 -0
  128. package/dist/utils/compressedCache.d.ts +192 -0
  129. package/dist/utils/compressedCache.d.ts.map +1 -0
  130. package/dist/utils/compressedCache.js +309 -0
  131. package/dist/utils/compressionUtil.d.ts +214 -0
  132. package/dist/utils/compressionUtil.d.ts.map +1 -0
  133. package/dist/utils/compressionUtil.js +247 -0
  134. package/dist/utils/constants.d.ts +245 -0
  135. package/dist/utils/constants.d.ts.map +1 -0
  136. package/dist/utils/constants.js +124 -0
  137. package/dist/utils/entityUtils.d.ts +354 -0
  138. package/dist/utils/entityUtils.d.ts.map +1 -0
  139. package/dist/utils/entityUtils.js +511 -4
  140. package/dist/utils/errors.d.ts +95 -0
  141. package/dist/utils/errors.d.ts.map +1 -0
  142. package/dist/utils/errors.js +24 -0
  143. package/dist/utils/formatters.d.ts +145 -0
  144. package/dist/utils/formatters.d.ts.map +1 -0
  145. package/dist/utils/{paginationUtils.js → formatters.js} +54 -3
  146. package/dist/utils/index.d.ts +23 -0
  147. package/dist/utils/index.d.ts.map +1 -0
  148. package/dist/utils/index.js +71 -31
  149. package/dist/utils/indexes.d.ts +270 -0
  150. package/dist/utils/indexes.d.ts.map +1 -0
  151. package/dist/utils/indexes.js +526 -0
  152. package/dist/utils/logger.d.ts +24 -0
  153. package/dist/utils/logger.d.ts.map +1 -0
  154. package/dist/utils/operationUtils.d.ts +124 -0
  155. package/dist/utils/operationUtils.d.ts.map +1 -0
  156. package/dist/utils/operationUtils.js +175 -0
  157. package/dist/utils/parallelUtils.d.ts +76 -0
  158. package/dist/utils/parallelUtils.d.ts.map +1 -0
  159. package/dist/utils/parallelUtils.js +191 -0
  160. package/dist/utils/schemas.d.ts +374 -0
  161. package/dist/utils/schemas.d.ts.map +1 -0
  162. package/dist/utils/schemas.js +307 -7
  163. package/dist/utils/searchAlgorithms.d.ts +99 -0
  164. package/dist/utils/searchAlgorithms.d.ts.map +1 -0
  165. package/dist/utils/searchAlgorithms.js +167 -0
  166. package/dist/utils/searchCache.d.ts +108 -0
  167. package/dist/utils/searchCache.d.ts.map +1 -0
  168. package/dist/utils/taskScheduler.d.ts +294 -0
  169. package/dist/utils/taskScheduler.d.ts.map +1 -0
  170. package/dist/utils/taskScheduler.js +486 -0
  171. package/dist/workers/index.d.ts +12 -0
  172. package/dist/workers/index.d.ts.map +1 -0
  173. package/dist/workers/index.js +9 -0
  174. package/dist/workers/levenshteinWorker.d.ts +60 -0
  175. package/dist/workers/levenshteinWorker.d.ts.map +1 -0
  176. package/dist/workers/levenshteinWorker.js +98 -0
  177. package/package.json +17 -4
  178. package/dist/__tests__/edge-cases/edge-cases.test.js +0 -406
  179. package/dist/__tests__/integration/workflows.test.js +0 -449
  180. package/dist/__tests__/performance/benchmarks.test.js +0 -413
  181. package/dist/__tests__/unit/core/EntityManager.test.js +0 -334
  182. package/dist/__tests__/unit/core/GraphStorage.test.js +0 -205
  183. package/dist/__tests__/unit/core/RelationManager.test.js +0 -274
  184. package/dist/__tests__/unit/features/CompressionManager.test.js +0 -350
  185. package/dist/__tests__/unit/search/BasicSearch.test.js +0 -311
  186. package/dist/__tests__/unit/search/BooleanSearch.test.js +0 -432
  187. package/dist/__tests__/unit/search/FuzzySearch.test.js +0 -448
  188. package/dist/__tests__/unit/search/RankedSearch.test.js +0 -379
  189. package/dist/__tests__/unit/utils/levenshtein.test.js +0 -77
  190. package/dist/core/KnowledgeGraphManager.js +0 -423
  191. package/dist/features/BackupManager.js +0 -311
  192. package/dist/features/ExportManager.js +0 -305
  193. package/dist/features/ImportExportManager.js +0 -50
  194. package/dist/features/ImportManager.js +0 -328
  195. package/dist/memory-saved-searches.jsonl +0 -0
  196. package/dist/memory-tag-aliases.jsonl +0 -0
  197. package/dist/types/analytics.types.js +0 -6
  198. package/dist/types/entity.types.js +0 -7
  199. package/dist/types/import-export.types.js +0 -7
  200. package/dist/types/search.types.js +0 -7
  201. package/dist/types/tag.types.js +0 -6
  202. package/dist/utils/dateUtils.js +0 -89
  203. package/dist/utils/filterUtils.js +0 -155
  204. package/dist/utils/levenshtein.js +0 -62
  205. package/dist/utils/pathUtils.js +0 -115
  206. package/dist/utils/responseFormatter.js +0 -55
  207. package/dist/utils/tagUtils.js +0 -107
  208. package/dist/utils/tfidf.js +0 -90
  209. package/dist/utils/validationHelper.js +0 -99
  210. package/dist/utils/validationUtils.js +0 -109
@@ -0,0 +1,1092 @@
+ /**
+ * IO Manager
+ *
+ * Unified manager for import, export, and backup operations.
+ * Consolidates BackupManager, ExportManager, and ImportManager (Sprint 11.4).
+ *
+ * @module features/IOManager
+ */
+ import { promises as fs } from 'fs';
+ import { dirname, join } from 'path';
+ import { FileOperationError } from '../utils/errors.js';
+ import { compress, decompress, hasBrotliExtension, COMPRESSION_CONFIG, STREAMING_CONFIG, checkCancellation, createProgressReporter, createProgress, validateFilePath, sanitizeObject, escapeCsvFormula, } from '../utils/index.js';
+ import { StreamingExporter } from './StreamingExporter.js';
+ // ============================================================
+ // IO MANAGER CLASS
+ // ============================================================
+ /**
+ * Unified manager for import, export, and backup operations.
+ *
+ * Combines functionality from:
+ * - ExportManager: Graph export to various formats
+ * - ImportManager: Graph import from various formats
+ * - BackupManager: Point-in-time backup and restore
+ */
+ export class IOManager {
+ storage;
+ backupDir;
+ constructor(storage) {
+ this.storage = storage;
+ const filePath = this.storage.getFilePath();
+ const dir = dirname(filePath);
+ this.backupDir = join(dir, '.backups');
+ }
+ // ============================================================
+ // EXPORT OPERATIONS
+ // ============================================================
+ /**
+ * Export graph to specified format.
+ *
+ * @param graph - Knowledge graph to export
+ * @param format - Export format
+ * @returns Formatted export string
+ */
+ exportGraph(graph, format) {
+ switch (format) {
+ case 'json':
+ return this.exportAsJson(graph);
+ case 'csv':
+ return this.exportAsCsv(graph);
+ case 'graphml':
+ return this.exportAsGraphML(graph);
+ case 'gexf':
+ return this.exportAsGEXF(graph);
+ case 'dot':
+ return this.exportAsDOT(graph);
+ case 'markdown':
+ return this.exportAsMarkdown(graph);
+ case 'mermaid':
+ return this.exportAsMermaid(graph);
+ default:
+ throw new Error(`Unsupported export format: ${format}`);
+ }
+ }
+ /**
+ * Export graph with optional brotli compression.
+ *
+ * Compression is applied when:
+ * - `options.compress` is explicitly set to `true`
+ * - The exported content exceeds 100KB (auto-compress threshold)
+ *
+ * Compressed content is returned as base64-encoded string.
+ * Uncompressed content is returned as UTF-8 string.
+ *
+ * @param graph - Knowledge graph to export
+ * @param format - Export format
+ * @param options - Export options including compression settings
+ * @returns Export result with content and compression metadata
+ *
+ * @example
+ * ```typescript
+ * // Export with explicit compression
+ * const result = await manager.exportGraphWithCompression(graph, 'json', {
+ * compress: true,
+ * compressionQuality: 11
+ * });
+ *
+ * // Export with auto-compression for large graphs
+ * const result = await manager.exportGraphWithCompression(graph, 'json');
+ * // Compresses automatically if content > 100KB
+ * ```
+ */
+ async exportGraphWithCompression(graph, format, options) {
+ // Check if streaming should be used
+ const shouldStream = options?.streaming ||
+ (options?.outputPath && graph.entities.length >= STREAMING_CONFIG.STREAMING_THRESHOLD);
+ if (shouldStream && options?.outputPath) {
+ return this.streamExport(format, graph, options);
+ }
+ // Generate export content using existing method
+ const content = this.exportGraph(graph, format);
+ const originalSize = Buffer.byteLength(content, 'utf-8');
+ // Determine if compression should be applied
+ const shouldCompress = options?.compress === true ||
+ (options?.compress !== false &&
+ originalSize > COMPRESSION_CONFIG.AUTO_COMPRESS_EXPORT_SIZE);
+ if (shouldCompress) {
+ const quality = options?.compressionQuality ?? COMPRESSION_CONFIG.BROTLI_QUALITY_BATCH;
+ const compressionResult = await compress(content, {
+ quality,
+ mode: 'text',
+ });
+ return {
+ format,
+ content: compressionResult.compressed.toString('base64'),
+ entityCount: graph.entities.length,
+ relationCount: graph.relations.length,
+ compressed: true,
+ encoding: 'base64',
+ originalSize,
+ compressedSize: compressionResult.compressedSize,
+ compressionRatio: compressionResult.ratio,
+ };
+ }
+ // Return uncompressed content
+ return {
+ format,
+ content,
+ entityCount: graph.entities.length,
+ relationCount: graph.relations.length,
+ compressed: false,
+ encoding: 'utf-8',
+ originalSize,
+ compressedSize: originalSize,
+ compressionRatio: 1,
+ };
+ }
+ /**
+ * Stream export to a file for large graphs.
+ *
+ * Uses StreamingExporter to write entities and relations incrementally
+ * to avoid loading the entire export content into memory.
+ *
+ * @param format - Export format
+ * @param graph - Knowledge graph to export
+ * @param options - Export options with required outputPath
+ * @returns Export result with streaming metadata
+ * @private
+ */
+ async streamExport(format, graph, options) {
+ // Validate path to prevent path traversal attacks (defense in depth)
+ const validatedOutputPath = validateFilePath(options.outputPath);
+ const exporter = new StreamingExporter(validatedOutputPath);
+ let result;
+ switch (format) {
+ case 'json':
+ // Use JSONL format for streaming (line-delimited JSON)
+ result = await exporter.streamJSONL(graph);
+ break;
+ case 'csv':
+ result = await exporter.streamCSV(graph);
+ break;
+ default:
+ // Fallback to in-memory export for unsupported streaming formats
+ const content = this.exportGraph(graph, format);
+ await fs.writeFile(validatedOutputPath, content);
+ result = {
+ bytesWritten: Buffer.byteLength(content, 'utf-8'),
+ entitiesWritten: graph.entities.length,
+ relationsWritten: graph.relations.length,
+ durationMs: 0,
+ };
+ }
+ return {
+ format,
+ content: `Streamed to ${validatedOutputPath}`,
+ entityCount: result.entitiesWritten,
+ relationCount: result.relationsWritten,
+ compressed: false,
+ encoding: 'utf-8',
+ originalSize: result.bytesWritten,
+ compressedSize: result.bytesWritten,
+ compressionRatio: 1,
+ streamed: true,
+ outputPath: validatedOutputPath,
+ };
+ }
+ exportAsJson(graph) {
+ return JSON.stringify(graph, null, 2);
+ }
+ exportAsCsv(graph) {
+ const lines = [];
+ const escapeCsvField = (field) => {
+ if (field === undefined || field === null)
+ return '';
+ // First protect against CSV formula injection
+ let str = escapeCsvFormula(String(field));
+ // Then handle CSV special characters
+ if (str.includes(',') || str.includes('"') || str.includes('\n')) {
+ return `"${str.replace(/"/g, '""')}"`;
+ }
+ return str;
+ };
+ lines.push('# ENTITIES');
+ lines.push('name,entityType,observations,createdAt,lastModified,tags,importance');
+ for (const entity of graph.entities) {
+ const observationsStr = entity.observations.join('; ');
+ const tagsStr = entity.tags ? entity.tags.join('; ') : '';
+ const importanceStr = entity.importance !== undefined ? String(entity.importance) : '';
+ lines.push([
+ escapeCsvField(entity.name),
+ escapeCsvField(entity.entityType),
+ escapeCsvField(observationsStr),
+ escapeCsvField(entity.createdAt),
+ escapeCsvField(entity.lastModified),
+ escapeCsvField(tagsStr),
+ escapeCsvField(importanceStr),
+ ].join(','));
+ }
+ lines.push('');
+ lines.push('# RELATIONS');
+ lines.push('from,to,relationType,createdAt,lastModified');
+ for (const relation of graph.relations) {
+ lines.push([
+ escapeCsvField(relation.from),
+ escapeCsvField(relation.to),
+ escapeCsvField(relation.relationType),
+ escapeCsvField(relation.createdAt),
+ escapeCsvField(relation.lastModified),
+ ].join(','));
+ }
+ return lines.join('\n');
+ }
+ exportAsGraphML(graph) {
+ const lines = [];
+ const escapeXml = (str) => {
+ if (str === undefined || str === null)
+ return '';
+ return String(str)
+ .replace(/&/g, '&amp;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;')
+ .replace(/"/g, '&quot;')
+ .replace(/'/g, '&apos;');
+ };
+ lines.push('<?xml version="1.0" encoding="UTF-8"?>');
+ lines.push('<graphml xmlns="http://graphml.graphdrawing.org/xmlns">');
+ lines.push(' <key id="d0" for="node" attr.name="entityType" attr.type="string"/>');
+ lines.push(' <key id="d1" for="node" attr.name="observations" attr.type="string"/>');
+ lines.push(' <key id="d2" for="node" attr.name="createdAt" attr.type="string"/>');
+ lines.push(' <key id="d3" for="node" attr.name="lastModified" attr.type="string"/>');
+ lines.push(' <key id="d4" for="node" attr.name="tags" attr.type="string"/>');
+ lines.push(' <key id="d5" for="node" attr.name="importance" attr.type="double"/>');
+ lines.push(' <key id="e0" for="edge" attr.name="relationType" attr.type="string"/>');
+ lines.push(' <key id="e1" for="edge" attr.name="createdAt" attr.type="string"/>');
+ lines.push(' <key id="e2" for="edge" attr.name="lastModified" attr.type="string"/>');
+ lines.push(' <graph id="G" edgedefault="directed">');
+ for (const entity of graph.entities) {
+ const nodeId = escapeXml(entity.name);
+ lines.push(` <node id="${nodeId}">`);
+ lines.push(` <data key="d0">${escapeXml(entity.entityType)}</data>`);
+ lines.push(` <data key="d1">${escapeXml(entity.observations.join('; '))}</data>`);
+ if (entity.createdAt)
+ lines.push(` <data key="d2">${escapeXml(entity.createdAt)}</data>`);
+ if (entity.lastModified)
+ lines.push(` <data key="d3">${escapeXml(entity.lastModified)}</data>`);
+ if (entity.tags?.length)
+ lines.push(` <data key="d4">${escapeXml(entity.tags.join('; '))}</data>`);
+ if (entity.importance !== undefined)
+ lines.push(` <data key="d5">${entity.importance}</data>`);
+ lines.push(' </node>');
+ }
+ let edgeId = 0;
+ for (const relation of graph.relations) {
+ const sourceId = escapeXml(relation.from);
+ const targetId = escapeXml(relation.to);
+ lines.push(` <edge id="e${edgeId}" source="${sourceId}" target="${targetId}">`);
+ lines.push(` <data key="e0">${escapeXml(relation.relationType)}</data>`);
+ if (relation.createdAt)
+ lines.push(` <data key="e1">${escapeXml(relation.createdAt)}</data>`);
+ if (relation.lastModified)
+ lines.push(` <data key="e2">${escapeXml(relation.lastModified)}</data>`);
+ lines.push(' </edge>');
+ edgeId++;
+ }
+ lines.push(' </graph>');
+ lines.push('</graphml>');
+ return lines.join('\n');
+ }
+ exportAsGEXF(graph) {
+ const lines = [];
+ const escapeXml = (str) => {
+ if (str === undefined || str === null)
+ return '';
+ return String(str)
+ .replace(/&/g, '&amp;')
+ .replace(/</g, '&lt;')
+ .replace(/>/g, '&gt;')
+ .replace(/"/g, '&quot;')
+ .replace(/'/g, '&apos;');
+ };
+ lines.push('<?xml version="1.0" encoding="UTF-8"?>');
+ lines.push('<gexf xmlns="http://www.gexf.net/1.2draft" version="1.2">');
+ lines.push(' <meta>');
+ lines.push(' <creator>Memory MCP Server</creator>');
+ lines.push(' </meta>');
+ lines.push(' <graph mode="static" defaultedgetype="directed">');
+ lines.push(' <attributes class="node">');
+ lines.push(' <attribute id="0" title="entityType" type="string"/>');
+ lines.push(' <attribute id="1" title="observations" type="string"/>');
+ lines.push(' </attributes>');
+ lines.push(' <nodes>');
+ for (const entity of graph.entities) {
+ const nodeId = escapeXml(entity.name);
+ lines.push(` <node id="${nodeId}" label="${nodeId}">`);
+ lines.push(' <attvalues>');
+ lines.push(` <attvalue for="0" value="${escapeXml(entity.entityType)}"/>`);
+ lines.push(` <attvalue for="1" value="${escapeXml(entity.observations.join('; '))}"/>`);
+ lines.push(' </attvalues>');
+ lines.push(' </node>');
+ }
+ lines.push(' </nodes>');
+ lines.push(' <edges>');
+ let edgeId = 0;
+ for (const relation of graph.relations) {
+ const sourceId = escapeXml(relation.from);
+ const targetId = escapeXml(relation.to);
+ const label = escapeXml(relation.relationType);
+ lines.push(` <edge id="${edgeId}" source="${sourceId}" target="${targetId}" label="${label}"/>`);
+ edgeId++;
+ }
+ lines.push(' </edges>');
+ lines.push(' </graph>');
+ lines.push('</gexf>');
+ return lines.join('\n');
+ }
+ exportAsDOT(graph) {
+ const lines = [];
+ const escapeDot = (str) => {
+ return '"' + str.replace(/\\/g, '\\\\').replace(/"/g, '\\"').replace(/\n/g, '\\n') + '"';
+ };
+ lines.push('digraph KnowledgeGraph {');
+ lines.push(' rankdir=LR;');
+ lines.push(' node [shape=box, style=rounded];');
+ lines.push('');
+ for (const entity of graph.entities) {
+ const nodeId = escapeDot(entity.name);
+ const label = [`${entity.name}`, `Type: ${entity.entityType}`];
+ if (entity.tags?.length)
+ label.push(`Tags: ${entity.tags.join(', ')}`);
+ const labelStr = escapeDot(label.join('\\n'));
+ lines.push(` ${nodeId} [label=${labelStr}];`);
+ }
+ lines.push('');
+ for (const relation of graph.relations) {
+ const fromId = escapeDot(relation.from);
+ const toId = escapeDot(relation.to);
+ const label = escapeDot(relation.relationType);
+ lines.push(` ${fromId} -> ${toId} [label=${label}];`);
+ }
+ lines.push('}');
+ return lines.join('\n');
+ }
+ exportAsMarkdown(graph) {
+ const lines = [];
+ lines.push('# Knowledge Graph Export');
+ lines.push('');
+ lines.push(`**Exported:** ${new Date().toISOString()}`);
+ lines.push(`**Entities:** ${graph.entities.length}`);
+ lines.push(`**Relations:** ${graph.relations.length}`);
+ lines.push('');
+ lines.push('## Entities');
+ lines.push('');
+ for (const entity of graph.entities) {
+ lines.push(`### ${entity.name}`);
+ lines.push('');
+ lines.push(`- **Type:** ${entity.entityType}`);
+ if (entity.tags?.length)
+ lines.push(`- **Tags:** ${entity.tags.map(t => `\`${t}\``).join(', ')}`);
+ if (entity.importance !== undefined)
+ lines.push(`- **Importance:** ${entity.importance}/10`);
+ if (entity.observations.length > 0) {
+ lines.push('');
+ lines.push('**Observations:**');
+ for (const obs of entity.observations) {
+ lines.push(`- ${obs}`);
+ }
+ }
+ lines.push('');
+ }
+ if (graph.relations.length > 0) {
+ lines.push('## Relations');
+ lines.push('');
+ for (const relation of graph.relations) {
+ lines.push(`- **${relation.from}** → *${relation.relationType}* → **${relation.to}**`);
+ }
+ lines.push('');
+ }
+ return lines.join('\n');
+ }
+ exportAsMermaid(graph) {
+ const lines = [];
+ const sanitizeId = (str) => str.replace(/[^a-zA-Z0-9_]/g, '_');
+ const escapeLabel = (str) => str.replace(/"/g, '#quot;');
+ lines.push('graph LR');
+ lines.push(' %% Knowledge Graph');
+ lines.push('');
+ const nodeIds = new Map();
+ for (const entity of graph.entities) {
+ nodeIds.set(entity.name, sanitizeId(entity.name));
+ }
+ for (const entity of graph.entities) {
+ const nodeId = nodeIds.get(entity.name);
+ const labelParts = [entity.name, `Type: ${entity.entityType}`];
+ if (entity.tags?.length)
+ labelParts.push(`Tags: ${entity.tags.join(', ')}`);
+ const label = escapeLabel(labelParts.join('<br/>'));
+ lines.push(` ${nodeId}["${label}"]`);
+ }
+ lines.push('');
+ for (const relation of graph.relations) {
+ const fromId = nodeIds.get(relation.from);
+ const toId = nodeIds.get(relation.to);
+ if (fromId && toId) {
+ const label = escapeLabel(relation.relationType);
+ lines.push(` ${fromId} -->|"${label}"| ${toId}`);
+ }
+ }
+ return lines.join('\n');
+ }
+ // ============================================================
+ // IMPORT OPERATIONS
+ // ============================================================
+ /**
+ * Import graph from formatted data.
+ *
+ * Phase 9B: Supports progress tracking and cancellation via LongRunningOperationOptions.
+ *
+ * @param format - Import format
+ * @param data - Import data string
+ * @param mergeStrategy - How to handle conflicts
+ * @param dryRun - If true, preview changes without applying
+ * @param options - Optional progress/cancellation options (Phase 9B)
+ * @returns Import result with statistics
+ * @throws {OperationCancelledError} If operation is cancelled via signal (Phase 9B)
+ */
+ async importGraph(format, data, mergeStrategy = 'skip', dryRun = false, options) {
+ // Check for early cancellation
+ checkCancellation(options?.signal, 'importGraph');
+ // Setup progress reporter
+ const reportProgress = createProgressReporter(options?.onProgress);
+ reportProgress?.(createProgress(0, 100, 'importGraph'));
+ let importedGraph;
+ try {
+ // Parsing phase (0-20% progress)
+ reportProgress?.(createProgress(5, 100, 'parsing data'));
+ checkCancellation(options?.signal, 'importGraph');
+ switch (format) {
+ case 'json':
+ importedGraph = this.parseJsonImport(data);
+ break;
+ case 'csv':
+ importedGraph = this.parseCsvImport(data);
+ break;
+ case 'graphml':
+ importedGraph = this.parseGraphMLImport(data);
+ break;
+ default:
+ throw new Error(`Unsupported import format: ${format}`);
+ }
+ reportProgress?.(createProgress(20, 100, 'parsing complete'));
+ }
+ catch (error) {
+ return {
+ entitiesAdded: 0,
+ entitiesSkipped: 0,
+ entitiesUpdated: 0,
+ relationsAdded: 0,
+ relationsSkipped: 0,
+ errors: [`Failed to parse ${format} data: ${error instanceof Error ? error.message : String(error)}`],
+ };
+ }
+ // Merging phase (20-100% progress)
+ return await this.mergeImportedGraph(importedGraph, mergeStrategy, dryRun, options);
+ }
+ parseJsonImport(data) {
+ // Security: Limit input size to prevent DoS (10MB max)
+ const MAX_IMPORT_SIZE = 10 * 1024 * 1024;
+ if (data.length > MAX_IMPORT_SIZE) {
+ throw new FileOperationError(`JSON import data exceeds maximum size of ${MAX_IMPORT_SIZE / (1024 * 1024)}MB`, 'json-import');
+ }
+ const parsed = JSON.parse(data);
+ if (!parsed.entities || !Array.isArray(parsed.entities)) {
+ throw new Error('Invalid JSON: missing or invalid entities array');
+ }
+ if (!parsed.relations || !Array.isArray(parsed.relations)) {
+ throw new Error('Invalid JSON: missing or invalid relations array');
+ }
+ // Security: Limit maximum number of entities/relations
+ const MAX_ITEMS = 100000;
+ if (parsed.entities.length > MAX_ITEMS) {
+ throw new FileOperationError(`JSON import exceeds maximum entity count of ${MAX_ITEMS}`, 'json-import');
+ }
+ if (parsed.relations.length > MAX_ITEMS) {
+ throw new FileOperationError(`JSON import exceeds maximum relation count of ${MAX_ITEMS}`, 'json-import');
+ }
+ return {
+ entities: parsed.entities,
+ relations: parsed.relations,
+ };
+ }
+ parseCsvImport(data) {
+ // Security: Limit input size to prevent DoS (10MB max)
+ const MAX_IMPORT_SIZE = 10 * 1024 * 1024;
+ if (data.length > MAX_IMPORT_SIZE) {
+ throw new FileOperationError(`CSV import data exceeds maximum size of ${MAX_IMPORT_SIZE / (1024 * 1024)}MB`, 'csv-import');
+ }
+ // Security: Limit maximum number of entities/relations
+ const MAX_ITEMS = 100000;
+ const lines = data
+ .split('\n')
+ .map(line => line.trim())
+ .filter(line => line);
+ const entities = [];
+ const relations = [];
+ let section = null;
+ let headerParsed = false;
+ const parseCsvLine = (line) => {
+ const fields = [];
+ let current = '';
+ let inQuotes = false;
+ for (let i = 0; i < line.length; i++) {
+ const char = line[i];
+ if (char === '"') {
+ if (inQuotes && line[i + 1] === '"') {
+ current += '"';
+ i++;
+ }
+ else {
+ inQuotes = !inQuotes;
+ }
+ }
+ else if (char === ',' && !inQuotes) {
+ fields.push(current);
+ current = '';
+ }
+ else {
+ current += char;
+ }
+ }
+ fields.push(current);
+ return fields;
+ };
+ for (const line of lines) {
+ if (line.startsWith('# ENTITIES')) {
+ section = 'entities';
+ headerParsed = false;
+ continue;
+ }
+ else if (line.startsWith('# RELATIONS')) {
+ section = 'relations';
+ headerParsed = false;
+ continue;
+ }
+ if (line.startsWith('#'))
+ continue;
+ if (section === 'entities') {
+ if (!headerParsed) {
+ headerParsed = true;
+ continue;
+ }
+ const fields = parseCsvLine(line);
+ if (fields.length >= 2) {
+ // Security: Check entity limit
+ if (entities.length >= MAX_ITEMS) {
+ throw new FileOperationError(`CSV import exceeds maximum entity count of ${MAX_ITEMS}`, 'csv-import');
+ }
+ const entity = {
+ name: fields[0],
+ entityType: fields[1],
+ observations: fields[2]
+ ? fields[2]
+ .split(';')
+ .map(s => s.trim())
+ .filter(s => s)
+ : [],
+ createdAt: fields[3] || undefined,
+ lastModified: fields[4] || undefined,
+ tags: fields[5]
+ ? fields[5]
+ .split(';')
+ .map(s => s.trim().toLowerCase())
+ .filter(s => s)
+ : undefined,
+ importance: fields[6] ? parseFloat(fields[6]) : undefined,
+ };
+ entities.push(entity);
+ }
+ }
+ else if (section === 'relations') {
+ if (!headerParsed) {
+ headerParsed = true;
+ continue;
+ }
+ const fields = parseCsvLine(line);
+ if (fields.length >= 3) {
+ // Security: Check relation limit
+ if (relations.length >= MAX_ITEMS) {
+ throw new FileOperationError(`CSV import exceeds maximum relation count of ${MAX_ITEMS}`, 'csv-import');
+ }
+ const relation = {
+ from: fields[0],
+ to: fields[1],
+ relationType: fields[2],
+ createdAt: fields[3] || undefined,
+ lastModified: fields[4] || undefined,
+ };
+ relations.push(relation);
+ }
+ }
+ }
+ return { entities, relations };
+ }
+ parseGraphMLImport(data) {
+ const entities = [];
+ const relations = [];
+ // Security: Limit input size to prevent ReDoS attacks (10MB max)
+ const MAX_IMPORT_SIZE = 10 * 1024 * 1024;
+ if (data.length > MAX_IMPORT_SIZE) {
+ throw new FileOperationError(`GraphML import data exceeds maximum size of ${MAX_IMPORT_SIZE / (1024 * 1024)}MB`, 'graphml-import');
+ }
+ // Security: Limit maximum number of entities/relations to prevent infinite loops
+ const MAX_ITEMS = 100000;
+ let nodeCount = 0;
+ let relationCount = 0;
+ // Use non-greedy patterns with character class restrictions
+ const nodeRegex = /<node\s+id="([^"]+)"[^>]*>([\s\S]*?)<\/node>/g;
+ let nodeMatch;
+ while ((nodeMatch = nodeRegex.exec(data)) !== null) {
+ // Security: Limit iterations to prevent ReDoS
+ if (++nodeCount > MAX_ITEMS) {
+ throw new FileOperationError(`GraphML import exceeds maximum entity count of ${MAX_ITEMS}`, 'graphml-import');
+ }
+ const nodeId = nodeMatch[1];
+ const nodeContent = nodeMatch[2];
+ const getDataValue = (key) => {
+ const dataRegex = new RegExp(`<data\\s+key="${key}">([^<]*)<\/data>`);
+ const match = dataRegex.exec(nodeContent);
+ return match ? match[1] : undefined;
+ };
+ const entity = {
+ name: nodeId,
+ entityType: getDataValue('d0') || getDataValue('entityType') || 'unknown',
+ observations: (getDataValue('d1') || getDataValue('observations') || '')
+ .split(';')
+ .map(s => s.trim())
+ .filter(s => s),
+ createdAt: getDataValue('d2') || getDataValue('createdAt'),
+ lastModified: getDataValue('d3') || getDataValue('lastModified'),
+ tags: (getDataValue('d4') || getDataValue('tags') || '')
+ .split(';')
+ .map(s => s.trim().toLowerCase())
+ .filter(s => s),
+ importance: getDataValue('d5') || getDataValue('importance') ? parseFloat(getDataValue('d5') || getDataValue('importance') || '0') : undefined,
+ };
+ entities.push(entity);
+ }
+ const edgeRegex = /<edge\s+[^>]*source="([^"]+)"\s+target="([^"]+)"[^>]*>([\s\S]*?)<\/edge>/g;
+ let edgeMatch;
+ while ((edgeMatch = edgeRegex.exec(data)) !== null) {
+ // Security: Limit iterations to prevent ReDoS
+ if (++relationCount > MAX_ITEMS) {
+ throw new FileOperationError(`GraphML import exceeds maximum relation count of ${MAX_ITEMS}`, 'graphml-import');
+ }
+ const source = edgeMatch[1];
+ const target = edgeMatch[2];
+ const edgeContent = edgeMatch[3];
+ const getDataValue = (key) => {
+ const dataRegex = new RegExp(`<data\\s+key="${key}">([^<]*)<\/data>`);
+ const match = dataRegex.exec(edgeContent);
+ return match ? match[1] : undefined;
+ };
+ const relation = {
+ from: source,
+ to: target,
+ relationType: getDataValue('e0') || getDataValue('relationType') || 'related_to',
+ createdAt: getDataValue('e1') || getDataValue('createdAt'),
+ lastModified: getDataValue('e2') || getDataValue('lastModified'),
+ };
+ relations.push(relation);
+ }
+ return { entities, relations };
+ }
+ async mergeImportedGraph(importedGraph, mergeStrategy, dryRun, options) {
+ // Check for cancellation
+ checkCancellation(options?.signal, 'importGraph');
+ // Setup progress reporter (we're at 20% from parsing, need to go to 100%)
+ const reportProgress = createProgressReporter(options?.onProgress);
+ const existingGraph = await this.storage.getGraphForMutation();
+ const result = {
+ entitiesAdded: 0,
+ entitiesSkipped: 0,
+ entitiesUpdated: 0,
+ relationsAdded: 0,
+ relationsSkipped: 0,
+ errors: [],
+ };
+ const existingEntitiesMap = new Map();
+ for (const entity of existingGraph.entities) {
+ existingEntitiesMap.set(entity.name, entity);
+ }
+ const existingRelationsSet = new Set();
+ for (const relation of existingGraph.relations) {
+ existingRelationsSet.add(`${relation.from}|${relation.to}|${relation.relationType}`);
+ }
+ // Process entities (20-60% progress)
+ const totalEntities = importedGraph.entities.length;
+ const totalRelations = importedGraph.relations.length;
+ let processedEntities = 0;
+ for (const importedEntity of importedGraph.entities) {
+ // Check for cancellation periodically
+ checkCancellation(options?.signal, 'importGraph');
+ const existing = existingEntitiesMap.get(importedEntity.name);
+ if (!existing) {
+ result.entitiesAdded++;
+ if (!dryRun) {
+ existingGraph.entities.push(importedEntity);
+ existingEntitiesMap.set(importedEntity.name, importedEntity);
+ }
+ }
+ else {
+ switch (mergeStrategy) {
+ case 'replace':
+ result.entitiesUpdated++;
+ if (!dryRun) {
+ // Sanitize imported entity to prevent prototype pollution
+ Object.assign(existing, sanitizeObject(importedEntity));
+ }
+ break;
+ case 'skip':
+ result.entitiesSkipped++;
+ break;
+ case 'merge':
+ result.entitiesUpdated++;
+ if (!dryRun) {
+ existing.observations = [
+ ...new Set([...existing.observations, ...importedEntity.observations]),
+ ];
+ if (importedEntity.tags) {
+ existing.tags = existing.tags || [];
+ existing.tags = [...new Set([...existing.tags, ...importedEntity.tags])];
+ }
+ if (importedEntity.importance !== undefined) {
+ existing.importance = importedEntity.importance;
+ }
+ existing.lastModified = new Date().toISOString();
+ }
+ break;
+ case 'fail':
+ result.errors.push(`Entity "${importedEntity.name}" already exists`);
+ break;
+ }
+ }
+ processedEntities++;
+ // Map entity progress (0-100%) to overall progress (20-60%)
+ const entityProgress = totalEntities > 0 ? Math.round(20 + (processedEntities / totalEntities) * 40) : 60;
+ reportProgress?.(createProgress(entityProgress, 100, 'importing entities'));
+ }
+ reportProgress?.(createProgress(60, 100, 'importing relations'));
+ // Process relations (60-95% progress)
+ let processedRelations = 0;
+ for (const importedRelation of importedGraph.relations) {
+ // Check for cancellation periodically
+ checkCancellation(options?.signal, 'importGraph');
+ const relationKey = `${importedRelation.from}|${importedRelation.to}|${importedRelation.relationType}`;
+ if (!existingEntitiesMap.has(importedRelation.from)) {
+ result.errors.push(`Relation source entity "${importedRelation.from}" does not exist`);
+ processedRelations++;
+ continue;
+ }
+ if (!existingEntitiesMap.has(importedRelation.to)) {
+ result.errors.push(`Relation target entity "${importedRelation.to}" does not exist`);
+ processedRelations++;
+ continue;
+ }
+ if (!existingRelationsSet.has(relationKey)) {
+ result.relationsAdded++;
+ if (!dryRun) {
+ existingGraph.relations.push(importedRelation);
+ existingRelationsSet.add(relationKey);
+ }
+ }
+ else {
+ if (mergeStrategy === 'fail') {
+ result.errors.push(`Relation "${relationKey}" already exists`);
+ }
+ else {
+ result.relationsSkipped++;
+ }
+ }
+ processedRelations++;
+ // Map relation progress (0-100%) to overall progress (60-95%)
+ const relationProgress = totalRelations > 0 ? Math.round(60 + (processedRelations / totalRelations) * 35) : 95;
+ reportProgress?.(createProgress(relationProgress, 100, 'importing relations'));
+ }
+ // Check for cancellation before final save
+ checkCancellation(options?.signal, 'importGraph');
+ reportProgress?.(createProgress(95, 100, 'saving graph'));
+ if (!dryRun && (mergeStrategy !== 'fail' || result.errors.length === 0)) {
+ await this.storage.saveGraph(existingGraph);
+ }
+ // Report completion
+ reportProgress?.(createProgress(100, 100, 'importGraph'));
+ return result;
+ }
+ // ============================================================
+ // BACKUP OPERATIONS
+ // ============================================================
+ /**
+ * Ensure backup directory exists.
+ */
+ async ensureBackupDir() {
+ try {
+ await fs.mkdir(this.backupDir, { recursive: true });
+ }
+ catch (error) {
+ throw new FileOperationError('create backup directory', this.backupDir, error);
+ }
+ }
+ /**
+ * Generate backup file name with timestamp.
+ * @param compressed - Whether the backup will be compressed (affects extension)
+ */
+ generateBackupFileName(compressed = true) {
+ const now = new Date();
+ const timestamp = now.toISOString()
+ .replace(/:/g, '-')
+ .replace(/\./g, '-')
+ .replace('T', '_')
+ .replace('Z', '');
+ const extension = compressed ? '.jsonl.br' : '.jsonl';
+ return `backup_${timestamp}${extension}`;
+ }
+ /**
+ * Create a backup of the current knowledge graph.
+ *
+ * By default, backups are compressed with brotli for 50-70% space reduction.
+ * Use `options.compress = false` to create uncompressed backups.
+ *
+ * @param options - Backup options (compress, description) or legacy description string
+ * @returns Promise resolving to BackupResult with compression statistics
+ *
+ * @example
+ * ```typescript
+ * // Compressed backup (default)
+ * const result = await manager.createBackup({ description: 'Pre-migration backup' });
+ * console.log(`Compressed from ${result.originalSize} to ${result.compressedSize} bytes`);
+ *
+ * // Uncompressed backup
+ * const result = await manager.createBackup({ compress: false });
+ * ```
+ */
+ async createBackup(options) {
+ await this.ensureBackupDir();
+ // Handle legacy string argument (backward compatibility)
+ const opts = typeof options === 'string'
+ ? { description: options, compress: COMPRESSION_CONFIG.AUTO_COMPRESS_BACKUP }
+ : { compress: COMPRESSION_CONFIG.AUTO_COMPRESS_BACKUP, ...options };
+ const shouldCompress = opts.compress ?? COMPRESSION_CONFIG.AUTO_COMPRESS_BACKUP;
+ const graph = await this.storage.loadGraph();
+ const timestamp = new Date().toISOString();
+ const fileName = this.generateBackupFileName(shouldCompress);
+ const backupPath = join(this.backupDir, fileName);
+ try {
+ const originalPath = this.storage.getFilePath();
+ let fileContent;
+ try {
+ fileContent = await fs.readFile(originalPath, 'utf-8');
+ }
+ catch {
+ // If file doesn't exist, generate content from graph
+ const lines = [
+ ...graph.entities.map(e => JSON.stringify({ type: 'entity', ...e })),
+ ...graph.relations.map(r => JSON.stringify({ type: 'relation', ...r })),
+ ];
+ fileContent = lines.join('\n');
+ }
+ const originalSize = Buffer.byteLength(fileContent, 'utf-8');
+ let compressedSize = originalSize;
+ let compressionRatio = 1;
+ if (shouldCompress) {
+ // Compress with maximum quality for backups (archive quality)
+ const compressionResult = await compress(fileContent, {
+ quality: COMPRESSION_CONFIG.BROTLI_QUALITY_ARCHIVE,
+ mode: 'text',
+ });
+ await fs.writeFile(backupPath, compressionResult.compressed);
+ compressedSize = compressionResult.compressedSize;
+ compressionRatio = compressionResult.ratio;
+ }
+ else {
+ // Write uncompressed backup
+ await fs.writeFile(backupPath, fileContent);
+ }
+ const stats = await fs.stat(backupPath);
+ const metadata = {
+ timestamp,
+ entityCount: graph.entities.length,
+ relationCount: graph.relations.length,
+ fileSize: stats.size,
+ description: opts.description,
+ compressed: shouldCompress,
+ originalSize,
+ compressionRatio: shouldCompress ? compressionRatio : undefined,
+ compressionFormat: shouldCompress ? 'brotli' : 'none',
+ };
+ const metadataPath = `${backupPath}.meta.json`;
+ await fs.writeFile(metadataPath, JSON.stringify(metadata, null, 2));
+ return {
+ path: backupPath,
+ timestamp,
+ entityCount: graph.entities.length,
+ relationCount: graph.relations.length,
+ compressed: shouldCompress,
+ originalSize,
+ compressedSize,
+ compressionRatio,
+ description: opts.description,
+ };
+ }
+ catch (error) {
+ throw new FileOperationError('create backup', backupPath, error);
+ }
+ }
+ /**
+ * List all available backups, sorted by timestamp (newest first).
+ *
+ * Detects both compressed (.jsonl.br) and uncompressed (.jsonl) backups.
+ *
+ * @returns Promise resolving to array of backup information with compression details
+ */
+ async listBackups() {
+ try {
+ try {
+ await fs.access(this.backupDir);
+ }
+ catch {
+ return [];
+ }
+ const files = await fs.readdir(this.backupDir);
+ // Match both .jsonl and .jsonl.br backup files, exclude metadata files
+ const backupFiles = files.filter(f => f.startsWith('backup_') &&
+ (f.endsWith('.jsonl') || f.endsWith('.jsonl.br')) &&
+ !f.endsWith('.meta.json'));
+ const backups = [];
+ for (const fileName of backupFiles) {
+ const filePath = join(this.backupDir, fileName);
+ const isCompressed = hasBrotliExtension(fileName);
+ // Try to read metadata file (handles both .jsonl.meta.json and .jsonl.br.meta.json)
+ const metadataPath = `${filePath}.meta.json`;
+ try {
+ const [metadataContent, stats] = await Promise.all([
+ fs.readFile(metadataPath, 'utf-8'),
+ fs.stat(filePath),
+ ]);
+ const metadata = JSON.parse(metadataContent);
+ // Ensure compression fields are present (backward compatibility)
+ if (metadata.compressed === undefined) {
+ metadata.compressed = isCompressed;
+ }
+ if (metadata.compressionFormat === undefined) {
+ metadata.compressionFormat = isCompressed ? 'brotli' : 'none';
+ }
+ backups.push({
+ fileName,
+ filePath,
+ metadata,
+ compressed: isCompressed,
+ size: stats.size,
+ });
+ }
+ catch {
+ // Skip backups without valid metadata
+ continue;
+ }
+ }
+ backups.sort((a, b) => new Date(b.metadata.timestamp).getTime() - new Date(a.metadata.timestamp).getTime());
+ return backups;
+ }
+ catch (error) {
+ throw new FileOperationError('list backups', this.backupDir, error);
+ }
+ }
+ /**
+ * Restore the knowledge graph from a backup file.
+ *
+ * Automatically detects and decompresses brotli-compressed backups (.br extension).
+ * Maintains backward compatibility with uncompressed backups.
+ *
+ * @param backupPath - Path to the backup file to restore from
+ * @returns Promise resolving to RestoreResult with restoration details
+ *
+ * @example
+ * ```typescript
+ * // Restore from compressed backup
+ * const result = await manager.restoreFromBackup('/path/to/backup.jsonl.br');
+ * console.log(`Restored ${result.entityCount} entities from compressed backup`);
+ *
+ * // Restore from uncompressed backup (legacy)
+ * const result = await manager.restoreFromBackup('/path/to/backup.jsonl');
+ * ```
+ */
+ async restoreFromBackup(backupPath) {
+ try {
+ await fs.access(backupPath);
+ const isCompressed = hasBrotliExtension(backupPath);
+ const backupBuffer = await fs.readFile(backupPath);
+ let backupContent;
+ if (isCompressed) {
+ // Decompress the backup
+ const decompressedBuffer = await decompress(backupBuffer);
+ backupContent = decompressedBuffer.toString('utf-8');
+ }
+ else {
+ // Read as plain text
+ backupContent = backupBuffer.toString('utf-8');
+ }
+ const mainPath = this.storage.getFilePath();
+ await fs.writeFile(mainPath, backupContent);
+ this.storage.clearCache();
+ // Load the restored graph to get counts
+ const graph = await this.storage.loadGraph();
+ return {
+ entityCount: graph.entities.length,
+ relationCount: graph.relations.length,
+ restoredFrom: backupPath,
+ wasCompressed: isCompressed,
+ };
+ }
+ catch (error) {
+ throw new FileOperationError('restore from backup', backupPath, error);
+ }
+ }
+ /**
+ * Delete a specific backup file.
+ *
+ * @param backupPath - Path to the backup file to delete
+ */
+ async deleteBackup(backupPath) {
+ try {
+ await fs.unlink(backupPath);
+ try {
+ await fs.unlink(`${backupPath}.meta.json`);
+ }
+ catch {
+ // Metadata file doesn't exist - that's ok
+ }
+ }
+ catch (error) {
+ throw new FileOperationError('delete backup', backupPath, error);
+ }
+ }
+ /**
+ * Clean old backups, keeping only the most recent N backups.
+ *
+ * @param keepCount - Number of recent backups to keep (default: 10)
+ * @returns Promise resolving to number of backups deleted
+ */
+ async cleanOldBackups(keepCount = 10) {
+ const backups = await this.listBackups();
+ if (backups.length <= keepCount) {
+ return 0;
+ }
+ const backupsToDelete = backups.slice(keepCount);
+ let deletedCount = 0;
+ for (const backup of backupsToDelete) {
+ try {
+ await this.deleteBackup(backup.filePath);
+ deletedCount++;
+ }
+ catch {
+ continue;
+ }
+ }
+ return deletedCount;
+ }
+ /**
+ * Get the path to the backup directory.
+ */
+ getBackupDir() {
+ return this.backupDir;
+ }
+ }
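
Reviewer's note: the new IOManager consolidates the export, import, and backup managers that the old versions shipped separately. The sketch below is not part of the package; it is a minimal orientation example based only on the method signatures, format strings, and option names visible in this diff. The deep `dist/` import paths, the `GraphStorage` constructor shape, and the field names on the progress object are assumptions and may not match the published entry points.

```typescript
// Minimal usage sketch for the IOManager surface added in this diff.
// Assumed (not shown here): GraphStorage is importable from dist/core and
// accepts a JSONL file path; adjust imports to the package's real exports.
import { GraphStorage } from '@danielsimonjr/memory-mcp/dist/core/GraphStorage.js';
import { IOManager } from '@danielsimonjr/memory-mcp/dist/features/IOManager.js';

async function demo(): Promise<void> {
  const storage = new GraphStorage('./memory.jsonl'); // assumed constructor shape
  const io = new IOManager(storage);
  const graph = await storage.loadGraph();

  // Export: brotli compression kicks in above the 100KB auto-compress
  // threshold, or when options.compress is set explicitly.
  const exported = await io.exportGraphWithCompression(graph, 'json', { compress: true });
  console.log(exported.compressed, exported.compressionRatio);

  // Import with the Phase 9B options: dry-run preview, 'merge' strategy,
  // AbortSignal cancellation, and progress callbacks.
  const plain = await io.exportGraphWithCompression(graph, 'json', { compress: false });
  const controller = new AbortController();
  const result = await io.importGraph('json', plain.content, 'merge', true, {
    signal: controller.signal,
    onProgress: progress => console.log(progress), // progress shape assumed
  });
  console.log(result.entitiesAdded, result.errors);

  // Backups: compressed .jsonl.br files plus a .meta.json sidecar, with
  // retention cleanup of everything past the most recent ten.
  const backup = await io.createBackup({ description: 'before upgrade' });
  console.log(backup.path, backup.compressedSize);
  await io.cleanOldBackups(10);
}
```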