kiro-memory 2.1.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/README.md +5 -1
  2. package/package.json +3 -3
  3. package/plugin/dist/cli/contextkit.js +2315 -180
  4. package/plugin/dist/hooks/agentSpawn.js +548 -49
  5. package/plugin/dist/hooks/kiro-hooks.js +548 -49
  6. package/plugin/dist/hooks/postToolUse.js +556 -56
  7. package/plugin/dist/hooks/stop.js +548 -49
  8. package/plugin/dist/hooks/userPromptSubmit.js +551 -50
  9. package/plugin/dist/index.js +549 -50
  10. package/plugin/dist/plugins/github/github-client.js +152 -0
  11. package/plugin/dist/plugins/github/index.js +412 -0
  12. package/plugin/dist/plugins/github/issue-parser.js +54 -0
  13. package/plugin/dist/plugins/slack/formatter.js +90 -0
  14. package/plugin/dist/plugins/slack/index.js +215 -0
  15. package/plugin/dist/sdk/index.js +548 -49
  16. package/plugin/dist/servers/mcp-server.js +4461 -397
  17. package/plugin/dist/services/search/EmbeddingService.js +64 -20
  18. package/plugin/dist/services/search/HybridSearch.js +380 -38
  19. package/plugin/dist/services/search/VectorSearch.js +65 -21
  20. package/plugin/dist/services/search/index.js +380 -38
  21. package/plugin/dist/services/sqlite/Backup.js +416 -0
  22. package/plugin/dist/services/sqlite/Database.js +71 -0
  23. package/plugin/dist/services/sqlite/ImportExport.js +452 -0
  24. package/plugin/dist/services/sqlite/Observations.js +291 -7
  25. package/plugin/dist/services/sqlite/Prompts.js +1 -1
  26. package/plugin/dist/services/sqlite/Search.js +10 -10
  27. package/plugin/dist/services/sqlite/Summaries.js +4 -4
  28. package/plugin/dist/services/sqlite/index.js +1323 -28
  29. package/plugin/dist/viewer.css +1 -1
  30. package/plugin/dist/viewer.js +16 -8
  31. package/plugin/dist/viewer.js.map +4 -4
  32. package/plugin/dist/worker-service.js +326 -75
  33. package/plugin/dist/worker-service.js.map +4 -4
@@ -0,0 +1,416 @@
1
+ import { createRequire } from 'module';const require = createRequire(import.meta.url);
2
+
3
+ // src/services/sqlite/Backup.ts
4
+ import {
5
+ existsSync as existsSync2,
6
+ mkdirSync as mkdirSync2,
7
+ copyFileSync,
8
+ readdirSync,
9
+ statSync,
10
+ unlinkSync,
11
+ readFileSync as readFileSync2,
12
+ writeFileSync
13
+ } from "fs";
14
+ import { join as join2, basename } from "path";
15
+
16
+ // src/utils/logger.ts
17
+ import { appendFileSync, existsSync, mkdirSync, readFileSync } from "fs";
18
+ import { join } from "path";
19
+ import { homedir } from "os";
20
// Bidirectional log-level enum (name → number and number → name),
// mirroring TypeScript's compiled enum output.
var LogLevel = /* @__PURE__ */ (() => {
  const target = LogLevel || {};
  const names = ["DEBUG", "INFO", "WARN", "ERROR", "SILENT"];
  names.forEach((name, value) => {
    target[name] = value;
    target[value] = name;
  });
  return target;
})();
28
// Root directory for all kiro-memory state (settings.json, logs/): ~/.contextkit
var DEFAULT_DATA_DIR = join(homedir(), ".contextkit");
29
// File-based logger with lazy initialization: the log directory and the
// configured level are only resolved on first use, so importing this module
// performs no filesystem work.
var Logger = class {
  // Cached log level; null until getLevel() first reads settings.json.
  level = null;
  // Whether stdout is a TTY (colour capability); set in the constructor.
  useColor;
  // Absolute path of today's log file, or null when initialization failed.
  logFilePath = null;
  // Guards ensureLogFileInitialized() so setup runs at most once.
  logFileInitialized = false;
  constructor() {
    this.useColor = process.stdout.isTTY ?? false;
  }
  /**
   * Initialize log file path and ensure directory exists (lazy initialization)
   */
  ensureLogFileInitialized() {
    if (this.logFileInitialized) return;
    this.logFileInitialized = true;
    try {
      const logsDir = join(DEFAULT_DATA_DIR, "logs");
      if (!existsSync(logsDir)) {
        mkdirSync(logsDir, { recursive: true });
      }
      // One log file per calendar day: kiro-memory-YYYY-MM-DD.log
      const date = (/* @__PURE__ */ new Date()).toISOString().split("T")[0];
      this.logFilePath = join(logsDir, `kiro-memory-${date}.log`);
    } catch (error) {
      // Initialization failure leaves logFilePath null; log() then falls
      // back to stderr instead of a file.
      console.error("[LOGGER] Failed to initialize log file:", error);
      this.logFilePath = null;
    }
  }
  /**
   * Lazy-load log level from settings file
   */
  getLevel() {
    if (this.level === null) {
      try {
        const settingsPath = join(DEFAULT_DATA_DIR, "settings.json");
        if (existsSync(settingsPath)) {
          const settingsData = readFileSync(settingsPath, "utf-8");
          const settings = JSON.parse(settingsData);
          // KIRO_MEMORY_LOG_LEVEL wins over the legacy CONTEXTKIT_LOG_LEVEL
          // key; unknown or absent values resolve to INFO.
          const envLevel = (settings.KIRO_MEMORY_LOG_LEVEL || settings.CONTEXTKIT_LOG_LEVEL || "INFO").toUpperCase();
          this.level = LogLevel[envLevel] ?? 1 /* INFO */;
        } else {
          this.level = 1 /* INFO */;
        }
      } catch (error) {
        // Unreadable or invalid settings fall back to INFO.
        this.level = 1 /* INFO */;
      }
    }
    return this.level;
  }
  /**
   * Create correlation ID for tracking an observation through the pipeline
   */
  correlationId(sessionId, observationNum) {
    return `obs-${sessionId}-${observationNum}`;
  }
  /**
   * Create session correlation ID
   */
  sessionId(sessionId) {
    return `session-${sessionId}`;
  }
  /**
   * Format data for logging - create compact summaries instead of full dumps
   */
  formatData(data) {
    if (data === null || data === void 0) return "";
    if (typeof data === "string") return data;
    if (typeof data === "number") return data.toString();
    if (typeof data === "boolean") return data.toString();
    if (typeof data === "object") {
      if (data instanceof Error) {
        // Full stack trace only at DEBUG; message only otherwise.
        return this.getLevel() === 0 /* DEBUG */ ? `${data.message}
${data.stack}` : data.message;
      }
      if (Array.isArray(data)) {
        return `[${data.length} items]`;
      }
      const keys = Object.keys(data);
      if (keys.length === 0) return "{}";
      if (keys.length <= 3) {
        return JSON.stringify(data);
      }
      // Larger objects are summarized by key count plus a 3-key preview.
      return `{${keys.length} keys: ${keys.slice(0, 3).join(", ")}...}`;
    }
    return String(data);
  }
  /**
   * Format timestamp in local timezone (YYYY-MM-DD HH:MM:SS.mmm)
   */
  formatTimestamp(date) {
    const year = date.getFullYear();
    const month = String(date.getMonth() + 1).padStart(2, "0");
    const day = String(date.getDate()).padStart(2, "0");
    const hours = String(date.getHours()).padStart(2, "0");
    const minutes = String(date.getMinutes()).padStart(2, "0");
    const seconds = String(date.getSeconds()).padStart(2, "0");
    const ms = String(date.getMilliseconds()).padStart(3, "0");
    return `${year}-${month}-${day} ${hours}:${minutes}:${seconds}.${ms}`;
  }
  /**
   * Core logging method
   *
   * Builds one log line of the form
   *   [timestamp] [LEVEL] [COMPONENT] [correlation] message {k=v, ...} data
   * and appends it to the daily log file (stderr when no file is available).
   */
  log(level, component, message, context, data) {
    if (level < this.getLevel()) return;
    this.ensureLogFileInitialized();
    const timestamp = this.formatTimestamp(/* @__PURE__ */ new Date());
    const levelStr = LogLevel[level].padEnd(5);
    const componentStr = component.padEnd(6);
    let correlationStr = "";
    if (context?.correlationId) {
      correlationStr = `[${context.correlationId}] `;
    } else if (context?.sessionId) {
      correlationStr = `[session-${context.sessionId}] `;
    }
    let dataStr = "";
    if (data !== void 0 && data !== null) {
      if (data instanceof Error) {
        // Errors: message + stack at DEBUG, message only otherwise.
        dataStr = this.getLevel() === 0 /* DEBUG */ ? `
${data.message}
${data.stack}` : ` ${data.message}`;
      } else if (this.getLevel() === 0 /* DEBUG */ && typeof data === "object") {
        // DEBUG dumps objects in full, pretty-printed.
        dataStr = "\n" + JSON.stringify(data, null, 2);
      } else {
        dataStr = " " + this.formatData(data);
      }
    } else if (false) {
    }
    let contextStr = "";
    if (context) {
      // Correlation keys are rendered separately above; only the remaining
      // context keys are shown inline as {k=v, ...}.
      const { sessionId, memorySessionId, correlationId, ...rest } = context;
      if (Object.keys(rest).length > 0) {
        const pairs = Object.entries(rest).map(([k, v]) => `${k}=${v}`);
        contextStr = ` {${pairs.join(", ")}}`;
      }
    }
    const logLine = `[${timestamp}] [${levelStr}] [${componentStr}] ${correlationStr}${message}${contextStr}${dataStr}`;
    if (this.logFilePath) {
      try {
        appendFileSync(this.logFilePath, logLine + "\n", "utf8");
      } catch (error) {
        process.stderr.write(`[LOGGER] Failed to write to log file: ${error}
`);
      }
    } else {
      // No usable log file: fall back to stderr so messages are not lost.
      process.stderr.write(logLine + "\n");
    }
  }
  // Public logging methods
  debug(component, message, context, data) {
    this.log(0 /* DEBUG */, component, message, context, data);
  }
  info(component, message, context, data) {
    this.log(1 /* INFO */, component, message, context, data);
  }
  warn(component, message, context, data) {
    this.log(2 /* WARN */, component, message, context, data);
  }
  error(component, message, context, data) {
    this.log(3 /* ERROR */, component, message, context, data);
  }
  /**
   * Log data flow: input → processing
   */
  dataIn(component, message, context, data) {
    this.info(component, `\u2192 ${message}`, context, data);
  }
  /**
   * Log data flow: processing → output
   */
  dataOut(component, message, context, data) {
    this.info(component, `\u2190 ${message}`, context, data);
  }
  /**
   * Log successful completion
   */
  success(component, message, context, data) {
    this.info(component, `\u2713 ${message}`, context, data);
  }
  /**
   * Log failure
   */
  failure(component, message, context, data) {
    this.error(component, `\u2717 ${message}`, context, data);
  }
  /**
   * Log timing information
   */
  timing(component, message, durationMs, context) {
    this.info(component, `\u23F1 ${message}`, context, { duration: `${durationMs}ms` });
  }
  /**
   * Happy Path Error - logs when the expected "happy path" fails but we have a fallback
   */
  happyPathError(component, message, context, data, fallback = "") {
    // Derive the caller's file:line from a synthetic stack trace so the
    // warning points at the call site rather than at this helper.
    const stack = new Error().stack || "";
    const stackLines = stack.split("\n");
    const callerLine = stackLines[2] || "";
    const callerMatch = callerLine.match(/at\s+(?:.*\s+)?\(?([^:]+):(\d+):(\d+)\)?/);
    const location = callerMatch ? `${callerMatch[1].split("/").pop()}:${callerMatch[2]}` : "unknown";
    const enhancedContext = {
      ...context,
      location
    };
    this.warn(component, `[HAPPY-PATH] ${message}`, enhancedContext, data);
    return fallback;
  }
};
233
// Shared singleton logger used throughout this bundled module.
var logger = new Logger();
234
+
235
+ // src/services/sqlite/Backup.ts
236
/**
 * Format a Date as a filesystem-safe local timestamp:
 * YYYY-MM-DD-HHMMSS-mmm (used to name backup files).
 */
function formatTimestamp(date) {
  const pad2 = (value) => String(value).padStart(2, "0");
  const datePart = [date.getFullYear(), pad2(date.getMonth() + 1), pad2(date.getDate())].join("-");
  const timePart = `${pad2(date.getHours())}${pad2(date.getMinutes())}${pad2(date.getSeconds())}`;
  const msPart = String(date.getMilliseconds()).padStart(3, "0");
  return `${datePart}-${timePart}-${msPart}`;
}
247
/**
 * Gather row counts of the core tables plus the on-disk database size.
 * Missing tables count as 0; a missing database file yields size 0.
 */
function collectStats(db, dbPath) {
  // Count rows in one table, treating any query failure as "no rows".
  const safeCount = (table) => {
    try {
      return db.query(`SELECT COUNT(*) as c FROM ${table}`).get()?.c ?? 0;
    } catch {
      return 0;
    }
  };
  const stats = {};
  for (const table of ["observations", "sessions", "summaries", "prompts"]) {
    stats[table] = safeCount(table);
  }
  stats.dbSizeBytes = existsSync2(dbPath) ? statSync(dbPath).size : 0;
  return stats;
}
265
/**
 * Read the highest applied migration version from schema_versions.
 * Returns 0 when the table is absent or holds no rows.
 */
function getSchemaVersion(db) {
  let version = 0;
  try {
    const result = db.query("SELECT MAX(version) as v FROM schema_versions").get();
    if (result != null && result.v != null) {
      version = result.v;
    }
  } catch {
    // Missing table → schema version 0.
  }
  return version;
}
273
/**
 * Copy the SQLite database (with any WAL/SHM sidecar files) into backupDir
 * and write a .meta.json file describing the backup.
 *
 * @param dbPath   - path of the live database file (must exist)
 * @param backupDir - destination directory (created if missing)
 * @param db       - open database handle used only to collect stats/version
 * @returns {filePath, metaPath, metadata}
 * @throws Error when dbPath does not exist
 */
function createBackup(dbPath, backupDir, db) {
  mkdirSync2(backupDir, { recursive: true });
  const now = /* @__PURE__ */ new Date();
  const stamp = formatTimestamp(now);
  const dbFilename = `backup-${stamp}.db`;
  const metaFilename = `backup-${stamp}.meta.json`;
  const destPath = join2(backupDir, dbFilename);
  const metaPath = join2(backupDir, metaFilename);
  if (!existsSync2(dbPath)) {
    throw new Error(`Database non trovato: ${dbPath}`);
  }
  copyFileSync(dbPath, destPath);
  logger.info("BACKUP", `File DB copiato: ${dbPath} \u2192 ${destPath}`);
  // Copy WAL/SHM sidecars when present so the backup stays consistent.
  const sidecars = [
    ["-wal", "File WAL copiato"],
    ["-shm", "File SHM copiato"]
  ];
  for (const [suffix, note] of sidecars) {
    const sourceFile = `${dbPath}${suffix}`;
    if (existsSync2(sourceFile)) {
      copyFileSync(sourceFile, `${destPath}${suffix}`);
      logger.debug("BACKUP", note);
    }
  }
  const stats = collectStats(db, dbPath);
  const schemaVersion = getSchemaVersion(db);
  const metadata = {
    timestamp: now.toISOString(),
    timestampEpoch: now.getTime(),
    schemaVersion,
    stats,
    sourcePath: dbPath,
    filename: dbFilename
  };
  writeFileSync(metaPath, JSON.stringify(metadata, null, 2), "utf8");
  logger.info("BACKUP", `Metadata scritto: ${metaPath}`);
  return {
    filePath: destPath,
    metaPath,
    metadata
  };
}
314
/**
 * List all valid backups in backupDir, newest first.
 * A backup is valid when its backup-*.meta.json parses and the matching
 * backup-*.db file exists; invalid entries are skipped with a warning.
 */
function listBackups(backupDir) {
  // A missing directory simply means "no backups yet".
  if (!existsSync2(backupDir)) {
    return [];
  }
  let names;
  try {
    names = readdirSync(backupDir);
  } catch (err) {
    logger.warn("BACKUP", `Impossibile leggere la directory backup: ${backupDir}`, {}, err);
    return [];
  }
  const found = [];
  for (const name of names) {
    // Metadata files drive the listing; anything else is ignored.
    if (!name.startsWith("backup-") || !name.endsWith(".meta.json")) continue;
    const metaPath = join2(backupDir, name);
    const filePath = join2(backupDir, name.replace(/\.meta\.json$/, ".db"));
    let metadata;
    try {
      metadata = JSON.parse(readFileSync2(metaPath, "utf8"));
    } catch (err) {
      logger.warn("BACKUP", `Metadata non leggibile: ${metaPath}`, {}, err);
      continue;
    }
    if (!existsSync2(filePath)) {
      logger.warn("BACKUP", `File backup mancante per metadata: ${filePath}`);
      continue;
    }
    found.push({ filePath, metaPath, metadata });
  }
  // Newest first.
  found.sort((left, right) => right.metadata.timestampEpoch - left.metadata.timestampEpoch);
  return found;
}
348
/**
 * Restore a database from a backup file, overwriting dbPath.
 * WAL/SHM sidecars are restored when present in the backup; otherwise any
 * stale sidecar next to dbPath is removed so SQLite does not replay it.
 *
 * @throws Error when backupFile does not exist
 */
function restoreBackup(backupFile, dbPath) {
  if (!existsSync2(backupFile)) {
    throw new Error(`File backup non trovato: ${backupFile}`);
  }
  copyFileSync(backupFile, dbPath);
  logger.info("BACKUP", `Database ripristinato: ${backupFile} \u2192 ${dbPath}`);
  for (const label of ["WAL", "SHM"]) {
    const suffix = `-${label.toLowerCase()}`;
    const sourceFile = `${backupFile}${suffix}`;
    const targetFile = `${dbPath}${suffix}`;
    if (existsSync2(sourceFile)) {
      copyFileSync(sourceFile, targetFile);
      logger.debug("BACKUP", `File ${label} ripristinato`);
    } else if (existsSync2(targetFile)) {
      unlinkSync(targetFile);
      logger.debug("BACKUP", `File ${label} corrente rimosso (non presente nel backup)`);
    }
  }
}
373
/**
 * Delete the oldest backups so that at most maxKeep remain.
 * Deletion is best-effort per file; each rotated entry counts as removed
 * even if an individual unlink fails (failures are only logged).
 *
 * @returns number of backups rotated out
 * @throws Error when maxKeep <= 0
 */
function rotateBackups(backupDir, maxKeep) {
  if (maxKeep <= 0) {
    throw new Error(`maxKeep deve essere > 0, ricevuto: ${maxKeep}`);
  }
  const backups = listBackups(backupDir);
  if (backups.length <= maxKeep) {
    logger.debug("BACKUP", `Rotazione non necessaria: ${backups.length}/${maxKeep} backup presenti`);
    return 0;
  }
  // Best-effort unlink; optional onError callback decides how to report.
  const safeUnlink = (path, onError) => {
    try {
      if (existsSync2(path)) unlinkSync(path);
    } catch (err) {
      if (onError) onError(err);
    }
  };
  let removed = 0;
  // listBackups() is newest-first, so everything past maxKeep is oldest.
  for (const entry of backups.slice(maxKeep)) {
    safeUnlink(entry.filePath, (err) => logger.warn("BACKUP", `Impossibile eliminare: ${entry.filePath}`, {}, err));
    safeUnlink(`${entry.filePath}-wal`);
    safeUnlink(`${entry.filePath}-shm`);
    safeUnlink(entry.metaPath, (err) => logger.warn("BACKUP", `Impossibile eliminare metadata: ${entry.metaPath}`, {}, err));
    logger.info("BACKUP", `Backup rimosso (rotazione): ${basename(entry.filePath)}`);
    removed += 1;
  }
  logger.info("BACKUP", `Rotazione completata: ${removed} backup eliminati, ${maxKeep} mantenuti`);
  return removed;
}
411
+ export {
412
+ createBackup,
413
+ listBackups,
414
+ restoreBackup,
415
+ rotateBackups
416
+ };
@@ -624,6 +624,77 @@ var MigrationRunner = class {
624
624
  db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_epoch ON summaries(project, created_at_epoch DESC)");
625
625
  db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_epoch ON prompts(project, created_at_epoch DESC)");
626
626
  }
627
},
{
  // v10: job_queue table for background work items processed by the
  // worker service, with indexes for status/type lookups and a composite
  // index supporting priority-ordered FIFO dequeue of pending jobs.
  version: 10,
  up: (db) => {
    db.run(`
      CREATE TABLE IF NOT EXISTS job_queue (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        type TEXT NOT NULL,
        status TEXT NOT NULL DEFAULT 'pending',
        payload TEXT,
        result TEXT,
        error TEXT,
        retry_count INTEGER DEFAULT 0,
        max_retries INTEGER DEFAULT 3,
        priority INTEGER DEFAULT 0,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        started_at_epoch INTEGER,
        completed_at_epoch INTEGER
      )
    `);
    db.run("CREATE INDEX IF NOT EXISTS idx_jobs_status ON job_queue(status)");
    db.run("CREATE INDEX IF NOT EXISTS idx_jobs_type ON job_queue(type)");
    db.run("CREATE INDEX IF NOT EXISTS idx_jobs_priority ON job_queue(status, priority DESC, created_at_epoch ASC)");
  }
},
{
  // v11: adds the auto_category column to observations (nullable TEXT)
  // with a supporting index for category filtering.
  version: 11,
  up: (db) => {
    db.run("ALTER TABLE observations ADD COLUMN auto_category TEXT");
    db.run("CREATE INDEX IF NOT EXISTS idx_observations_category ON observations(auto_category)");
  }
},
{
  // v12: github_links table linking GitHub issue/PR events to local
  // observations and sessions, indexed for repo, event and issue/PR lookups.
  version: 12,
  up: (db) => {
    db.run(`
      CREATE TABLE IF NOT EXISTS github_links (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        observation_id INTEGER,
        session_id TEXT,
        repo TEXT NOT NULL,
        issue_number INTEGER,
        pr_number INTEGER,
        event_type TEXT NOT NULL,
        action TEXT,
        title TEXT,
        url TEXT,
        author TEXT,
        created_at TEXT NOT NULL,
        created_at_epoch INTEGER NOT NULL,
        FOREIGN KEY (observation_id) REFERENCES observations(id)
      )
    `);
    db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo ON github_links(repo)");
    db.run("CREATE INDEX IF NOT EXISTS idx_github_links_obs ON github_links(observation_id)");
    db.run("CREATE INDEX IF NOT EXISTS idx_github_links_event ON github_links(event_type)");
    db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_issue ON github_links(repo, issue_number)");
    db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_pr ON github_links(repo, pr_number)");
  }
},
{
  // v13: composite (created_at_epoch DESC, id DESC) indexes — with and
  // without the project column — enabling keyset pagination over
  // observations, summaries and prompts.
  version: 13,
  up: (db) => {
    db.run("CREATE INDEX IF NOT EXISTS idx_observations_keyset ON observations(created_at_epoch DESC, id DESC)");
    db.run("CREATE INDEX IF NOT EXISTS idx_observations_project_keyset ON observations(project, created_at_epoch DESC, id DESC)");
    db.run("CREATE INDEX IF NOT EXISTS idx_summaries_keyset ON summaries(created_at_epoch DESC, id DESC)");
    db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_keyset ON summaries(project, created_at_epoch DESC, id DESC)");
    db.run("CREATE INDEX IF NOT EXISTS idx_prompts_keyset ON prompts(created_at_epoch DESC, id DESC)");
    db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_keyset ON prompts(project, created_at_epoch DESC, id DESC)");
  }
627
698
  }
628
699
  ];
629
700
  }