kiro-memory 2.1.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (33)
  1. package/README.md +5 -1
  2. package/package.json +3 -3
  3. package/plugin/dist/cli/contextkit.js +2315 -180
  4. package/plugin/dist/hooks/agentSpawn.js +548 -49
  5. package/plugin/dist/hooks/kiro-hooks.js +548 -49
  6. package/plugin/dist/hooks/postToolUse.js +556 -56
  7. package/plugin/dist/hooks/stop.js +548 -49
  8. package/plugin/dist/hooks/userPromptSubmit.js +551 -50
  9. package/plugin/dist/index.js +549 -50
  10. package/plugin/dist/plugins/github/github-client.js +152 -0
  11. package/plugin/dist/plugins/github/index.js +412 -0
  12. package/plugin/dist/plugins/github/issue-parser.js +54 -0
  13. package/plugin/dist/plugins/slack/formatter.js +90 -0
  14. package/plugin/dist/plugins/slack/index.js +215 -0
  15. package/plugin/dist/sdk/index.js +548 -49
  16. package/plugin/dist/servers/mcp-server.js +4461 -397
  17. package/plugin/dist/services/search/EmbeddingService.js +64 -20
  18. package/plugin/dist/services/search/HybridSearch.js +380 -38
  19. package/plugin/dist/services/search/VectorSearch.js +65 -21
  20. package/plugin/dist/services/search/index.js +380 -38
  21. package/plugin/dist/services/sqlite/Backup.js +416 -0
  22. package/plugin/dist/services/sqlite/Database.js +71 -0
  23. package/plugin/dist/services/sqlite/ImportExport.js +452 -0
  24. package/plugin/dist/services/sqlite/Observations.js +291 -7
  25. package/plugin/dist/services/sqlite/Prompts.js +1 -1
  26. package/plugin/dist/services/sqlite/Search.js +10 -10
  27. package/plugin/dist/services/sqlite/Summaries.js +4 -4
  28. package/plugin/dist/services/sqlite/index.js +1323 -28
  29. package/plugin/dist/viewer.css +1 -1
  30. package/plugin/dist/viewer.js +16 -8
  31. package/plugin/dist/viewer.js.map +4 -4
  32. package/plugin/dist/worker-service.js +326 -75
  33. package/plugin/dist/worker-service.js.map +4 -4
@@ -0,0 +1,452 @@
1
+ import { createRequire } from 'module';const require = createRequire(import.meta.url);
2
+
3
+ // src/services/sqlite/ImportExport.ts
4
+ import { createHash } from "crypto";
5
// Schema version stamped into the exported `_meta` record; consumers use it
// to detect incompatible JSONL layouts.
var JSONL_SCHEMA_VERSION = "2.5.0";
// Number of records per import transaction / buffer flush.
var IMPORT_BATCH_SIZE = 100;
7
// Counts the observations, summaries and prompts matching the export filters
// without materializing any rows. Used to build the `_meta` header record.
function countExportRecords(db, filters) {
  const { fromEpoch, toEpoch } = filtersToEpoch(filters);
  const countIn = (table, conds) =>
    db.query(`SELECT COUNT(*) as c FROM ${table} WHERE ${conds.where}`).get(...conds.params).c;
  // Only observations are filterable by `type`; the other tables share the
  // project/date conditions.
  return {
    observations: countIn(
      "observations",
      buildConditions({ project: filters.project, type: filters.type, fromEpoch, toEpoch })
    ),
    summaries: countIn("summaries", buildConditions({ project: filters.project, fromEpoch, toEpoch })),
    prompts: countIn("prompts", buildConditions({ project: filters.project, fromEpoch, toEpoch }))
  };
}
23
// Builds the JSONL header line (the `_meta` record) describing an export:
// schema version, export timestamp, per-table counts, and the filters used.
function generateMetaRecord(db, filters) {
  const counts = countExportRecords(db, filters);
  const hasFilters = Object.keys(filters).length > 0;
  return JSON.stringify({
    _meta: {
      version: JSONL_SCHEMA_VERSION,
      exported_at: new Date().toISOString(),
      counts,
      // undefined makes JSON.stringify omit the key when no filters were given.
      filters: hasFilters ? filters : void 0
    }
  });
}
35
// Streams observation rows matching `filters` as JSONL strings via `onRow`,
// paging through the table in `batchSize` chunks ordered by creation time.
// Returns the number of rows emitted.
function exportObservationsStreaming(db, filters, onRow, batchSize = 200) {
  const { fromEpoch, toEpoch } = filtersToEpoch(filters);
  const conds = buildConditions({ project: filters.project, type: filters.type, fromEpoch, toEpoch });
  const sql = `SELECT id, memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts,
            files_read, files_modified, prompt_number, content_hash, discovery_tokens, auto_category,
            created_at, created_at_epoch
     FROM observations
     WHERE ${conds.where}
     ORDER BY created_at_epoch ASC, id ASC
     LIMIT ? OFFSET ?`;
  let offset = 0;
  let emitted = 0;
  for (;;) {
    const page = db.query(sql).all(...conds.params, batchSize, offset);
    if (page.length === 0) break;
    for (const row of page) {
      onRow(JSON.stringify({
        _type: "observation",
        id: row.id,
        memory_session_id: row.memory_session_id,
        project: row.project,
        type: row.type,
        title: row.title,
        subtitle: row.subtitle,
        text: row.text,
        narrative: row.narrative,
        facts: row.facts,
        concepts: row.concepts,
        files_read: row.files_read,
        files_modified: row.files_modified,
        prompt_number: row.prompt_number,
        content_hash: row.content_hash,
        discovery_tokens: row.discovery_tokens ?? 0,
        auto_category: row.auto_category,
        created_at: row.created_at,
        created_at_epoch: row.created_at_epoch
      }));
      emitted++;
    }
    offset += page.length;
    // A short page means the table is exhausted — skip the extra query.
    if (page.length < batchSize) break;
  }
  return emitted;
}
81
// Streams summary rows matching `filters` as JSONL strings via `onRow`,
// paging in `batchSize` chunks ordered by creation time. Returns the number
// of rows emitted.
function exportSummariesStreaming(db, filters, onRow, batchSize = 200) {
  const { fromEpoch, toEpoch } = filtersToEpoch(filters);
  const conds = buildConditions({ project: filters.project, fromEpoch, toEpoch });
  const sql = `SELECT id, session_id, project, request, investigated, learned, completed, next_steps, notes,
            discovery_tokens, created_at, created_at_epoch
     FROM summaries
     WHERE ${conds.where}
     ORDER BY created_at_epoch ASC, id ASC
     LIMIT ? OFFSET ?`;
  let offset = 0;
  let emitted = 0;
  for (;;) {
    const page = db.query(sql).all(...conds.params, batchSize, offset);
    if (page.length === 0) break;
    for (const row of page) {
      onRow(JSON.stringify({
        _type: "summary",
        id: row.id,
        session_id: row.session_id,
        project: row.project,
        request: row.request,
        investigated: row.investigated,
        learned: row.learned,
        completed: row.completed,
        next_steps: row.next_steps,
        notes: row.notes,
        discovery_tokens: row.discovery_tokens ?? 0,
        created_at: row.created_at,
        created_at_epoch: row.created_at_epoch
      }));
      emitted++;
    }
    offset += page.length;
    // A short page means the table is exhausted — skip the extra query.
    if (page.length < batchSize) break;
  }
  return emitted;
}
120
// Streams prompt rows matching `filters` as JSONL strings via `onRow`,
// paging in `batchSize` chunks ordered by creation time. Returns the number
// of rows emitted.
function exportPromptsStreaming(db, filters, onRow, batchSize = 200) {
  const { fromEpoch, toEpoch } = filtersToEpoch(filters);
  const conds = buildConditions({ project: filters.project, fromEpoch, toEpoch });
  const sql = `SELECT id, content_session_id, project, prompt_number, prompt_text, created_at, created_at_epoch
     FROM prompts
     WHERE ${conds.where}
     ORDER BY created_at_epoch ASC, id ASC
     LIMIT ? OFFSET ?`;
  let offset = 0;
  let emitted = 0;
  for (;;) {
    const page = db.query(sql).all(...conds.params, batchSize, offset);
    if (page.length === 0) break;
    for (const row of page) {
      onRow(JSON.stringify({
        _type: "prompt",
        id: row.id,
        content_session_id: row.content_session_id,
        project: row.project,
        prompt_number: row.prompt_number,
        prompt_text: row.prompt_text,
        created_at: row.created_at,
        created_at_epoch: row.created_at_epoch
      }));
      emitted++;
    }
    offset += page.length;
    // A short page means the table is exhausted — skip the extra query.
    if (page.length < batchSize) break;
  }
  return emitted;
}
153
// Validates one parsed JSONL record before import. Returns an error message
// (Italian, as surfaced to the user) or null when the record is acceptable.
// `_meta` header records are always accepted.
function validateJsonlRow(raw) {
  if (!raw || typeof raw !== "object") {
    return "Il record non \xE8 un oggetto JSON valido";
  }
  const rec = raw;
  if ("_meta" in rec) return null;
  const validTypes = ["observation", "summary", "prompt"];
  const typeOk = typeof rec._type === "string" && validTypes.includes(rec._type);
  if (!rec._type || !typeOk) {
    return `Campo "_type" obbligatorio, uno di: ${validTypes.join(", ")}`;
  }
  // True when a required field is absent or not a string.
  const missing = (v) => !v || typeof v !== "string";
  switch (rec._type) {
    case "observation":
      if (missing(rec.project)) return 'observation: campo "project" obbligatorio';
      if (missing(rec.type)) return 'observation: campo "type" obbligatorio';
      if (missing(rec.title)) return 'observation: campo "title" obbligatorio';
      if (rec.project.length > 200) return 'observation: "project" troppo lungo (max 200)';
      if (rec.title.length > 500) return 'observation: "title" troppo lungo (max 500)';
      break;
    case "summary":
      if (missing(rec.project)) return 'summary: campo "project" obbligatorio';
      if (missing(rec.session_id)) return 'summary: campo "session_id" obbligatorio';
      break;
    case "prompt":
      if (missing(rec.project)) return 'prompt: campo "project" obbligatorio';
      if (missing(rec.content_session_id)) return 'prompt: campo "content_session_id" obbligatorio';
      if (missing(rec.prompt_text)) return 'prompt: campo "prompt_text" obbligatorio';
      break;
  }
  return null;
}
179
// Stable content hash used for import de-duplication: sha256 over the
// project/type/title/narrative fields joined with "|"; missing fields
// contribute an empty string so the hash is defined for partial records.
function computeImportHash(rec) {
  const fields = ["project", "type", "title", "narrative"];
  const payload = fields.map((f) => rec[f] ?? "").join("|");
  return createHash("sha256").update(payload).digest("hex");
}
188
// True when an observation with this content hash is already stored.
function hashExistsInObservations(db, hash) {
  const row = db
    .query("SELECT id FROM observations WHERE content_hash = ? LIMIT 1")
    .get(hash);
  return Boolean(row);
}
194
+ function importObservationBatch(db, records, dryRun) {
195
+ let imported = 0;
196
+ let skipped = 0;
197
+ for (let i = 0; i < records.length; i += IMPORT_BATCH_SIZE) {
198
+ const batch = records.slice(i, i + IMPORT_BATCH_SIZE);
199
+ if (dryRun) {
200
+ for (const rec of batch) {
201
+ const hash = rec.content_hash || computeImportHash(rec);
202
+ if (hashExistsInObservations(db, hash)) {
203
+ skipped++;
204
+ } else {
205
+ imported++;
206
+ }
207
+ }
208
+ continue;
209
+ }
210
+ const insertBatch = db.transaction(() => {
211
+ for (const rec of batch) {
212
+ const hash = rec.content_hash || computeImportHash(rec);
213
+ if (hashExistsInObservations(db, hash)) {
214
+ skipped++;
215
+ continue;
216
+ }
217
+ const now = (/* @__PURE__ */ new Date()).toISOString();
218
+ db.run(
219
+ `INSERT INTO observations
220
+ (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts,
221
+ files_read, files_modified, prompt_number, content_hash, discovery_tokens, auto_category,
222
+ created_at, created_at_epoch)
223
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
224
+ [
225
+ rec.memory_session_id || "imported",
226
+ rec.project,
227
+ rec.type,
228
+ rec.title,
229
+ rec.subtitle ?? null,
230
+ rec.text ?? null,
231
+ rec.narrative ?? null,
232
+ rec.facts ?? null,
233
+ rec.concepts ?? null,
234
+ rec.files_read ?? null,
235
+ rec.files_modified ?? null,
236
+ rec.prompt_number ?? 0,
237
+ hash,
238
+ rec.discovery_tokens ?? 0,
239
+ rec.auto_category ?? null,
240
+ rec.created_at || now,
241
+ rec.created_at_epoch || Date.now()
242
+ ]
243
+ );
244
+ imported++;
245
+ }
246
+ });
247
+ insertBatch();
248
+ }
249
+ return { imported, skipped };
250
+ }
251
// Imports summary records in IMPORT_BATCH_SIZE transactions. A record is
// skipped when a summary with the same (session_id, project,
// created_at_epoch) already exists in the DB, or when an identical key
// appeared earlier in this import. The in-file `seen` set fixes two bugs:
// dry-run counts diverging from a real run on in-file duplicates, and
// double-insertion of in-file duplicates lacking created_at_epoch (they are
// probed with `?? 0` but stored with Date.now(), so the DB probe never
// matches). Returns { imported, skipped }.
function importSummaryBatch(db, records, dryRun) {
  let imported = 0;
  let skipped = 0;
  // Dedup keys accepted so far in THIS import (dry or real).
  const seen = new Set();
  const keyOf = (rec) => `${rec.session_id}|${rec.project}|${rec.created_at_epoch ?? 0}`;
  const existsInDb = (rec) => !!db.query(
    "SELECT id FROM summaries WHERE session_id = ? AND project = ? AND created_at_epoch = ? LIMIT 1"
  ).get(rec.session_id, rec.project, rec.created_at_epoch ?? 0);
  for (let i = 0; i < records.length; i += IMPORT_BATCH_SIZE) {
    const batch = records.slice(i, i + IMPORT_BATCH_SIZE);
    if (dryRun) {
      for (const rec of batch) {
        const key = keyOf(rec);
        if (seen.has(key) || existsInDb(rec)) {
          skipped++;
        } else {
          seen.add(key);
          imported++;
        }
      }
      continue;
    }
    const insertBatch = db.transaction(() => {
      for (const rec of batch) {
        const key = keyOf(rec);
        if (seen.has(key) || existsInDb(rec)) {
          skipped++;
          continue;
        }
        seen.add(key);
        const now = new Date().toISOString();
        db.run(
          `INSERT INTO summaries
            (session_id, project, request, investigated, learned, completed, next_steps, notes,
             discovery_tokens, created_at, created_at_epoch)
           VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
          [
            rec.session_id,
            rec.project,
            rec.request ?? null,
            rec.investigated ?? null,
            rec.learned ?? null,
            rec.completed ?? null,
            rec.next_steps ?? null,
            rec.notes ?? null,
            rec.discovery_tokens ?? 0,
            rec.created_at || now,
            rec.created_at_epoch || Date.now()
          ]
        );
        imported++;
      }
    });
    insertBatch();
  }
  return { imported, skipped };
}
302
// Imports prompt records in IMPORT_BATCH_SIZE transactions. A record is
// skipped when a prompt with the same (content_session_id, prompt_number)
// already exists in the DB or appeared earlier in this import. The in-file
// `seen` set fixes dry-run counts diverging from a real run: a real run
// sees earlier inserts through the DB probe, but a dry run probes an
// unchanged DB and counted every in-file duplicate as "imported".
// Returns { imported, skipped }.
function importPromptBatch(db, records, dryRun) {
  let imported = 0;
  let skipped = 0;
  // Dedup keys accepted so far in THIS import (dry or real).
  const seen = new Set();
  const keyOf = (rec) => `${rec.content_session_id}|${rec.prompt_number ?? 0}`;
  const existsInDb = (rec) => !!db.query(
    "SELECT id FROM prompts WHERE content_session_id = ? AND prompt_number = ? LIMIT 1"
  ).get(rec.content_session_id, rec.prompt_number ?? 0);
  for (let i = 0; i < records.length; i += IMPORT_BATCH_SIZE) {
    const batch = records.slice(i, i + IMPORT_BATCH_SIZE);
    if (dryRun) {
      for (const rec of batch) {
        const key = keyOf(rec);
        if (seen.has(key) || existsInDb(rec)) {
          skipped++;
        } else {
          seen.add(key);
          imported++;
        }
      }
      continue;
    }
    const insertBatch = db.transaction(() => {
      for (const rec of batch) {
        const key = keyOf(rec);
        if (seen.has(key) || existsInDb(rec)) {
          skipped++;
          continue;
        }
        seen.add(key);
        const now = new Date().toISOString();
        db.run(
          `INSERT INTO prompts
            (content_session_id, project, prompt_number, prompt_text, created_at, created_at_epoch)
           VALUES (?, ?, ?, ?, ?, ?)`,
          [
            rec.content_session_id,
            rec.project,
            rec.prompt_number ?? 0,
            rec.prompt_text,
            rec.created_at || now,
            rec.created_at_epoch || Date.now()
          ]
        );
        imported++;
      }
    });
    insertBatch();
  }
  return { imported, skipped };
}
347
// Parses JSONL `content` and imports observation/summary/prompt records,
// buffering up to IMPORT_BATCH_SIZE records before flushing them to the
// per-type batch importers. Blank lines and "#" comment lines are ignored;
// `_meta` header records are consumed without counting. Returns aggregate
// counts plus per-line error details (1-based line numbers).
function importJsonl(db, content, dryRun = false) {
  const result = { imported: 0, skipped: 0, errors: 0, total: 0, errorDetails: [] };
  // One buffer per record type, each flushed through its matching importer.
  const buffers = {
    observation: { rows: [], flush: importObservationBatch },
    summary: { rows: [], flush: importSummaryBatch },
    prompt: { rows: [], flush: importPromptBatch }
  };
  const pending = () =>
    buffers.observation.rows.length + buffers.summary.rows.length + buffers.prompt.rows.length;
  const flushBuffers = () => {
    for (const buf of Object.values(buffers)) {
      if (buf.rows.length === 0) continue;
      const r = buf.flush(db, buf.rows.splice(0), dryRun);
      result.imported += r.imported;
      result.skipped += r.skipped;
    }
  };
  content.split("\n").forEach((line, idx) => {
    const raw = line.trim();
    if (!raw || raw.startsWith("#")) return;
    result.total++;
    let parsed;
    try {
      parsed = JSON.parse(raw);
    } catch {
      result.errors++;
      result.errorDetails.push({ line: idx + 1, error: `JSON non valido: ${raw.substring(0, 60)}` });
      return;
    }
    if (parsed && typeof parsed === "object" && "_meta" in parsed) {
      // The header line is not a data record; undo the count above.
      result.total--;
      return;
    }
    const validErr = validateJsonlRow(parsed);
    if (validErr) {
      result.errors++;
      result.errorDetails.push({ line: idx + 1, error: validErr });
      return;
    }
    const buf = buffers[parsed._type];
    if (buf) buf.rows.push(parsed);
    if (pending() >= IMPORT_BATCH_SIZE) flushBuffers();
  });
  flushBuffers();
  return result;
}
414
// Converts the optional from/to date-string filters into epoch-millisecond
// bounds; absent filters map to undefined so buildConditions omits the clause.
function filtersToEpoch(filters) {
  const toMillis = (value) => (value ? new Date(value).getTime() : void 0);
  return { fromEpoch: toMillis(filters.from), toEpoch: toMillis(filters.to) };
}
420
// Assembles a SQL WHERE clause and its bound parameters from optional
// filters. Starts from the tautology "1=1" so callers can always append
// the result after "WHERE" even when no filter is set.
function buildConditions(params) {
  const clauses = ["1=1"];
  const bound = [];
  const add = (sql, value) => {
    clauses.push(sql);
    bound.push(value);
  };
  if (params.project) add("project = ?", params.project);
  if (params.type) add("type = ?", params.type);
  if (params.fromEpoch !== void 0) add("created_at_epoch >= ?", params.fromEpoch);
  if (params.toEpoch !== void 0) add("created_at_epoch <= ?", params.toEpoch);
  return { where: clauses.join(" AND "), params: bound };
}
441
// Public API of the ImportExport module: streaming JSONL export, batched
// deduplicating import, and the validation/hashing helpers they share.
export {
  JSONL_SCHEMA_VERSION,
  computeImportHash,
  countExportRecords,
  exportObservationsStreaming,
  exportPromptsStreaming,
  exportSummariesStreaming,
  generateMetaRecord,
  hashExistsInObservations,
  importJsonl,
  validateJsonlRow
};