kiro-memory 1.9.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/README.md +5 -1
  2. package/package.json +5 -5
  3. package/plugin/dist/cli/contextkit.js +2611 -345
  4. package/plugin/dist/hooks/agentSpawn.js +853 -223
  5. package/plugin/dist/hooks/kiro-hooks.js +841 -211
  6. package/plugin/dist/hooks/postToolUse.js +853 -222
  7. package/plugin/dist/hooks/stop.js +850 -220
  8. package/plugin/dist/hooks/userPromptSubmit.js +848 -216
  9. package/plugin/dist/index.js +843 -340
  10. package/plugin/dist/plugins/github/github-client.js +152 -0
  11. package/plugin/dist/plugins/github/index.js +412 -0
  12. package/plugin/dist/plugins/github/issue-parser.js +54 -0
  13. package/plugin/dist/plugins/slack/formatter.js +90 -0
  14. package/plugin/dist/plugins/slack/index.js +215 -0
  15. package/plugin/dist/sdk/index.js +841 -215
  16. package/plugin/dist/servers/mcp-server.js +4461 -397
  17. package/plugin/dist/services/search/EmbeddingService.js +146 -37
  18. package/plugin/dist/services/search/HybridSearch.js +564 -116
  19. package/plugin/dist/services/search/VectorSearch.js +187 -60
  20. package/plugin/dist/services/search/index.js +565 -254
  21. package/plugin/dist/services/sqlite/Backup.js +416 -0
  22. package/plugin/dist/services/sqlite/Database.js +126 -153
  23. package/plugin/dist/services/sqlite/ImportExport.js +452 -0
  24. package/plugin/dist/services/sqlite/Observations.js +314 -19
  25. package/plugin/dist/services/sqlite/Prompts.js +1 -1
  26. package/plugin/dist/services/sqlite/Search.js +41 -29
  27. package/plugin/dist/services/sqlite/Summaries.js +4 -4
  28. package/plugin/dist/services/sqlite/index.js +1428 -208
  29. package/plugin/dist/viewer.css +1 -0
  30. package/plugin/dist/viewer.html +2 -179
  31. package/plugin/dist/viewer.js +23 -24942
  32. package/plugin/dist/viewer.js.map +7 -0
  33. package/plugin/dist/worker-service.js +427 -5569
  34. package/plugin/dist/worker-service.js.map +7 -0
@@ -16,6 +16,290 @@ var __export = (target, all) => {
16
16
  __defProp(target, name, { get: all[name], enumerable: true });
17
17
  };
18
18
 
19
+ // src/utils/secrets.ts
20
+ function redactSecrets(text) {
21
+ if (!text) return text;
22
+ let redacted = text;
23
+ for (const { pattern } of SECRET_PATTERNS) {
24
+ pattern.lastIndex = 0;
25
+ redacted = redacted.replace(pattern, (match) => {
26
+ const prefix = match.substring(0, Math.min(4, match.length));
27
+ return `${prefix}***REDACTED***`;
28
+ });
29
+ }
30
+ return redacted;
31
+ }
32
+ var SECRET_PATTERNS;
33
+ var init_secrets = __esm({
34
+ "src/utils/secrets.ts"() {
35
+ "use strict";
36
+ SECRET_PATTERNS = [
37
+ // AWS Access Keys (AKIA, ABIA, ACCA, ASIA prefixes + 16 alphanumeric chars)
38
+ { name: "aws-key", pattern: /(?:AKIA|ABIA|ACCA|ASIA)[A-Z0-9]{16}/g },
39
+ // JWT tokens (three base64url segments separated by dots)
40
+ { name: "jwt", pattern: /eyJ[a-zA-Z0-9_-]{10,}\.eyJ[a-zA-Z0-9_-]{10,}\.[a-zA-Z0-9_-]{10,}/g },
41
+ // Generic API keys in key=value or key: value assignments
42
+ { name: "api-key", pattern: /(?:api[_-]?key|apikey|api[_-]?secret)\s*[:=]\s*['"]?([a-zA-Z0-9_\-]{20,})['"]?/gi },
43
+ // Password/secret/token in variable assignments
44
+ { name: "credential", pattern: /(?:password|passwd|pwd|secret|token|auth[_-]?token|access[_-]?token|bearer)\s*[:=]\s*['"]?([^\s'"]{8,})['"]?/gi },
45
+ // Credentials embedded in URLs (user:pass@host)
46
+ { name: "url-credential", pattern: /(?:https?:\/\/)([^:]+):([^@]+)@/g },
47
+ // PEM-encoded private keys (RSA, EC, DSA, OpenSSH)
48
+ { name: "private-key", pattern: /-----BEGIN (?:RSA |EC |DSA |OPENSSH )?PRIVATE KEY-----/g },
49
+ // GitHub personal access tokens (ghp_, gho_, ghu_, ghs_, ghr_ prefixes)
50
+ { name: "github-token", pattern: /gh[pousr]_[a-zA-Z0-9]{36,}/g },
51
+ // Slack bot/user/app tokens
52
+ { name: "slack-token", pattern: /xox[bpoas]-[a-zA-Z0-9-]{10,}/g },
53
+ // HTTP Authorization Bearer header values
54
+ { name: "bearer-header", pattern: /\bBearer\s+([a-zA-Z0-9_\-\.]{20,})/g },
55
+ // Generic hex secrets (32+ hex chars after a key/secret/token/password label)
56
+ { name: "hex-secret", pattern: /(?:key|secret|token|password)\s*[:=]\s*['"]?([0-9a-f]{32,})['"]?/gi }
57
+ ];
58
+ }
59
+ });
60
+
61
// src/utils/categorizer.ts
/**
 * Pick the best-matching category for an observation.
 *
 * Scoring per rule: +weight for each keyword found in the combined
 * title/text/narrative/concepts haystack, +2*weight when input.type is
 * listed in rule.types, and +weight for each filePattern matching the
 * combined file lists. The highest total wins (strict ">", so earlier
 * rules win ties); a zero total falls back to "general".
 *
 * @param {{type: string, title: string, text?: string, narrative?: string,
 *          concepts?: string, filesModified?: string, filesRead?: string}} input
 * @returns {string} Winning category name, or "general".
 */
function categorize(input) {
  const searchText = [
    input.title,
    input.text || "",
    input.narrative || "",
    input.concepts || ""
  ].join(" ").toLowerCase();
  // FIX: previously ["", ""].join(",") produced "," for empty inputs, which
  // is truthy and made the `allFiles` guard below dead code. Join only the
  // non-empty parts so the haystack is "" (falsy) when there are no files
  // and the filePatterns pass is skipped entirely.
  const allFiles = [input.filesModified, input.filesRead].filter(Boolean).join(",");
  const scores = /* @__PURE__ */ new Map();
  for (const rule of CATEGORY_RULES) {
    let score = 0;
    for (const kw of rule.keywords) {
      if (searchText.includes(kw.toLowerCase())) {
        score += rule.weight;
      }
    }
    // A direct observation-type match is the strongest signal.
    if (rule.types && rule.types.includes(input.type)) {
      score += rule.weight * 2;
    }
    if (rule.filePatterns && allFiles) {
      for (const pattern of rule.filePatterns) {
        if (pattern.test(allFiles)) {
          score += rule.weight;
        }
      }
    }
    if (score > 0) {
      scores.set(rule.category, (scores.get(rule.category) || 0) + score);
    }
  }
  let bestCategory = "general";
  let bestScore = 0;
  for (const [category, score] of scores) {
    if (score > bestScore) {
      bestScore = score;
      bestCategory = category;
    }
  }
  return bestCategory;
}
var CATEGORY_RULES;
var init_categorizer = __esm({
  "src/utils/categorizer.ts"() {
    "use strict";
    CATEGORY_RULES = [
      {
        category: "security",
        keywords: [
          "security", "vulnerability", "cve", "xss", "csrf", "injection",
          "sanitize", "escape", "auth", "authentication", "authorization",
          "permission", "helmet", "cors", "rate-limit", "token", "encrypt",
          "decrypt", "secret", "redact", "owasp"
        ],
        filePatterns: [/security/i, /auth/i, /secrets?\.ts/i],
        weight: 10
      },
      {
        category: "testing",
        keywords: [
          "test", "spec", "expect", "assert", "mock", "stub", "fixture",
          "coverage", "jest", "vitest", "bun test", "unit test",
          "integration test", "e2e"
        ],
        types: ["test"],
        filePatterns: [/\.test\./i, /\.spec\./i, /tests?\//i, /__tests__/i],
        weight: 8
      },
      {
        category: "debugging",
        keywords: [
          "debug", "fix", "bug", "error", "crash", "stacktrace", "stack trace",
          "exception", "breakpoint", "investigate", "root cause",
          "troubleshoot", "diagnose", "bisect", "regression"
        ],
        types: ["bugfix"],
        weight: 8
      },
      {
        category: "architecture",
        keywords: [
          "architect", "design", "pattern", "modular", "migration", "schema",
          "database", "api design", "abstract", "dependency injection",
          "singleton", "factory", "observer", "middleware", "pipeline",
          "microservice", "monolith"
        ],
        types: ["decision", "constraint"],
        weight: 7
      },
      {
        category: "refactoring",
        keywords: [
          "refactor", "rename", "extract", "inline", "move", "split", "merge",
          "simplify", "cleanup", "clean up", "dead code", "consolidate",
          "reorganize", "restructure", "decouple"
        ],
        weight: 6
      },
      {
        category: "config",
        keywords: [
          "config", "configuration", "env", "environment", "dotenv", ".env",
          "settings", "tsconfig", "eslint", "prettier", "webpack", "vite",
          "esbuild", "docker", "ci/cd", "github actions", "deploy", "build",
          "bundle", "package.json"
        ],
        filePatterns: [
          /\.config\./i, /\.env/i, /tsconfig/i, /\.ya?ml/i,
          /Dockerfile/i, /docker-compose/i
        ],
        weight: 5
      },
      {
        category: "docs",
        keywords: [
          "document", "readme", "changelog", "jsdoc", "comment", "explain",
          "guide", "tutorial", "api doc", "openapi", "swagger"
        ],
        types: ["docs"],
        filePatterns: [/\.md$/i, /docs?\//i, /readme/i, /changelog/i],
        weight: 5
      },
      {
        category: "feature-dev",
        keywords: [
          "feature", "implement", "add", "create", "new", "endpoint",
          "component", "module", "service", "handler", "route", "hook",
          "plugin", "integration"
        ],
        types: ["feature", "file-write"],
        weight: 3
        // lowest — generic catch-all for development
      }
    ];
  }
});
302
+
19
303
  // src/services/sqlite/Observations.ts
20
304
  var Observations_exports = {};
21
305
  __export(Observations_exports, {
@@ -41,11 +325,23 @@ function isDuplicateObservation(db, contentHash, windowMs = 3e4) {
41
325
  }
42
326
  function createObservation(db, memorySessionId, project, type, title, subtitle, text, narrative, facts, concepts, filesRead, filesModified, promptNumber, contentHash = null, discoveryTokens = 0) {
43
327
  const now = /* @__PURE__ */ new Date();
328
+ const safeTitle = redactSecrets(title);
329
+ const safeText = text ? redactSecrets(text) : text;
330
+ const safeNarrative = narrative ? redactSecrets(narrative) : narrative;
331
+ const autoCategory = categorize({
332
+ type,
333
+ title: safeTitle,
334
+ text: safeText,
335
+ narrative: safeNarrative,
336
+ concepts,
337
+ filesModified,
338
+ filesRead
339
+ });
44
340
  const result = db.run(
45
341
  `INSERT INTO observations
46
- (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch, content_hash, discovery_tokens)
47
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
48
- [memorySessionId, project, type, title, subtitle, text, narrative, facts, concepts, filesRead, filesModified, promptNumber, now.toISOString(), now.getTime(), contentHash, discoveryTokens]
342
+ (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch, content_hash, discovery_tokens, auto_category)
343
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
344
+ [memorySessionId, project, type, safeTitle, subtitle, safeText, safeNarrative, facts, concepts, filesRead, filesModified, promptNumber, now.toISOString(), now.getTime(), contentHash, discoveryTokens, autoCategory]
49
345
  );
50
346
  return Number(result.lastInsertRowid);
51
347
  }
@@ -57,16 +353,16 @@ function getObservationsBySession(db, memorySessionId) {
57
353
  }
58
354
  function getObservationsByProject(db, project, limit = 100) {
59
355
  const query = db.query(
60
- "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC LIMIT ?"
356
+ "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?"
61
357
  );
62
358
  return query.all(project, limit);
63
359
  }
64
360
  function searchObservations(db, searchTerm, project) {
65
361
  const sql = project ? `SELECT * FROM observations
66
362
  WHERE project = ? AND (title LIKE ? ESCAPE '\\' OR text LIKE ? ESCAPE '\\' OR narrative LIKE ? ESCAPE '\\')
67
- ORDER BY created_at_epoch DESC` : `SELECT * FROM observations
363
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT * FROM observations
68
364
  WHERE title LIKE ? ESCAPE '\\' OR text LIKE ? ESCAPE '\\' OR narrative LIKE ? ESCAPE '\\'
69
- ORDER BY created_at_epoch DESC`;
365
+ ORDER BY created_at_epoch DESC, id DESC`;
70
366
  const pattern = `%${escapeLikePattern(searchTerm)}%`;
71
367
  const query = db.query(sql);
72
368
  if (project) {
@@ -99,21 +395,32 @@ function consolidateObservations(db, project, options = {}) {
99
395
  ORDER BY cnt DESC
100
396
  `).all(project, minGroupSize);
101
397
  if (groups.length === 0) return { merged: 0, removed: 0 };
102
- let totalMerged = 0;
103
- let totalRemoved = 0;
398
+ if (options.dryRun) {
399
+ let totalMerged = 0;
400
+ let totalRemoved = 0;
401
+ for (const group of groups) {
402
+ const obsIds = group.ids.split(",").map(Number);
403
+ const placeholders = obsIds.map(() => "?").join(",");
404
+ const count = db.query(
405
+ `SELECT COUNT(*) as cnt FROM observations WHERE id IN (${placeholders})`
406
+ ).get(...obsIds)?.cnt || 0;
407
+ if (count >= minGroupSize) {
408
+ totalMerged += 1;
409
+ totalRemoved += count - 1;
410
+ }
411
+ }
412
+ return { merged: totalMerged, removed: totalRemoved };
413
+ }
104
414
  const runConsolidation = db.transaction(() => {
415
+ let merged = 0;
416
+ let removed = 0;
105
417
  for (const group of groups) {
106
418
  const obsIds = group.ids.split(",").map(Number);
107
419
  const placeholders = obsIds.map(() => "?").join(",");
108
420
  const observations = db.query(
109
- `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC`
421
+ `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC, id DESC`
110
422
  ).all(...obsIds);
111
423
  if (observations.length < minGroupSize) continue;
112
- if (options.dryRun) {
113
- totalMerged += 1;
114
- totalRemoved += observations.length - 1;
115
- continue;
116
- }
117
424
  const keeper = observations[0];
118
425
  const others = observations.slice(1);
119
426
  const uniqueTexts = /* @__PURE__ */ new Set();
@@ -126,22 +433,24 @@ function consolidateObservations(db, project, options = {}) {
126
433
  const consolidatedText = Array.from(uniqueTexts).join("\n---\n").substring(0, 1e5);
127
434
  db.run(
128
435
  "UPDATE observations SET text = ?, title = ? WHERE id = ?",
129
- [consolidatedText, `[consolidato x${observations.length}] ${keeper.title}`, keeper.id]
436
+ [consolidatedText, `[consolidated x${observations.length}] ${keeper.title}`, keeper.id]
130
437
  );
131
438
  const removeIds = others.map((o) => o.id);
132
439
  const removePlaceholders = removeIds.map(() => "?").join(",");
133
440
  db.run(`DELETE FROM observations WHERE id IN (${removePlaceholders})`, removeIds);
134
441
  db.run(`DELETE FROM observation_embeddings WHERE observation_id IN (${removePlaceholders})`, removeIds);
135
- totalMerged += 1;
136
- totalRemoved += removeIds.length;
442
+ merged += 1;
443
+ removed += removeIds.length;
137
444
  }
445
+ return { merged, removed };
138
446
  });
139
- runConsolidation();
140
- return { merged: totalMerged, removed: totalRemoved };
447
+ return runConsolidation();
141
448
  }
142
449
  var init_Observations = __esm({
143
450
  "src/services/sqlite/Observations.ts"() {
144
451
  "use strict";
452
+ init_secrets();
453
+ init_categorizer();
145
454
  }
146
455
  });
147
456
 
@@ -164,7 +473,7 @@ function escapeLikePattern3(input) {
164
473
  }
165
474
  function sanitizeFTS5Query(query) {
166
475
  const trimmed = query.length > 1e4 ? query.substring(0, 1e4) : query;
167
- const terms = trimmed.replace(/[""]/g, "").split(/\s+/).filter((t) => t.length > 0).slice(0, 100).map((t) => `"${t}"`);
476
+ const terms = trimmed.replace(/[""\u0022]/g, "").split(/\s+/).filter((t) => t.length > 0).slice(0, 100).map((t) => `"${t}"`);
168
477
  return terms.join(" ");
169
478
  }
170
479
  function searchObservationsFTS(db, query, filters = {}) {
@@ -261,7 +570,7 @@ function searchObservationsLIKE(db, query, filters = {}) {
261
570
  sql += " AND created_at_epoch <= ?";
262
571
  params.push(filters.dateEnd);
263
572
  }
264
- sql += " ORDER BY created_at_epoch DESC LIMIT ?";
573
+ sql += " ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
265
574
  params.push(limit);
266
575
  const stmt = db.query(sql);
267
576
  return stmt.all(...params);
@@ -286,7 +595,7 @@ function searchSummariesFiltered(db, query, filters = {}) {
286
595
  sql += " AND created_at_epoch <= ?";
287
596
  params.push(filters.dateEnd);
288
597
  }
289
- sql += " ORDER BY created_at_epoch DESC LIMIT ?";
598
+ sql += " ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
290
599
  params.push(limit);
291
600
  const stmt = db.query(sql);
292
601
  return stmt.all(...params);
@@ -296,7 +605,7 @@ function getObservationsByIds(db, ids) {
296
605
  const validIds = ids.filter((id) => typeof id === "number" && Number.isInteger(id) && id > 0).slice(0, 500);
297
606
  if (validIds.length === 0) return [];
298
607
  const placeholders = validIds.map(() => "?").join(",");
299
- const sql = `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC`;
608
+ const sql = `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC, id DESC`;
300
609
  const stmt = db.query(sql);
301
610
  return stmt.all(...validIds);
302
611
  }
@@ -308,11 +617,11 @@ function getTimeline(db, anchorId, depthBefore = 5, depthAfter = 5) {
308
617
  const beforeStmt = db.query(`
309
618
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
310
619
  FROM observations
311
- WHERE created_at_epoch < ?
312
- ORDER BY created_at_epoch DESC
620
+ WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
621
+ ORDER BY created_at_epoch DESC, id DESC
313
622
  LIMIT ?
314
623
  `);
315
- const before = beforeStmt.all(anchorEpoch, depthBefore).reverse();
624
+ const before = beforeStmt.all(anchorEpoch, anchorEpoch, anchorId, depthBefore).reverse();
316
625
  const selfStmt = db.query(`
317
626
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
318
627
  FROM observations WHERE id = ?
@@ -321,34 +630,46 @@ function getTimeline(db, anchorId, depthBefore = 5, depthAfter = 5) {
321
630
  const afterStmt = db.query(`
322
631
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
323
632
  FROM observations
324
- WHERE created_at_epoch > ?
325
- ORDER BY created_at_epoch ASC
633
+ WHERE (created_at_epoch > ? OR (created_at_epoch = ? AND id > ?))
634
+ ORDER BY created_at_epoch ASC, id ASC
326
635
  LIMIT ?
327
636
  `);
328
- const after = afterStmt.all(anchorEpoch, depthAfter);
637
+ const after = afterStmt.all(anchorEpoch, anchorEpoch, anchorId, depthAfter);
329
638
  return [...before, ...self, ...after];
330
639
  }
331
640
  function getProjectStats(db, project) {
332
- const obsStmt = db.query("SELECT COUNT(*) as count FROM observations WHERE project = ?");
333
- const sumStmt = db.query("SELECT COUNT(*) as count FROM summaries WHERE project = ?");
334
- const sesStmt = db.query("SELECT COUNT(*) as count FROM sessions WHERE project = ?");
335
- const prmStmt = db.query("SELECT COUNT(*) as count FROM prompts WHERE project = ?");
336
- const discoveryStmt = db.query(
337
- "SELECT COALESCE(SUM(discovery_tokens), 0) as total FROM observations WHERE project = ?"
338
- );
339
- const discoveryTokens = discoveryStmt.get(project)?.total || 0;
340
- const readStmt = db.query(
341
- `SELECT COALESCE(SUM(
342
- CAST((LENGTH(COALESCE(title, '')) + LENGTH(COALESCE(narrative, ''))) / 4 AS INTEGER)
343
- ), 0) as total FROM observations WHERE project = ?`
344
- );
345
- const readTokens = readStmt.get(project)?.total || 0;
641
+ const sql = `
642
+ WITH
643
+ obs_stats AS (
644
+ SELECT
645
+ COUNT(*) as count,
646
+ COALESCE(SUM(discovery_tokens), 0) as discovery_tokens,
647
+ COALESCE(SUM(
648
+ CAST((LENGTH(COALESCE(title, '')) + LENGTH(COALESCE(narrative, ''))) / 4 AS INTEGER)
649
+ ), 0) as read_tokens
650
+ FROM observations WHERE project = ?
651
+ ),
652
+ sum_count AS (SELECT COUNT(*) as count FROM summaries WHERE project = ?),
653
+ ses_count AS (SELECT COUNT(*) as count FROM sessions WHERE project = ?),
654
+ prm_count AS (SELECT COUNT(*) as count FROM prompts WHERE project = ?)
655
+ SELECT
656
+ obs_stats.count as observations,
657
+ obs_stats.discovery_tokens,
658
+ obs_stats.read_tokens,
659
+ sum_count.count as summaries,
660
+ ses_count.count as sessions,
661
+ prm_count.count as prompts
662
+ FROM obs_stats, sum_count, ses_count, prm_count
663
+ `;
664
+ const row = db.query(sql).get(project, project, project, project);
665
+ const discoveryTokens = row?.discovery_tokens || 0;
666
+ const readTokens = row?.read_tokens || 0;
346
667
  const savings = Math.max(0, discoveryTokens - readTokens);
347
668
  return {
348
- observations: obsStmt.get(project)?.count || 0,
349
- summaries: sumStmt.get(project)?.count || 0,
350
- sessions: sesStmt.get(project)?.count || 0,
351
- prompts: prmStmt.get(project)?.count || 0,
669
+ observations: row?.observations || 0,
670
+ summaries: row?.summaries || 0,
671
+ sessions: row?.sessions || 0,
672
+ prompts: row?.prompts || 0,
352
673
  tokenEconomics: { discoveryTokens, readTokens, savings }
353
674
  };
354
675
  }
@@ -356,7 +677,7 @@ function getStaleObservations(db, project) {
356
677
  const rows = db.query(`
357
678
  SELECT * FROM observations
358
679
  WHERE project = ? AND files_modified IS NOT NULL AND files_modified != ''
359
- ORDER BY created_at_epoch DESC
680
+ ORDER BY created_at_epoch DESC, id DESC
360
681
  LIMIT 500
361
682
  `).all(project);
362
683
  const staleObs = [];
@@ -493,7 +814,7 @@ async function readStdin() {
493
814
  }
494
815
  resolve(JSON.parse(data));
495
816
  } catch (err) {
496
- reject(new Error(`Errore parsing stdin JSON: ${err}`));
817
+ reject(new Error(`Error parsing stdin JSON: ${err}`));
497
818
  }
498
819
  });
499
820
  process.stdin.on("error", (err) => {
@@ -551,11 +872,11 @@ async function runHook(name, handler) {
551
872
  }
552
873
  debugLog(name, "stdin", input);
553
874
  await handler(input);
554
- debugLog(name, "completato", { success: true });
875
+ debugLog(name, "completed", { success: true });
555
876
  process.exit(0);
556
877
  } catch (error) {
557
- debugLog(name, "errore", { error: String(error) });
558
- process.stderr.write(`[kiro-memory:${name}] Errore: ${error}
878
+ debugLog(name, "error", { error: String(error) });
879
+ process.stderr.write(`[kiro-memory:${name}] Error: ${error}
559
880
  `);
560
881
  process.exit(0);
561
882
  }
@@ -565,14 +886,15 @@ async function runHook(name, handler) {
565
886
  import BetterSqlite3 from "better-sqlite3";
566
887
  var Database = class {
567
888
  _db;
889
+ _stmtCache = /* @__PURE__ */ new Map();
568
890
  constructor(path, options) {
569
891
  this._db = new BetterSqlite3(path, {
570
- // better-sqlite3 crea il file di default (non serve 'create')
892
+ // better-sqlite3 creates the file by default ('create' not needed)
571
893
  readonly: options?.readwrite === false ? true : false
572
894
  });
573
895
  }
574
896
  /**
575
- * Esegui una query SQL senza risultati
897
+ * Execute a SQL query without results
576
898
  */
577
899
  run(sql, params) {
578
900
  const stmt = this._db.prepare(sql);
@@ -580,51 +902,53 @@ var Database = class {
580
902
  return result;
581
903
  }
582
904
  /**
583
- * Prepara una query con interfaccia compatibile bun:sqlite
905
+ * Prepare a query with bun:sqlite-compatible interface.
906
+ * Returns a cached prepared statement for repeated queries.
584
907
  */
585
908
  query(sql) {
586
- return new BunQueryCompat(this._db, sql);
909
+ let cached = this._stmtCache.get(sql);
910
+ if (!cached) {
911
+ cached = new BunQueryCompat(this._db, sql);
912
+ this._stmtCache.set(sql, cached);
913
+ }
914
+ return cached;
587
915
  }
588
916
  /**
589
- * Crea una transazione
917
+ * Create a transaction
590
918
  */
591
919
  transaction(fn) {
592
920
  return this._db.transaction(fn);
593
921
  }
594
922
  /**
595
- * Chiudi la connessione
923
+ * Close the connection
596
924
  */
597
925
  close() {
926
+ this._stmtCache.clear();
598
927
  this._db.close();
599
928
  }
600
929
  };
601
930
  var BunQueryCompat = class {
602
- _db;
603
- _sql;
931
+ _stmt;
604
932
  constructor(db, sql) {
605
- this._db = db;
606
- this._sql = sql;
933
+ this._stmt = db.prepare(sql);
607
934
  }
608
935
  /**
609
- * Restituisce tutte le righe
936
+ * Returns all rows
610
937
  */
611
938
  all(...params) {
612
- const stmt = this._db.prepare(this._sql);
613
- return params.length > 0 ? stmt.all(...params) : stmt.all();
939
+ return params.length > 0 ? this._stmt.all(...params) : this._stmt.all();
614
940
  }
615
941
  /**
616
- * Restituisce la prima riga o null
942
+ * Returns the first row or null
617
943
  */
618
944
  get(...params) {
619
- const stmt = this._db.prepare(this._sql);
620
- return params.length > 0 ? stmt.get(...params) : stmt.get();
945
+ return params.length > 0 ? this._stmt.get(...params) : this._stmt.get();
621
946
  }
622
947
  /**
623
- * Esegui senza risultati
948
+ * Execute without results
624
949
  */
625
950
  run(...params) {
626
- const stmt = this._db.prepare(this._sql);
627
- return params.length > 0 ? stmt.run(...params) : stmt.run();
951
+ return params.length > 0 ? this._stmt.run(...params) : this._stmt.run();
628
952
  }
629
953
  };
630
954
 
@@ -886,40 +1210,62 @@ function ensureDir(dirPath) {
886
1210
  var SQLITE_MMAP_SIZE_BYTES = 256 * 1024 * 1024;
887
1211
  var SQLITE_CACHE_SIZE_PAGES = 1e4;
888
1212
  var KiroMemoryDatabase = class {
889
- db;
1213
+ _db;
890
1214
  /**
891
- * @param dbPath - Percorso al file SQLite (default: DB_PATH)
892
- * @param skipMigrations - Se true, salta il migration runner (per hook ad alta frequenza)
1215
+ * Readonly accessor for the underlying Database instance.
1216
+ * Prefer using query() and run() proxy methods directly.
1217
+ */
1218
+ get db() {
1219
+ return this._db;
1220
+ }
1221
+ /**
1222
+ * @param dbPath - Path to the SQLite file (default: DB_PATH)
1223
+ * @param skipMigrations - If true, skip the migration runner (for high-frequency hooks)
893
1224
  */
894
1225
  constructor(dbPath = DB_PATH, skipMigrations = false) {
895
1226
  if (dbPath !== ":memory:") {
896
1227
  ensureDir(DATA_DIR2);
897
1228
  }
898
- this.db = new Database(dbPath, { create: true, readwrite: true });
899
- this.db.run("PRAGMA journal_mode = WAL");
900
- this.db.run("PRAGMA synchronous = NORMAL");
901
- this.db.run("PRAGMA foreign_keys = ON");
902
- this.db.run("PRAGMA temp_store = memory");
903
- this.db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
904
- this.db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
1229
+ this._db = new Database(dbPath, { create: true, readwrite: true });
1230
+ this._db.run("PRAGMA journal_mode = WAL");
1231
+ this._db.run("PRAGMA busy_timeout = 5000");
1232
+ this._db.run("PRAGMA synchronous = NORMAL");
1233
+ this._db.run("PRAGMA foreign_keys = ON");
1234
+ this._db.run("PRAGMA temp_store = memory");
1235
+ this._db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
1236
+ this._db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
905
1237
  if (!skipMigrations) {
906
- const migrationRunner = new MigrationRunner(this.db);
1238
+ const migrationRunner = new MigrationRunner(this._db);
907
1239
  migrationRunner.runAllMigrations();
908
1240
  }
909
1241
  }
910
1242
  /**
911
- * Esegue una funzione all'interno di una transazione atomica.
912
- * Se fn() lancia un errore, la transazione viene annullata automaticamente.
1243
+ * Prepare a query (delegates to underlying Database).
1244
+ * Proxy method to avoid ctx.db.db.query() double access.
1245
+ */
1246
+ query(sql) {
1247
+ return this._db.query(sql);
1248
+ }
1249
+ /**
1250
+ * Execute a SQL statement without results (delegates to underlying Database).
1251
+ * Proxy method to avoid ctx.db.db.run() double access.
1252
+ */
1253
+ run(sql, params) {
1254
+ return this._db.run(sql, params);
1255
+ }
1256
+ /**
1257
+ * Executes a function within an atomic transaction.
1258
+ * If fn() throws an error, the transaction is automatically rolled back.
913
1259
  */
914
1260
  withTransaction(fn) {
915
- const transaction = this.db.transaction(fn);
916
- return transaction(this.db);
1261
+ const transaction = this._db.transaction(fn);
1262
+ return transaction(this._db);
917
1263
  }
918
1264
  /**
919
1265
  * Close the database connection
920
1266
  */
921
1267
  close() {
922
- this.db.close();
1268
+ this._db.close();
923
1269
  }
924
1270
  };
925
1271
  var MigrationRunner = class {
@@ -1160,11 +1506,104 @@ var MigrationRunner = class {
1160
1506
  db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_epoch ON summaries(project, created_at_epoch DESC)");
1161
1507
  db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_epoch ON prompts(project, created_at_epoch DESC)");
1162
1508
  }
1509
+ },
1510
+ {
1511
+ version: 10,
1512
+ up: (db) => {
1513
+ db.run(`
1514
+ CREATE TABLE IF NOT EXISTS job_queue (
1515
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1516
+ type TEXT NOT NULL,
1517
+ status TEXT NOT NULL DEFAULT 'pending',
1518
+ payload TEXT,
1519
+ result TEXT,
1520
+ error TEXT,
1521
+ retry_count INTEGER DEFAULT 0,
1522
+ max_retries INTEGER DEFAULT 3,
1523
+ priority INTEGER DEFAULT 0,
1524
+ created_at TEXT NOT NULL,
1525
+ created_at_epoch INTEGER NOT NULL,
1526
+ started_at_epoch INTEGER,
1527
+ completed_at_epoch INTEGER
1528
+ )
1529
+ `);
1530
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_status ON job_queue(status)");
1531
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_type ON job_queue(type)");
1532
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_priority ON job_queue(status, priority DESC, created_at_epoch ASC)");
1533
+ }
1534
+ },
1535
+ {
1536
+ version: 11,
1537
+ up: (db) => {
1538
+ db.run("ALTER TABLE observations ADD COLUMN auto_category TEXT");
1539
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_category ON observations(auto_category)");
1540
+ }
1541
+ },
1542
+ {
1543
+ version: 12,
1544
+ up: (db) => {
1545
+ db.run(`
1546
+ CREATE TABLE IF NOT EXISTS github_links (
1547
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1548
+ observation_id INTEGER,
1549
+ session_id TEXT,
1550
+ repo TEXT NOT NULL,
1551
+ issue_number INTEGER,
1552
+ pr_number INTEGER,
1553
+ event_type TEXT NOT NULL,
1554
+ action TEXT,
1555
+ title TEXT,
1556
+ url TEXT,
1557
+ author TEXT,
1558
+ created_at TEXT NOT NULL,
1559
+ created_at_epoch INTEGER NOT NULL,
1560
+ FOREIGN KEY (observation_id) REFERENCES observations(id)
1561
+ )
1562
+ `);
1563
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo ON github_links(repo)");
1564
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_obs ON github_links(observation_id)");
1565
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_event ON github_links(event_type)");
1566
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_issue ON github_links(repo, issue_number)");
1567
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_pr ON github_links(repo, pr_number)");
1568
+ }
1569
+ },
1570
+ {
1571
+ version: 13,
1572
+ up: (db) => {
1573
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_keyset ON observations(created_at_epoch DESC, id DESC)");
1574
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_project_keyset ON observations(project, created_at_epoch DESC, id DESC)");
1575
+ db.run("CREATE INDEX IF NOT EXISTS idx_summaries_keyset ON summaries(created_at_epoch DESC, id DESC)");
1576
+ db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_keyset ON summaries(project, created_at_epoch DESC, id DESC)");
1577
+ db.run("CREATE INDEX IF NOT EXISTS idx_prompts_keyset ON prompts(created_at_epoch DESC, id DESC)");
1578
+ db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_keyset ON prompts(project, created_at_epoch DESC, id DESC)");
1579
+ }
1163
1580
  }
1164
1581
  ];
1165
1582
  }
1166
1583
  };
1167
1584
 
1585
// src/services/sqlite/cursor.ts
/**
 * Encode a keyset-pagination cursor as base64url("epoch:id").
 *
 * @param {number} id - Row id (positive integer).
 * @param {number} epoch - created_at_epoch of the row (positive integer).
 * @returns {string} Opaque base64url cursor.
 */
function encodeCursor(id, epoch) {
  const raw = `${epoch}:${id}`;
  return Buffer.from(raw, "utf8").toString("base64url");
}
/**
 * Decode a cursor produced by encodeCursor.
 *
 * Fails closed: any malformed, truncated, or tampered cursor yields null
 * instead of a partially parsed position.
 *
 * FIX: the previous parseInt-based parse accepted trailing garbage
 * ("12abc" -> 12), and Number.isInteger accepted unsafe magnitudes
 * (e.g. 1e30), so a corrupted cursor could silently paginate from the
 * wrong position. A strict digits-only parse rejects those.
 *
 * @param {string} cursor - base64url cursor string.
 * @returns {{epoch: number, id: number} | null} Decoded position, or null.
 */
function decodeCursor(cursor) {
  try {
    const raw = Buffer.from(cursor, "base64url").toString("utf8");
    // Exactly "<digits>:<digits>" — anything else is rejected.
    const match = /^(\d+):(\d+)$/.exec(raw);
    if (!match) return null;
    const epoch = Number(match[1]);
    const id = Number(match[2]);
    // Both parts must be positive safe integers to be a usable position.
    if (!Number.isSafeInteger(epoch) || epoch <= 0) return null;
    if (!Number.isSafeInteger(id) || id <= 0) return null;
    return { epoch, id };
  } catch {
    return null;
  }
}
1606
+
1168
1607
  // src/services/sqlite/Sessions.ts
1169
1608
  function createSession(db, contentSessionId, project, userPrompt) {
1170
1609
  const now = /* @__PURE__ */ new Date();
@@ -1208,16 +1647,16 @@ function createSummary(db, sessionId, project, request, investigated, learned, c
1208
1647
  }
1209
1648
/**
 * Fetch the most recent summaries for a project, newest first.
 * Ordering includes id as a tie-breaker so keyset pagination is stable
 * when rows share the same created_at_epoch.
 * @param {object} db - database handle exposing query(sql).all(...)
 * @param {string} project - project name to filter on
 * @param {number} [limit=50] - maximum rows returned
 * @returns {object[]} summary rows
 */
function getSummariesByProject(db, project, limit = 50) {
  const sql = "SELECT * FROM summaries WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
  return db.query(sql).all(project, limit);
}
1215
1654
  function searchSummaries(db, searchTerm, project) {
1216
1655
  const sql = project ? `SELECT * FROM summaries
1217
1656
  WHERE project = ? AND (request LIKE ? ESCAPE '\\' OR learned LIKE ? ESCAPE '\\' OR completed LIKE ? ESCAPE '\\' OR notes LIKE ? ESCAPE '\\')
1218
- ORDER BY created_at_epoch DESC` : `SELECT * FROM summaries
1657
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT * FROM summaries
1219
1658
  WHERE request LIKE ? ESCAPE '\\' OR learned LIKE ? ESCAPE '\\' OR completed LIKE ? ESCAPE '\\' OR notes LIKE ? ESCAPE '\\'
1220
- ORDER BY created_at_epoch DESC`;
1659
+ ORDER BY created_at_epoch DESC, id DESC`;
1221
1660
  const pattern = `%${escapeLikePattern2(searchTerm)}%`;
1222
1661
  const query = db.query(sql);
1223
1662
  if (project) {
@@ -1239,7 +1678,7 @@ function createPrompt(db, contentSessionId, project, promptNumber, promptText) {
1239
1678
  }
1240
1679
/**
 * Fetch the most recent prompts for a project, newest first.
 * Secondary id ordering keeps pagination deterministic across rows
 * sharing the same created_at_epoch.
 * @param {object} db - database handle exposing query(sql).all(...)
 * @param {string} project - project name to filter on
 * @param {number} [limit=100] - maximum rows returned
 * @returns {object[]} prompt rows
 */
function getPromptsByProject(db, project, limit = 100) {
  const sql = "SELECT * FROM prompts WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
  return db.query(sql).all(project, limit);
}
@@ -1267,13 +1706,13 @@ function createCheckpoint(db, sessionId, project, data) {
1267
1706
  }
1268
1707
/**
 * Return the single most recent checkpoint for a session, or undefined
 * when the session has none. The id tie-breaker makes "latest" stable
 * when multiple checkpoints share one created_at_epoch.
 * @param {object} db - database handle exposing query(sql).get(...)
 * @param {string} sessionId - session to look up
 * @returns {object | undefined} latest checkpoint row
 */
function getLatestCheckpoint(db, sessionId) {
  const sql = "SELECT * FROM checkpoints WHERE session_id = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1";
  return db.query(sql).get(sessionId);
}
1274
1713
/**
 * Return the single most recent checkpoint for a project, or undefined
 * when none exists. Uses (created_at_epoch DESC, id DESC) so the result
 * is deterministic even with duplicate epochs.
 * @param {object} db - database handle exposing query(sql).get(...)
 * @param {string} project - project to look up
 * @returns {object | undefined} latest checkpoint row
 */
function getLatestCheckpointByProject(db, project) {
  const sql = "SELECT * FROM checkpoints WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1";
  return db.query(sql).get(project);
}
@@ -1335,9 +1774,9 @@ function getReportData(db, project, startEpoch, endEpoch) {
1335
1774
  const staleCount = (project ? db.query(staleSql).get(project, startEpoch, endEpoch)?.count : db.query(staleSql).get(startEpoch, endEpoch)?.count) || 0;
1336
1775
  const summarySql = project ? `SELECT learned, completed, next_steps FROM summaries
1337
1776
  WHERE project = ? AND created_at_epoch >= ? AND created_at_epoch <= ?
1338
- ORDER BY created_at_epoch DESC` : `SELECT learned, completed, next_steps FROM summaries
1777
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT learned, completed, next_steps FROM summaries
1339
1778
  WHERE created_at_epoch >= ? AND created_at_epoch <= ?
1340
- ORDER BY created_at_epoch DESC`;
1779
+ ORDER BY created_at_epoch DESC, id DESC`;
1341
1780
  const summaryRows = project ? db.query(summarySql).all(project, startEpoch, endEpoch) : db.query(summarySql).all(startEpoch, endEpoch);
1342
1781
  const topLearnings = [];
1343
1782
  const completedTasks = [];
@@ -1402,20 +1841,61 @@ function getReportData(db, project, startEpoch, endEpoch) {
1402
1841
  // src/services/sqlite/index.ts
1403
1842
  init_Search();
1404
1843
 
1844
+ // src/types/worker-types.ts
1845
+ var KNOWLEDGE_TYPES = ["constraint", "decision", "heuristic", "rejected"];
1846
+
1847
+ // src/services/sqlite/Retention.ts
1848
+ var KNOWLEDGE_TYPE_LIST = KNOWLEDGE_TYPES;
1849
+ var KNOWLEDGE_PLACEHOLDERS = KNOWLEDGE_TYPE_LIST.map(() => "?").join(", ");
1850
+
1405
1851
  // src/sdk/index.ts
1406
1852
  init_Observations();
1407
1853
  import { createHash } from "crypto";
1408
1854
  init_Search();
1409
1855
 
1410
1856
  // src/services/search/EmbeddingService.ts
1857
+ var MODEL_CONFIGS = {
1858
+ "all-MiniLM-L6-v2": {
1859
+ modelId: "Xenova/all-MiniLM-L6-v2",
1860
+ dimensions: 384
1861
+ },
1862
+ "jina-code-v2": {
1863
+ modelId: "jinaai/jina-embeddings-v2-base-code",
1864
+ dimensions: 768
1865
+ },
1866
+ "bge-small-en": {
1867
+ modelId: "BAAI/bge-small-en-v1.5",
1868
+ dimensions: 384
1869
+ }
1870
+ };
1871
+ var FASTEMBED_COMPATIBLE_MODELS = /* @__PURE__ */ new Set(["all-MiniLM-L6-v2", "bge-small-en"]);
1411
1872
  var EmbeddingService = class {
1412
1873
  provider = null;
1413
1874
  model = null;
1414
1875
  initialized = false;
1415
1876
  initializing = null;
1877
+ config;
1878
+ configName;
1879
+ constructor() {
1880
+ const envModel = process.env.KIRO_MEMORY_EMBEDDING_MODEL || "all-MiniLM-L6-v2";
1881
+ this.configName = envModel;
1882
+ if (MODEL_CONFIGS[envModel]) {
1883
+ this.config = MODEL_CONFIGS[envModel];
1884
+ } else if (envModel.includes("/")) {
1885
+ const dimensions = parseInt(process.env.KIRO_MEMORY_EMBEDDING_DIMENSIONS || "384", 10);
1886
+ this.config = {
1887
+ modelId: envModel,
1888
+ dimensions: isNaN(dimensions) ? 384 : dimensions
1889
+ };
1890
+ } else {
1891
+ logger.warn("EMBEDDING", `Unknown model name '${envModel}', falling back to 'all-MiniLM-L6-v2'`);
1892
+ this.configName = "all-MiniLM-L6-v2";
1893
+ this.config = MODEL_CONFIGS["all-MiniLM-L6-v2"];
1894
+ }
1895
+ }
1416
1896
  /**
1417
- * Inizializza il servizio di embedding.
1418
- * Tenta fastembed, poi @huggingface/transformers, poi fallback a null.
1897
+ * Initialize the embedding service.
1898
+ * Tries fastembed (when compatible), then @huggingface/transformers, then falls back to null.
1419
1899
  */
1420
1900
  async initialize() {
1421
1901
  if (this.initialized) return this.provider !== null;
@@ -1426,45 +1906,48 @@ var EmbeddingService = class {
1426
1906
  return result;
1427
1907
  }
1428
1908
  async _doInitialize() {
1429
- try {
1430
- const fastembed = await import("fastembed");
1431
- const EmbeddingModel = fastembed.EmbeddingModel || fastembed.default?.EmbeddingModel;
1432
- const FlagEmbedding = fastembed.FlagEmbedding || fastembed.default?.FlagEmbedding;
1433
- if (FlagEmbedding && EmbeddingModel) {
1434
- this.model = await FlagEmbedding.init({
1435
- model: EmbeddingModel.BGESmallENV15
1436
- });
1437
- this.provider = "fastembed";
1438
- this.initialized = true;
1439
- logger.info("EMBEDDING", "Inizializzato con fastembed (BGE-small-en-v1.5)");
1440
- return true;
1909
+ const fastembedCompatible = FASTEMBED_COMPATIBLE_MODELS.has(this.configName);
1910
+ if (fastembedCompatible) {
1911
+ try {
1912
+ const fastembed = await import("fastembed");
1913
+ const EmbeddingModel = fastembed.EmbeddingModel || fastembed.default?.EmbeddingModel;
1914
+ const FlagEmbedding = fastembed.FlagEmbedding || fastembed.default?.FlagEmbedding;
1915
+ if (FlagEmbedding && EmbeddingModel) {
1916
+ this.model = await FlagEmbedding.init({
1917
+ model: EmbeddingModel.BGESmallENV15
1918
+ });
1919
+ this.provider = "fastembed";
1920
+ this.initialized = true;
1921
+ logger.info("EMBEDDING", `Initialized with fastembed (BGE-small-en-v1.5) for model '${this.configName}'`);
1922
+ return true;
1923
+ }
1924
+ } catch (error) {
1925
+ logger.debug("EMBEDDING", `fastembed not available: ${error}`);
1441
1926
  }
1442
- } catch (error) {
1443
- logger.debug("EMBEDDING", `fastembed non disponibile: ${error}`);
1444
1927
  }
1445
1928
  try {
1446
1929
  const transformers = await import("@huggingface/transformers");
1447
1930
  const pipeline = transformers.pipeline || transformers.default?.pipeline;
1448
1931
  if (pipeline) {
1449
- this.model = await pipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2", {
1932
+ this.model = await pipeline("feature-extraction", this.config.modelId, {
1450
1933
  quantized: true
1451
1934
  });
1452
1935
  this.provider = "transformers";
1453
1936
  this.initialized = true;
1454
- logger.info("EMBEDDING", "Inizializzato con @huggingface/transformers (all-MiniLM-L6-v2)");
1937
+ logger.info("EMBEDDING", `Initialized with @huggingface/transformers (${this.config.modelId})`);
1455
1938
  return true;
1456
1939
  }
1457
1940
  } catch (error) {
1458
- logger.debug("EMBEDDING", `@huggingface/transformers non disponibile: ${error}`);
1941
+ logger.debug("EMBEDDING", `@huggingface/transformers not available: ${error}`);
1459
1942
  }
1460
1943
  this.provider = null;
1461
1944
  this.initialized = true;
1462
- logger.warn("EMBEDDING", "Nessun provider embedding disponibile, ricerca semantica disabilitata");
1945
+ logger.warn("EMBEDDING", "No embedding provider available, semantic search disabled");
1463
1946
  return false;
1464
1947
  }
1465
1948
  /**
1466
- * Genera embedding per un singolo testo.
1467
- * Ritorna Float32Array con 384 dimensioni, o null se non disponibile.
1949
+ * Generate embedding for a single text.
1950
+ * Returns Float32Array with configured dimensions, or null if not available.
1468
1951
  */
1469
1952
  async embed(text) {
1470
1953
  if (!this.initialized) await this.initialize();
@@ -1477,46 +1960,118 @@ var EmbeddingService = class {
1477
1960
  return await this._embedTransformers(truncated);
1478
1961
  }
1479
1962
  } catch (error) {
1480
- logger.error("EMBEDDING", `Errore generazione embedding: ${error}`);
1963
+ logger.error("EMBEDDING", `Error generating embedding: ${error}`);
1481
1964
  }
1482
1965
  return null;
1483
1966
  }
1484
1967
  /**
1485
- * Genera embeddings in batch.
1968
+ * Generate embeddings in batch.
1969
+ * Uses native batch support when available (fastembed, transformers),
1970
+ * falls back to serial processing on batch failure.
1486
1971
  */
1487
1972
  async embedBatch(texts) {
1488
1973
  if (!this.initialized) await this.initialize();
1489
1974
  if (!this.provider || !this.model) return texts.map(() => null);
1490
- const results = [];
1491
- for (const text of texts) {
1492
- try {
1493
- const embedding = await this.embed(text);
1494
- results.push(embedding);
1495
- } catch {
1496
- results.push(null);
1975
+ if (texts.length === 0) return [];
1976
+ const truncated = texts.map((t) => t.substring(0, 2e3));
1977
+ try {
1978
+ if (this.provider === "fastembed") {
1979
+ return await this._embedBatchFastembed(truncated);
1980
+ } else if (this.provider === "transformers") {
1981
+ return await this._embedBatchTransformers(truncated);
1497
1982
  }
1983
+ } catch (error) {
1984
+ logger.warn("EMBEDDING", `Batch embedding failed, falling back to serial: ${error}`);
1498
1985
  }
1499
- return results;
1986
+ return this._embedBatchSerial(truncated);
1500
1987
  }
1501
1988
  /**
1502
- * Verifica se il servizio è disponibile.
1989
+ * Check if the service is available.
1503
1990
  */
1504
1991
  isAvailable() {
1505
1992
  return this.initialized && this.provider !== null;
1506
1993
  }
1507
1994
  /**
1508
- * Nome del provider attivo.
1995
+ * Name of the active provider.
1509
1996
  */
1510
1997
  getProvider() {
1511
1998
  return this.provider;
1512
1999
  }
1513
2000
  /**
1514
- * Dimensioni del vettore embedding.
2001
+ * Embedding vector dimensions for the active model configuration.
1515
2002
  */
1516
2003
  getDimensions() {
1517
- return 384;
2004
+ return this.config.dimensions;
2005
+ }
2006
+ /**
2007
+ * Human-readable model name used as identifier in the observation_embeddings table.
2008
+ * Returns the short name (e.g., 'all-MiniLM-L6-v2') or the full HF model ID for custom models.
2009
+ */
2010
+ getModelName() {
2011
+ return this.configName;
2012
+ }
2013
+ // --- Batch implementations ---
2014
+ /**
2015
+ * Native batch embedding with fastembed.
2016
+ * FlagEmbedding.embed() accepts string[] and returns an async iterable of batches.
2017
+ */
2018
+ async _embedBatchFastembed(texts) {
2019
+ const results = [];
2020
+ const embeddings = this.model.embed(texts, texts.length);
2021
+ for await (const batch of embeddings) {
2022
+ if (batch) {
2023
+ for (const vec of batch) {
2024
+ results.push(vec instanceof Float32Array ? vec : new Float32Array(vec));
2025
+ }
2026
+ }
2027
+ }
2028
+ while (results.length < texts.length) {
2029
+ results.push(null);
2030
+ }
2031
+ return results;
1518
2032
  }
1519
- // --- Provider specifici ---
2033
+ /**
2034
+ * Batch embedding with @huggingface/transformers pipeline.
2035
+ * The pipeline accepts string[] and returns a Tensor with shape [N, dims].
2036
+ */
2037
+ async _embedBatchTransformers(texts) {
2038
+ const output = await this.model(texts, {
2039
+ pooling: "mean",
2040
+ normalize: true
2041
+ });
2042
+ if (!output?.data) {
2043
+ return texts.map(() => null);
2044
+ }
2045
+ const dims = this.getDimensions();
2046
+ const data = output.data instanceof Float32Array ? output.data : new Float32Array(output.data);
2047
+ const results = [];
2048
+ for (let i = 0; i < texts.length; i++) {
2049
+ const offset = i * dims;
2050
+ if (offset + dims <= data.length) {
2051
+ results.push(data.slice(offset, offset + dims));
2052
+ } else {
2053
+ results.push(null);
2054
+ }
2055
+ }
2056
+ return results;
2057
+ }
2058
+ /**
2059
+ * Serial fallback: embed texts one at a time.
2060
+ * Used when native batch fails.
2061
+ */
2062
+ async _embedBatchSerial(texts) {
2063
+ const results = [];
2064
+ for (const text of texts) {
2065
+ try {
2066
+ const embedding = await this.embed(text);
2067
+ results.push(embedding);
2068
+ } catch {
2069
+ results.push(null);
2070
+ }
2071
+ }
2072
+ return results;
2073
+ }
2074
+ // --- Single-text provider implementations ---
1520
2075
  async _embedFastembed(text) {
1521
2076
  const embeddings = this.model.embed([text], 1);
1522
2077
  for await (const batch of embeddings) {
@@ -1547,17 +2102,21 @@ function getEmbeddingService() {
1547
2102
  }
1548
2103
 
1549
2104
  // src/services/search/VectorSearch.ts
2105
+ var DEFAULT_MAX_CANDIDATES = 2e3;
1550
2106
/**
 * Cosine similarity between two equal-length numeric vectors.
 * Returns 0 for mismatched lengths or when either vector has zero norm.
 * Single sqrt over the product of squared norms (one call instead of two).
 * @param {Float32Array | number[]} a
 * @param {Float32Array | number[]} b
 * @returns {number} similarity in [-1, 1]
 */
function cosineSimilarity(a, b) {
  const len = a.length;
  if (len !== b.length) return 0;
  let dot = 0;
  let sqA = 0;
  let sqB = 0;
  let i = 0;
  while (i < len) {
    const x = a[i];
    const y = b[i];
    dot += x * y;
    sqA += x * x;
    sqB += y * y;
    i += 1;
  }
  const norm = Math.sqrt(sqA * sqB);
  return norm === 0 ? 0 : dot / norm;
}
@@ -1570,23 +2129,36 @@ function bufferToFloat32(buf) {
1570
2129
  }
1571
2130
  var VectorSearch = class {
1572
2131
  /**
1573
- * Ricerca semantica: calcola cosine similarity tra query e tutti gli embeddings.
2132
+ * Semantic search with SQL pre-filtering for scalability.
2133
+ *
2134
+ * 2-phase strategy:
2135
+ * 1. SQL pre-filters by project + sorts by recency (loads max N candidates)
2136
+ * 2. JS computes cosine similarity only on filtered candidates
2137
+ *
2138
+ * With 50k observations and maxCandidates=2000, loads only ~4% of data.
1574
2139
  */
1575
2140
  async search(db, queryEmbedding, options = {}) {
1576
2141
  const limit = options.limit || 10;
1577
2142
  const threshold = options.threshold || 0.3;
2143
+ const maxCandidates = options.maxCandidates || DEFAULT_MAX_CANDIDATES;
1578
2144
  try {
1579
- let sql = `
2145
+ const conditions = [];
2146
+ const params = [];
2147
+ if (options.project) {
2148
+ conditions.push("o.project = ?");
2149
+ params.push(options.project);
2150
+ }
2151
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2152
+ const sql = `
1580
2153
  SELECT e.observation_id, e.embedding,
1581
2154
  o.title, o.text, o.type, o.project, o.created_at, o.created_at_epoch
1582
2155
  FROM observation_embeddings e
1583
2156
  JOIN observations o ON o.id = e.observation_id
2157
+ ${whereClause}
2158
+ ORDER BY o.created_at_epoch DESC
2159
+ LIMIT ?
1584
2160
  `;
1585
- const params = [];
1586
- if (options.project) {
1587
- sql += " WHERE o.project = ?";
1588
- params.push(options.project);
1589
- }
2161
+ params.push(maxCandidates);
1590
2162
  const rows = db.query(sql).all(...params);
1591
2163
  const scored = [];
1592
2164
  for (const row of rows) {
@@ -1607,14 +2179,15 @@ var VectorSearch = class {
1607
2179
  }
1608
2180
  }
1609
2181
  scored.sort((a, b) => b.similarity - a.similarity);
2182
+ logger.debug("VECTOR", `Search: ${rows.length} candidates \u2192 ${scored.length} above threshold \u2192 ${Math.min(scored.length, limit)} results`);
1610
2183
  return scored.slice(0, limit);
1611
2184
  } catch (error) {
1612
- logger.error("VECTOR", `Errore ricerca vettoriale: ${error}`);
2185
+ logger.error("VECTOR", `Vector search error: ${error}`);
1613
2186
  return [];
1614
2187
  }
1615
2188
  }
1616
2189
  /**
1617
- * Salva embedding per un'osservazione.
2190
+ * Store embedding for an observation.
1618
2191
  */
1619
2192
  async storeEmbedding(db, observationId, embedding, model) {
1620
2193
  try {
@@ -1630,18 +2203,18 @@ var VectorSearch = class {
1630
2203
  embedding.length,
1631
2204
  (/* @__PURE__ */ new Date()).toISOString()
1632
2205
  );
1633
- logger.debug("VECTOR", `Embedding salvato per osservazione ${observationId}`);
2206
+ logger.debug("VECTOR", `Embedding saved for observation ${observationId}`);
1634
2207
  } catch (error) {
1635
- logger.error("VECTOR", `Errore salvataggio embedding: ${error}`);
2208
+ logger.error("VECTOR", `Error saving embedding: ${error}`);
1636
2209
  }
1637
2210
  }
1638
2211
  /**
1639
- * Genera embeddings per osservazioni che non li hanno ancora.
2212
+ * Generate embeddings for observations that don't have them yet.
1640
2213
  */
1641
2214
  async backfillEmbeddings(db, batchSize = 50) {
1642
2215
  const embeddingService2 = getEmbeddingService();
1643
2216
  if (!await embeddingService2.initialize()) {
1644
- logger.warn("VECTOR", "Embedding service non disponibile, backfill saltato");
2217
+ logger.warn("VECTOR", "Embedding service not available, backfill skipped");
1645
2218
  return 0;
1646
2219
  }
1647
2220
  const rows = db.query(`
@@ -1654,7 +2227,7 @@ var VectorSearch = class {
1654
2227
  `).all(batchSize);
1655
2228
  if (rows.length === 0) return 0;
1656
2229
  let count = 0;
1657
- const model = embeddingService2.getProvider() || "unknown";
2230
+ const model = embeddingService2.getModelName();
1658
2231
  for (const row of rows) {
1659
2232
  const parts = [row.title];
1660
2233
  if (row.text) parts.push(row.text);
@@ -1667,11 +2240,11 @@ var VectorSearch = class {
1667
2240
  count++;
1668
2241
  }
1669
2242
  }
1670
- logger.info("VECTOR", `Backfill completato: ${count}/${rows.length} embeddings generati`);
2243
+ logger.info("VECTOR", `Backfill completed: ${count}/${rows.length} embeddings generated`);
1671
2244
  return count;
1672
2245
  }
1673
2246
  /**
1674
- * Statistiche sugli embeddings.
2247
+ * Embedding statistics.
1675
2248
  */
1676
2249
  getStats(db) {
1677
2250
  try {
@@ -1698,21 +2271,21 @@ function getVectorSearch() {
1698
2271
  var HybridSearch = class {
1699
2272
  embeddingInitialized = false;
1700
2273
  /**
1701
- * Inizializza il servizio di embedding (lazy, non bloccante)
2274
+ * Initialize the embedding service (lazy, non-blocking)
1702
2275
  */
1703
2276
  async initialize() {
1704
2277
  try {
1705
2278
  const embeddingService2 = getEmbeddingService();
1706
2279
  await embeddingService2.initialize();
1707
2280
  this.embeddingInitialized = embeddingService2.isAvailable();
1708
- logger.info("SEARCH", `HybridSearch inizializzato (embedding: ${this.embeddingInitialized ? "attivo" : "disattivato"})`);
2281
+ logger.info("SEARCH", `HybridSearch initialized (embedding: ${this.embeddingInitialized ? "active" : "disabled"})`);
1709
2282
  } catch (error) {
1710
- logger.warn("SEARCH", "Inizializzazione embedding fallita, uso solo FTS5", {}, error);
2283
+ logger.warn("SEARCH", "Embedding initialization failed, using only FTS5", {}, error);
1711
2284
  this.embeddingInitialized = false;
1712
2285
  }
1713
2286
  }
1714
2287
  /**
1715
- * Ricerca ibrida con scoring a 4 segnali
2288
+ * Hybrid search with 4-signal scoring
1716
2289
  */
1717
2290
  async search(db, query, options = {}) {
1718
2291
  const limit = options.limit || 10;
@@ -1728,7 +2301,7 @@ var HybridSearch = class {
1728
2301
  const vectorResults = await vectorSearch2.search(db, queryEmbedding, {
1729
2302
  project: options.project,
1730
2303
  limit: limit * 2,
1731
- // Prendiamo piu risultati per il ranking
2304
+ // Fetch more results for ranking
1732
2305
  threshold: 0.3
1733
2306
  });
1734
2307
  for (const hit of vectorResults) {
@@ -1745,10 +2318,10 @@ var HybridSearch = class {
1745
2318
  source: "vector"
1746
2319
  });
1747
2320
  }
1748
- logger.debug("SEARCH", `Vector search: ${vectorResults.length} risultati`);
2321
+ logger.debug("SEARCH", `Vector search: ${vectorResults.length} results`);
1749
2322
  }
1750
2323
  } catch (error) {
1751
- logger.warn("SEARCH", "Ricerca vettoriale fallita, uso solo keyword", {}, error);
2324
+ logger.warn("SEARCH", "Vector search failed, using only keyword", {}, error);
1752
2325
  }
1753
2326
  }
1754
2327
  try {
@@ -1778,9 +2351,9 @@ var HybridSearch = class {
1778
2351
  });
1779
2352
  }
1780
2353
  }
1781
- logger.debug("SEARCH", `Keyword search: ${keywordResults.length} risultati`);
2354
+ logger.debug("SEARCH", `Keyword search: ${keywordResults.length} results`);
1782
2355
  } catch (error) {
1783
- logger.error("SEARCH", "Ricerca keyword fallita", {}, error);
2356
+ logger.error("SEARCH", "Keyword search failed", {}, error);
1784
2357
  }
1785
2358
  if (rawItems.size === 0) return [];
1786
2359
  const allFTS5Ranks = Array.from(rawItems.values()).filter((item) => item.fts5Rank !== null).map((item) => item.fts5Rank);
@@ -1832,9 +2405,6 @@ function getHybridSearch() {
1832
2405
  return hybridSearch;
1833
2406
  }
1834
2407
 
1835
- // src/types/worker-types.ts
1836
- var KNOWLEDGE_TYPES = ["constraint", "decision", "heuristic", "rejected"];
1837
-
1838
2408
  // src/sdk/index.ts
1839
2409
  var KiroMemorySDK = class {
1840
2410
  db;
@@ -1868,33 +2438,33 @@ var KiroMemorySDK = class {
1868
2438
  };
1869
2439
  }
1870
2440
  /**
1871
- * Valida input per storeObservation
2441
+ * Validate input for storeObservation
1872
2442
  */
1873
2443
  validateObservationInput(data) {
1874
2444
  if (!data.type || typeof data.type !== "string" || data.type.length > 100) {
1875
- throw new Error("type \xE8 obbligatorio (stringa, max 100 caratteri)");
2445
+ throw new Error("type is required (string, max 100 chars)");
1876
2446
  }
1877
2447
  if (!data.title || typeof data.title !== "string" || data.title.length > 500) {
1878
- throw new Error("title \xE8 obbligatorio (stringa, max 500 caratteri)");
2448
+ throw new Error("title is required (string, max 500 chars)");
1879
2449
  }
1880
2450
  if (!data.content || typeof data.content !== "string" || data.content.length > 1e5) {
1881
- throw new Error("content \xE8 obbligatorio (stringa, max 100KB)");
2451
+ throw new Error("content is required (string, max 100KB)");
1882
2452
  }
1883
2453
  }
1884
2454
  /**
1885
- * Valida input per storeSummary
2455
+ * Validate input for storeSummary
1886
2456
  */
1887
2457
  validateSummaryInput(data) {
1888
2458
  const MAX = 5e4;
1889
2459
  for (const [key, val] of Object.entries(data)) {
1890
2460
  if (val !== void 0 && val !== null) {
1891
- if (typeof val !== "string") throw new Error(`${key} deve essere una stringa`);
1892
- if (val.length > MAX) throw new Error(`${key} troppo grande (max 50KB)`);
2461
+ if (typeof val !== "string") throw new Error(`${key} must be a string`);
2462
+ if (val.length > MAX) throw new Error(`${key} too large (max 50KB)`);
1893
2463
  }
1894
2464
  }
1895
2465
  }
1896
2466
  /**
1897
- * Genera e salva embedding per un'osservazione (fire-and-forget, non blocca)
2467
+ * Generate and store embedding for an observation (fire-and-forget, non-blocking)
1898
2468
  */
1899
2469
  async generateEmbeddingAsync(observationId, title, content, concepts) {
1900
2470
  try {
@@ -1914,39 +2484,39 @@ var KiroMemorySDK = class {
1914
2484
  );
1915
2485
  }
1916
2486
  } catch (error) {
1917
- logger.debug("SDK", `Embedding generation fallita per obs ${observationId}: ${error}`);
2487
+ logger.debug("SDK", `Embedding generation failed for obs ${observationId}: ${error}`);
1918
2488
  }
1919
2489
  }
1920
2490
  /**
1921
- * Genera content hash SHA256 per deduplicazione basata su contenuto.
1922
- * Usa (project + type + title + narrative) come tupla di identità semantica.
1923
- * NON include sessionId perché è unico ad ogni invocazione.
2491
+ * Generate SHA256 content hash for content-based deduplication.
2492
+ * Uses (project + type + title + narrative) as semantic identity tuple.
2493
+ * Does NOT include sessionId since it's unique per invocation.
1924
2494
  */
1925
2495
  generateContentHash(type, title, narrative) {
1926
2496
  const payload = `${this.project}|${type}|${title}|${narrative || ""}`;
1927
2497
  return createHash("sha256").update(payload).digest("hex");
1928
2498
  }
1929
2499
  /**
1930
- * Finestre di deduplicazione per tipo (ms).
1931
- * Tipi con molte ripetizioni hanno finestre più ampie.
2500
+ * Deduplication windows per type (ms).
2501
+ * Types with many repetitions have wider windows.
1932
2502
  */
1933
2503
  getDeduplicationWindow(type) {
1934
2504
  switch (type) {
1935
2505
  case "file-read":
1936
2506
  return 6e4;
1937
- // 60s — letture frequenti sugli stessi file
2507
+ // 60s — frequent reads on the same files
1938
2508
  case "file-write":
1939
2509
  return 1e4;
1940
- // 10s — scritture rapide consecutive
2510
+ // 10s — rapid consecutive writes
1941
2511
  case "command":
1942
2512
  return 3e4;
1943
2513
  // 30s — standard
1944
2514
  case "research":
1945
2515
  return 12e4;
1946
- // 120s — web search e fetch ripetuti
2516
+ // 120s — repeated web search and fetch
1947
2517
  case "delegation":
1948
2518
  return 6e4;
1949
- // 60s — delegazioni rapide
2519
+ // 60s — rapid delegations
1950
2520
  default:
1951
2521
  return 3e4;
1952
2522
  }
@@ -1960,7 +2530,7 @@ var KiroMemorySDK = class {
1960
2530
  const contentHash = this.generateContentHash(data.type, data.title, data.narrative);
1961
2531
  const dedupWindow = this.getDeduplicationWindow(data.type);
1962
2532
  if (isDuplicateObservation(this.db.db, contentHash, dedupWindow)) {
1963
- logger.debug("SDK", `Osservazione duplicata scartata (${data.type}, ${dedupWindow}ms): ${data.title}`);
2533
+ logger.debug("SDK", `Duplicate observation discarded (${data.type}, ${dedupWindow}ms): ${data.title}`);
1964
2534
  return -1;
1965
2535
  }
1966
2536
  const filesRead = data.filesRead || (data.type === "file-read" ? data.files : void 0);
@@ -1988,12 +2558,12 @@ var KiroMemorySDK = class {
1988
2558
  return observationId;
1989
2559
  }
1990
2560
  /**
1991
- * Salva conoscenza strutturata (constraint, decision, heuristic, rejected).
1992
- * Usa il campo `type` per il knowledgeType e `facts` per i metadati JSON.
2561
+ * Store structured knowledge (constraint, decision, heuristic, rejected).
2562
+ * Uses the `type` field for knowledgeType and `facts` for JSON metadata.
1993
2563
  */
1994
2564
  async storeKnowledge(data) {
1995
2565
  if (!KNOWLEDGE_TYPES.includes(data.knowledgeType)) {
1996
- throw new Error(`knowledgeType non valido: ${data.knowledgeType}. Valori ammessi: ${KNOWLEDGE_TYPES.join(", ")}`);
2566
+ throw new Error(`Invalid knowledgeType: ${data.knowledgeType}. Allowed values: ${KNOWLEDGE_TYPES.join(", ")}`);
1997
2567
  }
1998
2568
  this.validateObservationInput({ type: data.knowledgeType, title: data.title, content: data.content });
1999
2569
  const metadata = (() => {
@@ -2025,9 +2595,9 @@ var KiroMemorySDK = class {
2025
2595
  }
2026
2596
  })();
2027
2597
  const sessionId = "sdk-" + Date.now();
2028
- const contentHash = this.generateContentHash(data.type, data.title);
2598
+ const contentHash = this.generateContentHash(data.knowledgeType, data.title);
2029
2599
  if (isDuplicateObservation(this.db.db, contentHash)) {
2030
- logger.debug("SDK", `Knowledge duplicata scartata: ${data.title}`);
2600
+ logger.debug("SDK", `Duplicate knowledge discarded: ${data.title}`);
2031
2601
  return -1;
2032
2602
  }
2033
2603
  const discoveryTokens = Math.ceil(data.content.length / 4);
@@ -2044,11 +2614,11 @@ var KiroMemorySDK = class {
2044
2614
  null,
2045
2615
  // narrative
2046
2616
  JSON.stringify(metadata),
2047
- // facts = metadati JSON
2617
+ // facts = JSON metadata
2048
2618
  data.concepts?.join(", ") || null,
2049
2619
  data.files?.join(", ") || null,
2050
2620
  null,
2051
- // filesModified: knowledge non modifica file
2621
+ // filesModified: knowledge doesn't modify files
2052
2622
  0,
2053
2623
  // prompt_number
2054
2624
  contentHash,
@@ -2159,8 +2729,8 @@ var KiroMemorySDK = class {
2159
2729
  return this.project;
2160
2730
  }
2161
2731
  /**
2162
- * Ricerca ibrida: vector search + keyword FTS5
2163
- * Richiede inizializzazione HybridSearch (embedding service)
2732
+ * Hybrid search: vector search + keyword FTS5
2733
+ * Requires HybridSearch initialization (embedding service)
2164
2734
  */
2165
2735
  async hybridSearch(query, options = {}) {
2166
2736
  const hybridSearch2 = getHybridSearch();
@@ -2170,8 +2740,8 @@ var KiroMemorySDK = class {
2170
2740
  });
2171
2741
  }
2172
2742
  /**
2173
- * Ricerca solo semantica (vector search)
2174
- * Ritorna risultati basati su similarità coseno con gli embeddings
2743
+ * Semantic-only search (vector search)
2744
+ * Returns results based on cosine similarity with embeddings
2175
2745
  */
2176
2746
  async semanticSearch(query, options = {}) {
2177
2747
  const embeddingService2 = getEmbeddingService();
@@ -2206,21 +2776,21 @@ var KiroMemorySDK = class {
2206
2776
  }));
2207
2777
  }
2208
2778
  /**
2209
- * Genera embeddings per osservazioni che non li hanno ancora
2779
+ * Generate embeddings for observations that don't have them yet
2210
2780
  */
2211
2781
  async backfillEmbeddings(batchSize = 50) {
2212
2782
  const vectorSearch2 = getVectorSearch();
2213
2783
  return vectorSearch2.backfillEmbeddings(this.db.db, batchSize);
2214
2784
  }
2215
2785
  /**
2216
- * Statistiche sugli embeddings nel database
2786
+ * Embedding statistics in the database
2217
2787
  */
2218
2788
  getEmbeddingStats() {
2219
2789
  const vectorSearch2 = getVectorSearch();
2220
2790
  return vectorSearch2.getStats(this.db.db);
2221
2791
  }
2222
2792
  /**
2223
- * Inizializza il servizio di embedding (lazy, chiamare prima di hybridSearch)
2793
+ * Initialize the embedding service (lazy, call before hybridSearch)
2224
2794
  */
2225
2795
  async initializeEmbeddings() {
2226
2796
  const hybridSearch2 = getHybridSearch();
@@ -2228,10 +2798,10 @@ var KiroMemorySDK = class {
2228
2798
  return getEmbeddingService().isAvailable();
2229
2799
  }
2230
2800
  /**
2231
- * Contesto smart con ranking a 4 segnali e budget token.
2801
+ * Smart context with 4-signal ranking and token budget.
2232
2802
  *
2233
- * Se query presente: usa HybridSearch con SEARCH_WEIGHTS.
2234
- * Se senza query: ranking per recency + project match (CONTEXT_WEIGHTS).
2803
+ * If query present: uses HybridSearch with SEARCH_WEIGHTS.
2804
+ * If no query: ranking by recency + project match (CONTEXT_WEIGHTS).
2235
2805
  */
2236
2806
  async getSmartContext(options = {}) {
2237
2807
  const tokenBudget = options.tokenBudget || parseInt(process.env.KIRO_MEMORY_CONTEXT_TOKENS || "0", 10) || 2e3;
@@ -2305,8 +2875,8 @@ var KiroMemorySDK = class {
2305
2875
  };
2306
2876
  }
2307
2877
  /**
2308
- * Rileva osservazioni stale (file modificati dopo la creazione) e le marca nel DB.
2309
- * Ritorna il numero di osservazioni marcate come stale.
2878
+ * Detect stale observations (files modified after creation) and mark them in DB.
2879
+ * Returns the number of observations marked as stale.
2310
2880
  */
2311
2881
  async detectStaleObservations() {
2312
2882
  const staleObs = getStaleObservations(this.db.db, this.project);
@@ -2317,14 +2887,14 @@ var KiroMemorySDK = class {
2317
2887
  return staleObs.length;
2318
2888
  }
2319
2889
  /**
2320
- * Consolida osservazioni duplicate sullo stesso file e tipo.
2321
- * Raggruppa per (project, type, files_modified), mantiene la piu recente.
2890
+ * Consolidate duplicate observations on the same file and type.
2891
+ * Groups by (project, type, files_modified), keeps the most recent.
2322
2892
  */
2323
2893
  async consolidateObservations(options = {}) {
2324
2894
  return consolidateObservations(this.db.db, this.project, options);
2325
2895
  }
2326
2896
  /**
2327
- * Statistiche decay: totale, stale, mai accedute, accedute di recente.
2897
+ * Decay statistics: total, stale, never accessed, recently accessed.
2328
2898
  */
2329
2899
  async getDecayStats() {
2330
2900
  const total = this.db.db.query(
@@ -2343,8 +2913,8 @@ var KiroMemorySDK = class {
2343
2913
  return { total, stale, neverAccessed, recentlyAccessed };
2344
2914
  }
2345
2915
  /**
2346
- * Crea un checkpoint strutturato per resume sessione.
2347
- * Salva automaticamente un context_snapshot con le ultime 10 osservazioni.
2916
+ * Create a structured checkpoint for session resume.
2917
+ * Automatically saves a context_snapshot with the last 10 observations.
2348
2918
  */
2349
2919
  async createCheckpoint(sessionId, data) {
2350
2920
  const recentObs = getObservationsByProject(this.db.db, this.project, 10);
@@ -2361,21 +2931,21 @@ var KiroMemorySDK = class {
2361
2931
  });
2362
2932
  }
2363
2933
  /**
2364
- * Recupera l'ultimo checkpoint di una sessione specifica.
2934
+ * Retrieve the latest checkpoint of a specific session.
2365
2935
  */
2366
2936
  async getCheckpoint(sessionId) {
2367
2937
  return getLatestCheckpoint(this.db.db, sessionId);
2368
2938
  }
2369
2939
  /**
2370
- * Recupera l'ultimo checkpoint per il progetto corrente.
2371
- * Utile per resume automatico senza specificare session ID.
2940
+ * Retrieve the latest checkpoint for the current project.
2941
+ * Useful for automatic resume without specifying session ID.
2372
2942
  */
2373
2943
  async getLatestProjectCheckpoint() {
2374
2944
  return getLatestCheckpointByProject(this.db.db, this.project);
2375
2945
  }
2376
2946
  /**
2377
- * Genera un report di attività per il progetto corrente.
2378
- * Aggrega osservazioni, sessioni, summaries e file per un periodo temporale.
2947
+ * Generate an activity report for the current project.
2948
+ * Aggregates observations, sessions, summaries and files for a time period.
2379
2949
  */
2380
2950
  async generateReport(options) {
2381
2951
  const now = /* @__PURE__ */ new Date();
@@ -2391,6 +2961,66 @@ var KiroMemorySDK = class {
2391
2961
  }
2392
2962
  return getReportData(this.db.db, this.project, startEpoch, endEpoch);
2393
2963
  }
2964
  /**
   * List observations with keyset pagination.
   * Returns an object { data, next_cursor, has_more }.
   *
   * Example:
   *   const page1 = await sdk.listObservations({ limit: 50 });
   *   const page2 = await sdk.listObservations({ cursor: page1.next_cursor });
   *
   * @param {object} [options={}] - { limit?, project?, cursor? }; limit is clamped to [1, 200]
   * @returns {Promise<{data: Array, next_cursor: (string|null), has_more: boolean}>}
   * @throws {Error} when the supplied cursor cannot be decoded
   */
  async listObservations(options = {}) {
    // Clamp page size to a sane range regardless of caller input.
    const limit = Math.min(Math.max(options.limit ?? 50, 1), 200);
    const project = options.project ?? this.project;
    let rows;
    if (options.cursor) {
      const decoded = decodeCursor(options.cursor);
      if (!decoded) throw new Error("Cursor non valido");
      // Keyset predicate: strictly older rows, id as tie-breaker on equal epochs.
      const sql = project ? `SELECT * FROM observations
          WHERE project = ? AND (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
          ORDER BY created_at_epoch DESC, id DESC
          LIMIT ?` : `SELECT * FROM observations
          WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
          ORDER BY created_at_epoch DESC, id DESC
          LIMIT ?`;
      rows = project ? this.db.db.query(sql).all(project, decoded.epoch, decoded.epoch, decoded.id, limit) : this.db.db.query(sql).all(decoded.epoch, decoded.epoch, decoded.id, limit);
    } else {
      // First page: no cursor predicate, newest rows first.
      const sql = project ? "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?" : "SELECT * FROM observations ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
      rows = project ? this.db.db.query(sql).all(project, limit) : this.db.db.query(sql).all(limit);
    }
    // A full page implies more rows may follow; encode the last row as the next cursor.
    const next_cursor = rows.length >= limit ? encodeCursor(rows[rows.length - 1].id, rows[rows.length - 1].created_at_epoch) : null;
    return { data: rows, next_cursor, has_more: next_cursor !== null };
  }
2994
  /**
   * List summaries with keyset pagination.
   * Returns an object { data, next_cursor, has_more }.
   *
   * Example:
   *   const page1 = await sdk.listSummaries({ limit: 20 });
   *   const page2 = await sdk.listSummaries({ cursor: page1.next_cursor });
   *
   * @param {object} [options={}] - { limit?, project?, cursor? }; limit is clamped to [1, 200]
   * @returns {Promise<{data: Array, next_cursor: (string|null), has_more: boolean}>}
   * @throws {Error} when the supplied cursor cannot be decoded
   */
  async listSummaries(options = {}) {
    // Clamp page size to a sane range regardless of caller input.
    const limit = Math.min(Math.max(options.limit ?? 20, 1), 200);
    const project = options.project ?? this.project;
    let rows;
    if (options.cursor) {
      const decoded = decodeCursor(options.cursor);
      if (!decoded) throw new Error("Cursor non valido");
      // Keyset predicate: strictly older rows, id as tie-breaker on equal epochs.
      const sql = project ? `SELECT * FROM summaries
          WHERE project = ? AND (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
          ORDER BY created_at_epoch DESC, id DESC
          LIMIT ?` : `SELECT * FROM summaries
          WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
          ORDER BY created_at_epoch DESC, id DESC
          LIMIT ?`;
      rows = project ? this.db.db.query(sql).all(project, decoded.epoch, decoded.epoch, decoded.id, limit) : this.db.db.query(sql).all(decoded.epoch, decoded.epoch, decoded.id, limit);
    } else {
      // First page: no cursor predicate, newest rows first.
      const sql = project ? "SELECT * FROM summaries WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?" : "SELECT * FROM summaries ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
      rows = project ? this.db.db.query(sql).all(project, limit) : this.db.db.query(sql).all(limit);
    }
    // A full page implies more rows may follow; encode the last row as the next cursor.
    const next_cursor = rows.length >= limit ? encodeCursor(rows[rows.length - 1].id, rows[rows.length - 1].created_at_epoch) : null;
    return { data: rows, next_cursor, has_more: next_cursor !== null };
  }
2394
3024
  /**
2395
3025
  * Getter for direct database access (for API routes)
2396
3026
  */
@@ -2413,10 +3043,11 @@ runHook("stop", async (input) => {
2413
3043
  const project = detectProject(input.cwd);
2414
3044
  const sdk = createKiroMemory({ project, skipMigrations: true });
2415
3045
  try {
3046
+ const contentSessionId = input.session_id || `stop-${Date.now()}`;
3047
+ const session = await sdk.getOrCreateSession(contentSessionId);
2416
3048
  const recentObs = await sdk.getRecentObservations(50);
2417
- const sessionId = input.session_id;
2418
- const fourHoursAgo = Date.now() - 4 * 60 * 60 * 1e3;
2419
- const sessionObs = sessionId ? recentObs.filter((o) => o.memory_session_id === sessionId) : recentObs.filter((o) => o.created_at_epoch > fourHoursAgo);
3049
+ const sessionStart = session.started_at_epoch || Date.now() - 4 * 60 * 60 * 1e3;
3050
+ const sessionObs = recentObs.filter((o) => o.created_at_epoch >= sessionStart);
2420
3051
  if (sessionObs.length === 0) return;
2421
3052
  const byType = /* @__PURE__ */ new Map();
2422
3053
  for (const obs of sessionObs) {
@@ -2447,7 +3078,7 @@ runHook("stop", async (input) => {
2447
3078
  filesModified.length > 0 ? `Files modified: ${filesModified.slice(0, 10).join(", ")}` : "",
2448
3079
  sessionConcepts.length > 0 ? `Concepts: ${sessionConcepts.join(", ")}` : ""
2449
3080
  ].filter(Boolean).join(". ") || void 0;
2450
- const mainAction = writes.length > 0 ? `${writes.length} file modific${writes.length === 1 ? "ato" : "ati"}` : commands.length > 0 ? `${commands.length} comand${commands.length === 1 ? "o" : "i"}` : `${sessionObs.length} osservazion${sessionObs.length === 1 ? "e" : "i"}`;
3081
+ const mainAction = writes.length > 0 ? `${writes.length} file${writes.length === 1 ? "" : "s"} modified` : commands.length > 0 ? `${commands.length} command${commands.length === 1 ? "" : "s"}` : `${sessionObs.length} observation${sessionObs.length === 1 ? "" : "s"}`;
2451
3082
  await sdk.storeSummary({
2452
3083
  request: `${project} \u2014 ${mainAction} \u2014 ${(/* @__PURE__ */ new Date()).toISOString().split("T")[0]}`,
2453
3084
  investigated,
@@ -2456,7 +3087,6 @@ runHook("stop", async (input) => {
2456
3087
  nextSteps
2457
3088
  });
2458
3089
  await notifyWorker("summary-created", { project });
2459
- const session = await sdk.getOrCreateSession(input.session_id || `stop-${Date.now()}`);
2460
3090
  const task = sessionObs[0]?.title || `${project} session`;
2461
3091
  const progress = completed || "No progress recorded";
2462
3092
  const nextStepsCheckpoint = filesModified.length > 0 ? `Continue work on: ${filesModified.slice(0, 5).join(", ")}` : void 0;