kiro-memory 1.9.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. package/README.md +5 -1
  2. package/package.json +5 -5
  3. package/plugin/dist/cli/contextkit.js +2611 -345
  4. package/plugin/dist/hooks/agentSpawn.js +853 -223
  5. package/plugin/dist/hooks/kiro-hooks.js +841 -211
  6. package/plugin/dist/hooks/postToolUse.js +853 -222
  7. package/plugin/dist/hooks/stop.js +850 -220
  8. package/plugin/dist/hooks/userPromptSubmit.js +848 -216
  9. package/plugin/dist/index.js +843 -340
  10. package/plugin/dist/plugins/github/github-client.js +152 -0
  11. package/plugin/dist/plugins/github/index.js +412 -0
  12. package/plugin/dist/plugins/github/issue-parser.js +54 -0
  13. package/plugin/dist/plugins/slack/formatter.js +90 -0
  14. package/plugin/dist/plugins/slack/index.js +215 -0
  15. package/plugin/dist/sdk/index.js +841 -215
  16. package/plugin/dist/servers/mcp-server.js +4461 -397
  17. package/plugin/dist/services/search/EmbeddingService.js +146 -37
  18. package/plugin/dist/services/search/HybridSearch.js +564 -116
  19. package/plugin/dist/services/search/VectorSearch.js +187 -60
  20. package/plugin/dist/services/search/index.js +565 -254
  21. package/plugin/dist/services/sqlite/Backup.js +416 -0
  22. package/plugin/dist/services/sqlite/Database.js +126 -153
  23. package/plugin/dist/services/sqlite/ImportExport.js +452 -0
  24. package/plugin/dist/services/sqlite/Observations.js +314 -19
  25. package/plugin/dist/services/sqlite/Prompts.js +1 -1
  26. package/plugin/dist/services/sqlite/Search.js +41 -29
  27. package/plugin/dist/services/sqlite/Summaries.js +4 -4
  28. package/plugin/dist/services/sqlite/index.js +1428 -208
  29. package/plugin/dist/viewer.css +1 -0
  30. package/plugin/dist/viewer.html +2 -179
  31. package/plugin/dist/viewer.js +23 -24942
  32. package/plugin/dist/viewer.js.map +7 -0
  33. package/plugin/dist/worker-service.js +427 -5569
  34. package/plugin/dist/worker-service.js.map +7 -0
@@ -15,6 +15,290 @@ var __export = (target, all) => {
15
15
  __defProp(target, name, { get: all[name], enumerable: true });
16
16
  };
17
17
 
18
+ // src/utils/secrets.ts
19
+ function redactSecrets(text) {
20
+ if (!text) return text;
21
+ let redacted = text;
22
+ for (const { pattern } of SECRET_PATTERNS) {
23
+ pattern.lastIndex = 0;
24
+ redacted = redacted.replace(pattern, (match) => {
25
+ const prefix = match.substring(0, Math.min(4, match.length));
26
+ return `${prefix}***REDACTED***`;
27
+ });
28
+ }
29
+ return redacted;
30
+ }
31
+ var SECRET_PATTERNS;
32
+ var init_secrets = __esm({
33
+ "src/utils/secrets.ts"() {
34
+ "use strict";
35
+ SECRET_PATTERNS = [
36
+ // AWS Access Keys (AKIA, ABIA, ACCA, ASIA prefixes + 16 alphanumeric chars)
37
+ { name: "aws-key", pattern: /(?:AKIA|ABIA|ACCA|ASIA)[A-Z0-9]{16}/g },
38
+ // JWT tokens (three base64url segments separated by dots)
39
+ { name: "jwt", pattern: /eyJ[a-zA-Z0-9_-]{10,}\.eyJ[a-zA-Z0-9_-]{10,}\.[a-zA-Z0-9_-]{10,}/g },
40
+ // Generic API keys in key=value or key: value assignments
41
+ { name: "api-key", pattern: /(?:api[_-]?key|apikey|api[_-]?secret)\s*[:=]\s*['"]?([a-zA-Z0-9_\-]{20,})['"]?/gi },
42
+ // Password/secret/token in variable assignments
43
+ { name: "credential", pattern: /(?:password|passwd|pwd|secret|token|auth[_-]?token|access[_-]?token|bearer)\s*[:=]\s*['"]?([^\s'"]{8,})['"]?/gi },
44
+ // Credentials embedded in URLs (user:pass@host)
45
+ { name: "url-credential", pattern: /(?:https?:\/\/)([^:]+):([^@]+)@/g },
46
+ // PEM-encoded private keys (RSA, EC, DSA, OpenSSH)
47
+ { name: "private-key", pattern: /-----BEGIN (?:RSA |EC |DSA |OPENSSH )?PRIVATE KEY-----/g },
48
+ // GitHub personal access tokens (ghp_, gho_, ghu_, ghs_, ghr_ prefixes)
49
+ { name: "github-token", pattern: /gh[pousr]_[a-zA-Z0-9]{36,}/g },
50
+ // Slack bot/user/app tokens
51
+ { name: "slack-token", pattern: /xox[bpoas]-[a-zA-Z0-9-]{10,}/g },
52
+ // HTTP Authorization Bearer header values
53
+ { name: "bearer-header", pattern: /\bBearer\s+([a-zA-Z0-9_\-\.]{20,})/g },
54
+ // Generic hex secrets (32+ hex chars after a key/secret/token/password label)
55
+ { name: "hex-secret", pattern: /(?:key|secret|token|password)\s*[:=]\s*['"]?([0-9a-f]{32,})['"]?/gi }
56
+ ];
57
+ }
58
+ });
59
+
60
+ // src/utils/categorizer.ts
61
+ function categorize(input) {
62
+ const scores = /* @__PURE__ */ new Map();
63
+ const searchText = [
64
+ input.title,
65
+ input.text || "",
66
+ input.narrative || "",
67
+ input.concepts || ""
68
+ ].join(" ").toLowerCase();
69
+ const allFiles = [input.filesModified || "", input.filesRead || ""].join(",");
70
+ for (const rule of CATEGORY_RULES) {
71
+ let score = 0;
72
+ for (const kw of rule.keywords) {
73
+ if (searchText.includes(kw.toLowerCase())) {
74
+ score += rule.weight;
75
+ }
76
+ }
77
+ if (rule.types && rule.types.includes(input.type)) {
78
+ score += rule.weight * 2;
79
+ }
80
+ if (rule.filePatterns && allFiles) {
81
+ for (const pattern of rule.filePatterns) {
82
+ if (pattern.test(allFiles)) {
83
+ score += rule.weight;
84
+ }
85
+ }
86
+ }
87
+ if (score > 0) {
88
+ scores.set(rule.category, (scores.get(rule.category) || 0) + score);
89
+ }
90
+ }
91
+ let bestCategory = "general";
92
+ let bestScore = 0;
93
+ for (const [category, score] of scores) {
94
+ if (score > bestScore) {
95
+ bestScore = score;
96
+ bestCategory = category;
97
+ }
98
+ }
99
+ return bestCategory;
100
+ }
101
+ var CATEGORY_RULES;
102
+ var init_categorizer = __esm({
103
+ "src/utils/categorizer.ts"() {
104
+ "use strict";
105
+ CATEGORY_RULES = [
106
+ {
107
+ category: "security",
108
+ keywords: [
109
+ "security",
110
+ "vulnerability",
111
+ "cve",
112
+ "xss",
113
+ "csrf",
114
+ "injection",
115
+ "sanitize",
116
+ "escape",
117
+ "auth",
118
+ "authentication",
119
+ "authorization",
120
+ "permission",
121
+ "helmet",
122
+ "cors",
123
+ "rate-limit",
124
+ "token",
125
+ "encrypt",
126
+ "decrypt",
127
+ "secret",
128
+ "redact",
129
+ "owasp"
130
+ ],
131
+ filePatterns: [/security/i, /auth/i, /secrets?\.ts/i],
132
+ weight: 10
133
+ },
134
+ {
135
+ category: "testing",
136
+ keywords: [
137
+ "test",
138
+ "spec",
139
+ "expect",
140
+ "assert",
141
+ "mock",
142
+ "stub",
143
+ "fixture",
144
+ "coverage",
145
+ "jest",
146
+ "vitest",
147
+ "bun test",
148
+ "unit test",
149
+ "integration test",
150
+ "e2e"
151
+ ],
152
+ types: ["test"],
153
+ filePatterns: [/\.test\./i, /\.spec\./i, /tests?\//i, /__tests__/i],
154
+ weight: 8
155
+ },
156
+ {
157
+ category: "debugging",
158
+ keywords: [
159
+ "debug",
160
+ "fix",
161
+ "bug",
162
+ "error",
163
+ "crash",
164
+ "stacktrace",
165
+ "stack trace",
166
+ "exception",
167
+ "breakpoint",
168
+ "investigate",
169
+ "root cause",
170
+ "troubleshoot",
171
+ "diagnose",
172
+ "bisect",
173
+ "regression"
174
+ ],
175
+ types: ["bugfix"],
176
+ weight: 8
177
+ },
178
+ {
179
+ category: "architecture",
180
+ keywords: [
181
+ "architect",
182
+ "design",
183
+ "pattern",
184
+ "modular",
185
+ "migration",
186
+ "schema",
187
+ "database",
188
+ "api design",
189
+ "abstract",
190
+ "dependency injection",
191
+ "singleton",
192
+ "factory",
193
+ "observer",
194
+ "middleware",
195
+ "pipeline",
196
+ "microservice",
197
+ "monolith"
198
+ ],
199
+ types: ["decision", "constraint"],
200
+ weight: 7
201
+ },
202
+ {
203
+ category: "refactoring",
204
+ keywords: [
205
+ "refactor",
206
+ "rename",
207
+ "extract",
208
+ "inline",
209
+ "move",
210
+ "split",
211
+ "merge",
212
+ "simplify",
213
+ "cleanup",
214
+ "clean up",
215
+ "dead code",
216
+ "consolidate",
217
+ "reorganize",
218
+ "restructure",
219
+ "decouple"
220
+ ],
221
+ weight: 6
222
+ },
223
+ {
224
+ category: "config",
225
+ keywords: [
226
+ "config",
227
+ "configuration",
228
+ "env",
229
+ "environment",
230
+ "dotenv",
231
+ ".env",
232
+ "settings",
233
+ "tsconfig",
234
+ "eslint",
235
+ "prettier",
236
+ "webpack",
237
+ "vite",
238
+ "esbuild",
239
+ "docker",
240
+ "ci/cd",
241
+ "github actions",
242
+ "deploy",
243
+ "build",
244
+ "bundle",
245
+ "package.json"
246
+ ],
247
+ filePatterns: [
248
+ /\.config\./i,
249
+ /\.env/i,
250
+ /tsconfig/i,
251
+ /\.ya?ml/i,
252
+ /Dockerfile/i,
253
+ /docker-compose/i
254
+ ],
255
+ weight: 5
256
+ },
257
+ {
258
+ category: "docs",
259
+ keywords: [
260
+ "document",
261
+ "readme",
262
+ "changelog",
263
+ "jsdoc",
264
+ "comment",
265
+ "explain",
266
+ "guide",
267
+ "tutorial",
268
+ "api doc",
269
+ "openapi",
270
+ "swagger"
271
+ ],
272
+ types: ["docs"],
273
+ filePatterns: [/\.md$/i, /docs?\//i, /readme/i, /changelog/i],
274
+ weight: 5
275
+ },
276
+ {
277
+ category: "feature-dev",
278
+ keywords: [
279
+ "feature",
280
+ "implement",
281
+ "add",
282
+ "create",
283
+ "new",
284
+ "endpoint",
285
+ "component",
286
+ "module",
287
+ "service",
288
+ "handler",
289
+ "route",
290
+ "hook",
291
+ "plugin",
292
+ "integration"
293
+ ],
294
+ types: ["feature", "file-write"],
295
+ weight: 3
296
+ // lowest — generic catch-all for development
297
+ }
298
+ ];
299
+ }
300
+ });
301
+
18
302
  // src/services/sqlite/Observations.ts
19
303
  var Observations_exports = {};
20
304
  __export(Observations_exports, {
@@ -40,11 +324,23 @@ function isDuplicateObservation(db, contentHash, windowMs = 3e4) {
40
324
  }
41
325
  function createObservation(db, memorySessionId, project, type, title, subtitle, text, narrative, facts, concepts, filesRead, filesModified, promptNumber, contentHash = null, discoveryTokens = 0) {
42
326
  const now = /* @__PURE__ */ new Date();
327
+ const safeTitle = redactSecrets(title);
328
+ const safeText = text ? redactSecrets(text) : text;
329
+ const safeNarrative = narrative ? redactSecrets(narrative) : narrative;
330
+ const autoCategory = categorize({
331
+ type,
332
+ title: safeTitle,
333
+ text: safeText,
334
+ narrative: safeNarrative,
335
+ concepts,
336
+ filesModified,
337
+ filesRead
338
+ });
43
339
  const result = db.run(
44
340
  `INSERT INTO observations
45
- (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch, content_hash, discovery_tokens)
46
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
47
- [memorySessionId, project, type, title, subtitle, text, narrative, facts, concepts, filesRead, filesModified, promptNumber, now.toISOString(), now.getTime(), contentHash, discoveryTokens]
341
+ (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch, content_hash, discovery_tokens, auto_category)
342
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
343
+ [memorySessionId, project, type, safeTitle, subtitle, safeText, safeNarrative, facts, concepts, filesRead, filesModified, promptNumber, now.toISOString(), now.getTime(), contentHash, discoveryTokens, autoCategory]
48
344
  );
49
345
  return Number(result.lastInsertRowid);
50
346
  }
@@ -56,16 +352,16 @@ function getObservationsBySession(db, memorySessionId) {
56
352
  }
57
353
  function getObservationsByProject(db, project, limit = 100) {
58
354
  const query = db.query(
59
- "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC LIMIT ?"
355
+ "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?"
60
356
  );
61
357
  return query.all(project, limit);
62
358
  }
63
359
  function searchObservations(db, searchTerm, project) {
64
360
  const sql = project ? `SELECT * FROM observations
65
361
  WHERE project = ? AND (title LIKE ? ESCAPE '\\' OR text LIKE ? ESCAPE '\\' OR narrative LIKE ? ESCAPE '\\')
66
- ORDER BY created_at_epoch DESC` : `SELECT * FROM observations
362
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT * FROM observations
67
363
  WHERE title LIKE ? ESCAPE '\\' OR text LIKE ? ESCAPE '\\' OR narrative LIKE ? ESCAPE '\\'
68
- ORDER BY created_at_epoch DESC`;
364
+ ORDER BY created_at_epoch DESC, id DESC`;
69
365
  const pattern = `%${escapeLikePattern(searchTerm)}%`;
70
366
  const query = db.query(sql);
71
367
  if (project) {
@@ -98,21 +394,32 @@ function consolidateObservations(db, project, options = {}) {
98
394
  ORDER BY cnt DESC
99
395
  `).all(project, minGroupSize);
100
396
  if (groups.length === 0) return { merged: 0, removed: 0 };
101
- let totalMerged = 0;
102
- let totalRemoved = 0;
397
+ if (options.dryRun) {
398
+ let totalMerged = 0;
399
+ let totalRemoved = 0;
400
+ for (const group of groups) {
401
+ const obsIds = group.ids.split(",").map(Number);
402
+ const placeholders = obsIds.map(() => "?").join(",");
403
+ const count = db.query(
404
+ `SELECT COUNT(*) as cnt FROM observations WHERE id IN (${placeholders})`
405
+ ).get(...obsIds)?.cnt || 0;
406
+ if (count >= minGroupSize) {
407
+ totalMerged += 1;
408
+ totalRemoved += count - 1;
409
+ }
410
+ }
411
+ return { merged: totalMerged, removed: totalRemoved };
412
+ }
103
413
  const runConsolidation = db.transaction(() => {
414
+ let merged = 0;
415
+ let removed = 0;
104
416
  for (const group of groups) {
105
417
  const obsIds = group.ids.split(",").map(Number);
106
418
  const placeholders = obsIds.map(() => "?").join(",");
107
419
  const observations = db.query(
108
- `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC`
420
+ `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC, id DESC`
109
421
  ).all(...obsIds);
110
422
  if (observations.length < minGroupSize) continue;
111
- if (options.dryRun) {
112
- totalMerged += 1;
113
- totalRemoved += observations.length - 1;
114
- continue;
115
- }
116
423
  const keeper = observations[0];
117
424
  const others = observations.slice(1);
118
425
  const uniqueTexts = /* @__PURE__ */ new Set();
@@ -125,22 +432,24 @@ function consolidateObservations(db, project, options = {}) {
125
432
  const consolidatedText = Array.from(uniqueTexts).join("\n---\n").substring(0, 1e5);
126
433
  db.run(
127
434
  "UPDATE observations SET text = ?, title = ? WHERE id = ?",
128
- [consolidatedText, `[consolidato x${observations.length}] ${keeper.title}`, keeper.id]
435
+ [consolidatedText, `[consolidated x${observations.length}] ${keeper.title}`, keeper.id]
129
436
  );
130
437
  const removeIds = others.map((o) => o.id);
131
438
  const removePlaceholders = removeIds.map(() => "?").join(",");
132
439
  db.run(`DELETE FROM observations WHERE id IN (${removePlaceholders})`, removeIds);
133
440
  db.run(`DELETE FROM observation_embeddings WHERE observation_id IN (${removePlaceholders})`, removeIds);
134
- totalMerged += 1;
135
- totalRemoved += removeIds.length;
441
+ merged += 1;
442
+ removed += removeIds.length;
136
443
  }
444
+ return { merged, removed };
137
445
  });
138
- runConsolidation();
139
- return { merged: totalMerged, removed: totalRemoved };
446
+ return runConsolidation();
140
447
  }
141
448
  var init_Observations = __esm({
142
449
  "src/services/sqlite/Observations.ts"() {
143
450
  "use strict";
451
+ init_secrets();
452
+ init_categorizer();
144
453
  }
145
454
  });
146
455
 
@@ -163,7 +472,7 @@ function escapeLikePattern3(input) {
163
472
  }
164
473
  function sanitizeFTS5Query(query) {
165
474
  const trimmed = query.length > 1e4 ? query.substring(0, 1e4) : query;
166
- const terms = trimmed.replace(/[""]/g, "").split(/\s+/).filter((t) => t.length > 0).slice(0, 100).map((t) => `"${t}"`);
475
+ const terms = trimmed.replace(/[""\u0022]/g, "").split(/\s+/).filter((t) => t.length > 0).slice(0, 100).map((t) => `"${t}"`);
167
476
  return terms.join(" ");
168
477
  }
169
478
  function searchObservationsFTS(db, query, filters = {}) {
@@ -260,7 +569,7 @@ function searchObservationsLIKE(db, query, filters = {}) {
260
569
  sql += " AND created_at_epoch <= ?";
261
570
  params.push(filters.dateEnd);
262
571
  }
263
- sql += " ORDER BY created_at_epoch DESC LIMIT ?";
572
+ sql += " ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
264
573
  params.push(limit);
265
574
  const stmt = db.query(sql);
266
575
  return stmt.all(...params);
@@ -285,7 +594,7 @@ function searchSummariesFiltered(db, query, filters = {}) {
285
594
  sql += " AND created_at_epoch <= ?";
286
595
  params.push(filters.dateEnd);
287
596
  }
288
- sql += " ORDER BY created_at_epoch DESC LIMIT ?";
597
+ sql += " ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
289
598
  params.push(limit);
290
599
  const stmt = db.query(sql);
291
600
  return stmt.all(...params);
@@ -295,7 +604,7 @@ function getObservationsByIds(db, ids) {
295
604
  const validIds = ids.filter((id) => typeof id === "number" && Number.isInteger(id) && id > 0).slice(0, 500);
296
605
  if (validIds.length === 0) return [];
297
606
  const placeholders = validIds.map(() => "?").join(",");
298
- const sql = `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC`;
607
+ const sql = `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC, id DESC`;
299
608
  const stmt = db.query(sql);
300
609
  return stmt.all(...validIds);
301
610
  }
@@ -307,11 +616,11 @@ function getTimeline(db, anchorId, depthBefore = 5, depthAfter = 5) {
307
616
  const beforeStmt = db.query(`
308
617
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
309
618
  FROM observations
310
- WHERE created_at_epoch < ?
311
- ORDER BY created_at_epoch DESC
619
+ WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
620
+ ORDER BY created_at_epoch DESC, id DESC
312
621
  LIMIT ?
313
622
  `);
314
- const before = beforeStmt.all(anchorEpoch, depthBefore).reverse();
623
+ const before = beforeStmt.all(anchorEpoch, anchorEpoch, anchorId, depthBefore).reverse();
315
624
  const selfStmt = db.query(`
316
625
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
317
626
  FROM observations WHERE id = ?
@@ -320,34 +629,46 @@ function getTimeline(db, anchorId, depthBefore = 5, depthAfter = 5) {
320
629
  const afterStmt = db.query(`
321
630
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
322
631
  FROM observations
323
- WHERE created_at_epoch > ?
324
- ORDER BY created_at_epoch ASC
632
+ WHERE (created_at_epoch > ? OR (created_at_epoch = ? AND id > ?))
633
+ ORDER BY created_at_epoch ASC, id ASC
325
634
  LIMIT ?
326
635
  `);
327
- const after = afterStmt.all(anchorEpoch, depthAfter);
636
+ const after = afterStmt.all(anchorEpoch, anchorEpoch, anchorId, depthAfter);
328
637
  return [...before, ...self, ...after];
329
638
  }
330
639
  function getProjectStats(db, project) {
331
- const obsStmt = db.query("SELECT COUNT(*) as count FROM observations WHERE project = ?");
332
- const sumStmt = db.query("SELECT COUNT(*) as count FROM summaries WHERE project = ?");
333
- const sesStmt = db.query("SELECT COUNT(*) as count FROM sessions WHERE project = ?");
334
- const prmStmt = db.query("SELECT COUNT(*) as count FROM prompts WHERE project = ?");
335
- const discoveryStmt = db.query(
336
- "SELECT COALESCE(SUM(discovery_tokens), 0) as total FROM observations WHERE project = ?"
337
- );
338
- const discoveryTokens = discoveryStmt.get(project)?.total || 0;
339
- const readStmt = db.query(
340
- `SELECT COALESCE(SUM(
341
- CAST((LENGTH(COALESCE(title, '')) + LENGTH(COALESCE(narrative, ''))) / 4 AS INTEGER)
342
- ), 0) as total FROM observations WHERE project = ?`
343
- );
344
- const readTokens = readStmt.get(project)?.total || 0;
640
+ const sql = `
641
+ WITH
642
+ obs_stats AS (
643
+ SELECT
644
+ COUNT(*) as count,
645
+ COALESCE(SUM(discovery_tokens), 0) as discovery_tokens,
646
+ COALESCE(SUM(
647
+ CAST((LENGTH(COALESCE(title, '')) + LENGTH(COALESCE(narrative, ''))) / 4 AS INTEGER)
648
+ ), 0) as read_tokens
649
+ FROM observations WHERE project = ?
650
+ ),
651
+ sum_count AS (SELECT COUNT(*) as count FROM summaries WHERE project = ?),
652
+ ses_count AS (SELECT COUNT(*) as count FROM sessions WHERE project = ?),
653
+ prm_count AS (SELECT COUNT(*) as count FROM prompts WHERE project = ?)
654
+ SELECT
655
+ obs_stats.count as observations,
656
+ obs_stats.discovery_tokens,
657
+ obs_stats.read_tokens,
658
+ sum_count.count as summaries,
659
+ ses_count.count as sessions,
660
+ prm_count.count as prompts
661
+ FROM obs_stats, sum_count, ses_count, prm_count
662
+ `;
663
+ const row = db.query(sql).get(project, project, project, project);
664
+ const discoveryTokens = row?.discovery_tokens || 0;
665
+ const readTokens = row?.read_tokens || 0;
345
666
  const savings = Math.max(0, discoveryTokens - readTokens);
346
667
  return {
347
- observations: obsStmt.get(project)?.count || 0,
348
- summaries: sumStmt.get(project)?.count || 0,
349
- sessions: sesStmt.get(project)?.count || 0,
350
- prompts: prmStmt.get(project)?.count || 0,
668
+ observations: row?.observations || 0,
669
+ summaries: row?.summaries || 0,
670
+ sessions: row?.sessions || 0,
671
+ prompts: row?.prompts || 0,
351
672
  tokenEconomics: { discoveryTokens, readTokens, savings }
352
673
  };
353
674
  }
@@ -355,7 +676,7 @@ function getStaleObservations(db, project) {
355
676
  const rows = db.query(`
356
677
  SELECT * FROM observations
357
678
  WHERE project = ? AND files_modified IS NOT NULL AND files_modified != ''
358
- ORDER BY created_at_epoch DESC
679
+ ORDER BY created_at_epoch DESC, id DESC
359
680
  LIMIT 500
360
681
  `).all(project);
361
682
  const staleObs = [];
@@ -402,14 +723,15 @@ var init_Search = __esm({
402
723
  import BetterSqlite3 from "better-sqlite3";
403
724
  var Database = class {
404
725
  _db;
726
+ _stmtCache = /* @__PURE__ */ new Map();
405
727
  constructor(path, options) {
406
728
  this._db = new BetterSqlite3(path, {
407
- // better-sqlite3 crea il file di default (non serve 'create')
729
+ // better-sqlite3 creates the file by default ('create' not needed)
408
730
  readonly: options?.readwrite === false ? true : false
409
731
  });
410
732
  }
411
733
  /**
412
- * Esegui una query SQL senza risultati
734
+ * Execute a SQL query without results
413
735
  */
414
736
  run(sql, params) {
415
737
  const stmt = this._db.prepare(sql);
@@ -417,51 +739,53 @@ var Database = class {
417
739
  return result;
418
740
  }
419
741
  /**
420
- * Prepara una query con interfaccia compatibile bun:sqlite
742
+ * Prepare a query with bun:sqlite-compatible interface.
743
+ * Returns a cached prepared statement for repeated queries.
421
744
  */
422
745
  query(sql) {
423
- return new BunQueryCompat(this._db, sql);
746
+ let cached = this._stmtCache.get(sql);
747
+ if (!cached) {
748
+ cached = new BunQueryCompat(this._db, sql);
749
+ this._stmtCache.set(sql, cached);
750
+ }
751
+ return cached;
424
752
  }
425
753
  /**
426
- * Crea una transazione
754
+ * Create a transaction
427
755
  */
428
756
  transaction(fn) {
429
757
  return this._db.transaction(fn);
430
758
  }
431
759
  /**
432
- * Chiudi la connessione
760
+ * Close the connection
433
761
  */
434
762
  close() {
763
+ this._stmtCache.clear();
435
764
  this._db.close();
436
765
  }
437
766
  };
438
767
  var BunQueryCompat = class {
439
- _db;
440
- _sql;
768
+ _stmt;
441
769
  constructor(db, sql) {
442
- this._db = db;
443
- this._sql = sql;
770
+ this._stmt = db.prepare(sql);
444
771
  }
445
772
  /**
446
- * Restituisce tutte le righe
773
+ * Returns all rows
447
774
  */
448
775
  all(...params) {
449
- const stmt = this._db.prepare(this._sql);
450
- return params.length > 0 ? stmt.all(...params) : stmt.all();
776
+ return params.length > 0 ? this._stmt.all(...params) : this._stmt.all();
451
777
  }
452
778
  /**
453
- * Restituisce la prima riga o null
779
+ * Returns the first row or null
454
780
  */
455
781
  get(...params) {
456
- const stmt = this._db.prepare(this._sql);
457
- return params.length > 0 ? stmt.get(...params) : stmt.get();
782
+ return params.length > 0 ? this._stmt.get(...params) : this._stmt.get();
458
783
  }
459
784
  /**
460
- * Esegui senza risultati
785
+ * Execute without results
461
786
  */
462
787
  run(...params) {
463
- const stmt = this._db.prepare(this._sql);
464
- return params.length > 0 ? stmt.run(...params) : stmt.run();
788
+ return params.length > 0 ? this._stmt.run(...params) : this._stmt.run();
465
789
  }
466
790
  };
467
791
 
@@ -723,40 +1047,62 @@ function ensureDir(dirPath) {
723
1047
  var SQLITE_MMAP_SIZE_BYTES = 256 * 1024 * 1024;
724
1048
  var SQLITE_CACHE_SIZE_PAGES = 1e4;
725
1049
  var KiroMemoryDatabase = class {
726
- db;
1050
+ _db;
727
1051
  /**
728
- * @param dbPath - Percorso al file SQLite (default: DB_PATH)
729
- * @param skipMigrations - Se true, salta il migration runner (per hook ad alta frequenza)
1052
+ * Readonly accessor for the underlying Database instance.
1053
+ * Prefer using query() and run() proxy methods directly.
1054
+ */
1055
+ get db() {
1056
+ return this._db;
1057
+ }
1058
+ /**
1059
+ * @param dbPath - Path to the SQLite file (default: DB_PATH)
1060
+ * @param skipMigrations - If true, skip the migration runner (for high-frequency hooks)
730
1061
  */
731
1062
  constructor(dbPath = DB_PATH, skipMigrations = false) {
732
1063
  if (dbPath !== ":memory:") {
733
1064
  ensureDir(DATA_DIR);
734
1065
  }
735
- this.db = new Database(dbPath, { create: true, readwrite: true });
736
- this.db.run("PRAGMA journal_mode = WAL");
737
- this.db.run("PRAGMA synchronous = NORMAL");
738
- this.db.run("PRAGMA foreign_keys = ON");
739
- this.db.run("PRAGMA temp_store = memory");
740
- this.db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
741
- this.db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
1066
+ this._db = new Database(dbPath, { create: true, readwrite: true });
1067
+ this._db.run("PRAGMA journal_mode = WAL");
1068
+ this._db.run("PRAGMA busy_timeout = 5000");
1069
+ this._db.run("PRAGMA synchronous = NORMAL");
1070
+ this._db.run("PRAGMA foreign_keys = ON");
1071
+ this._db.run("PRAGMA temp_store = memory");
1072
+ this._db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
1073
+ this._db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
742
1074
  if (!skipMigrations) {
743
- const migrationRunner = new MigrationRunner(this.db);
1075
+ const migrationRunner = new MigrationRunner(this._db);
744
1076
  migrationRunner.runAllMigrations();
745
1077
  }
746
1078
  }
747
1079
  /**
748
- * Esegue una funzione all'interno di una transazione atomica.
749
- * Se fn() lancia un errore, la transazione viene annullata automaticamente.
1080
+ * Prepare a query (delegates to underlying Database).
1081
+ * Proxy method to avoid ctx.db.db.query() double access.
1082
+ */
1083
+ query(sql) {
1084
+ return this._db.query(sql);
1085
+ }
1086
+ /**
1087
+ * Execute a SQL statement without results (delegates to underlying Database).
1088
+ * Proxy method to avoid ctx.db.db.run() double access.
1089
+ */
1090
+ run(sql, params) {
1091
+ return this._db.run(sql, params);
1092
+ }
1093
+ /**
1094
+ * Executes a function within an atomic transaction.
1095
+ * If fn() throws an error, the transaction is automatically rolled back.
750
1096
  */
751
1097
  withTransaction(fn) {
752
- const transaction = this.db.transaction(fn);
753
- return transaction(this.db);
1098
+ const transaction = this._db.transaction(fn);
1099
+ return transaction(this._db);
754
1100
  }
755
1101
  /**
756
1102
  * Close the database connection
757
1103
  */
758
1104
  close() {
759
- this.db.close();
1105
+ this._db.close();
760
1106
  }
761
1107
  };
762
1108
  var MigrationRunner = class {
@@ -997,11 +1343,104 @@ var MigrationRunner = class {
997
1343
  db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_epoch ON summaries(project, created_at_epoch DESC)");
998
1344
  db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_epoch ON prompts(project, created_at_epoch DESC)");
999
1345
  }
1346
+ },
1347
+ {
1348
+ version: 10,
1349
+ up: (db) => {
1350
+ db.run(`
1351
+ CREATE TABLE IF NOT EXISTS job_queue (
1352
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1353
+ type TEXT NOT NULL,
1354
+ status TEXT NOT NULL DEFAULT 'pending',
1355
+ payload TEXT,
1356
+ result TEXT,
1357
+ error TEXT,
1358
+ retry_count INTEGER DEFAULT 0,
1359
+ max_retries INTEGER DEFAULT 3,
1360
+ priority INTEGER DEFAULT 0,
1361
+ created_at TEXT NOT NULL,
1362
+ created_at_epoch INTEGER NOT NULL,
1363
+ started_at_epoch INTEGER,
1364
+ completed_at_epoch INTEGER
1365
+ )
1366
+ `);
1367
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_status ON job_queue(status)");
1368
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_type ON job_queue(type)");
1369
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_priority ON job_queue(status, priority DESC, created_at_epoch ASC)");
1370
+ }
1371
+ },
1372
+ {
1373
+ version: 11,
1374
+ up: (db) => {
1375
+ db.run("ALTER TABLE observations ADD COLUMN auto_category TEXT");
1376
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_category ON observations(auto_category)");
1377
+ }
1378
+ },
1379
+ {
1380
+ version: 12,
1381
+ up: (db) => {
1382
+ db.run(`
1383
+ CREATE TABLE IF NOT EXISTS github_links (
1384
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1385
+ observation_id INTEGER,
1386
+ session_id TEXT,
1387
+ repo TEXT NOT NULL,
1388
+ issue_number INTEGER,
1389
+ pr_number INTEGER,
1390
+ event_type TEXT NOT NULL,
1391
+ action TEXT,
1392
+ title TEXT,
1393
+ url TEXT,
1394
+ author TEXT,
1395
+ created_at TEXT NOT NULL,
1396
+ created_at_epoch INTEGER NOT NULL,
1397
+ FOREIGN KEY (observation_id) REFERENCES observations(id)
1398
+ )
1399
+ `);
1400
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo ON github_links(repo)");
1401
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_obs ON github_links(observation_id)");
1402
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_event ON github_links(event_type)");
1403
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_issue ON github_links(repo, issue_number)");
1404
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_pr ON github_links(repo, pr_number)");
1405
+ }
1406
+ },
1407
+ {
1408
+ version: 13,
1409
+ up: (db) => {
1410
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_keyset ON observations(created_at_epoch DESC, id DESC)");
1411
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_project_keyset ON observations(project, created_at_epoch DESC, id DESC)");
1412
+ db.run("CREATE INDEX IF NOT EXISTS idx_summaries_keyset ON summaries(created_at_epoch DESC, id DESC)");
1413
+ db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_keyset ON summaries(project, created_at_epoch DESC, id DESC)");
1414
+ db.run("CREATE INDEX IF NOT EXISTS idx_prompts_keyset ON prompts(created_at_epoch DESC, id DESC)");
1415
+ db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_keyset ON prompts(project, created_at_epoch DESC, id DESC)");
1416
+ }
1000
1417
  }
1001
1418
  ];
1002
1419
  }
1003
1420
  };
1004
1421
 
1422
+ // src/services/sqlite/cursor.ts
1423
+ function encodeCursor(id, epoch) {
1424
+ const raw = `${epoch}:${id}`;
1425
+ return Buffer.from(raw, "utf8").toString("base64url");
1426
+ }
1427
+ function decodeCursor(cursor) {
1428
+ try {
1429
+ const raw = Buffer.from(cursor, "base64url").toString("utf8");
1430
+ const colonIdx = raw.indexOf(":");
1431
+ if (colonIdx === -1) return null;
1432
+ const epochStr = raw.substring(0, colonIdx);
1433
+ const idStr = raw.substring(colonIdx + 1);
1434
+ const epoch = parseInt(epochStr, 10);
1435
+ const id = parseInt(idStr, 10);
1436
+ if (!Number.isInteger(epoch) || epoch <= 0) return null;
1437
+ if (!Number.isInteger(id) || id <= 0) return null;
1438
+ return { epoch, id };
1439
+ } catch {
1440
+ return null;
1441
+ }
1442
+ }
1443
+
1005
1444
  // src/services/sqlite/Sessions.ts
1006
1445
  function createSession(db, contentSessionId, project, userPrompt) {
1007
1446
  const now = /* @__PURE__ */ new Date();
@@ -1045,16 +1484,16 @@ function createSummary(db, sessionId, project, request, investigated, learned, c
1045
1484
  }
1046
1485
/**
 * Return up to `limit` summaries for a project, newest first.
 * The (created_at_epoch, id) compound ordering gives a stable keyset order
 * even when several rows share the same epoch.
 */
function getSummariesByProject(db, project, limit = 50) {
  const sql = "SELECT * FROM summaries WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
  return db.query(sql).all(project, limit);
}
1052
1491
  function searchSummaries(db, searchTerm, project) {
1053
1492
  const sql = project ? `SELECT * FROM summaries
1054
1493
  WHERE project = ? AND (request LIKE ? ESCAPE '\\' OR learned LIKE ? ESCAPE '\\' OR completed LIKE ? ESCAPE '\\' OR notes LIKE ? ESCAPE '\\')
1055
- ORDER BY created_at_epoch DESC` : `SELECT * FROM summaries
1494
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT * FROM summaries
1056
1495
  WHERE request LIKE ? ESCAPE '\\' OR learned LIKE ? ESCAPE '\\' OR completed LIKE ? ESCAPE '\\' OR notes LIKE ? ESCAPE '\\'
1057
- ORDER BY created_at_epoch DESC`;
1496
+ ORDER BY created_at_epoch DESC, id DESC`;
1058
1497
  const pattern = `%${escapeLikePattern2(searchTerm)}%`;
1059
1498
  const query = db.query(sql);
1060
1499
  if (project) {
@@ -1076,7 +1515,7 @@ function createPrompt(db, contentSessionId, project, promptNumber, promptText) {
1076
1515
  }
1077
1516
/**
 * Return up to `limit` prompts for a project, newest first,
 * using the stable (created_at_epoch, id) keyset ordering.
 */
function getPromptsByProject(db, project, limit = 100) {
  const sql = "SELECT * FROM prompts WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
  return db.query(sql).all(project, limit);
}
@@ -1104,13 +1543,13 @@ function createCheckpoint(db, sessionId, project, data) {
1104
1543
  }
1105
1544
/**
 * Fetch the single most recent checkpoint for a session,
 * tie-broken by id for deterministic results.
 */
function getLatestCheckpoint(db, sessionId) {
  const sql = "SELECT * FROM checkpoints WHERE session_id = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1";
  return db.query(sql).get(sessionId);
}
1111
1550
/**
 * Fetch the single most recent checkpoint for a project,
 * tie-broken by id for deterministic results.
 */
function getLatestCheckpointByProject(db, project) {
  const sql = "SELECT * FROM checkpoints WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1";
  return db.query(sql).get(project);
}
@@ -1172,9 +1611,9 @@ function getReportData(db, project, startEpoch, endEpoch) {
1172
1611
  const staleCount = (project ? db.query(staleSql).get(project, startEpoch, endEpoch)?.count : db.query(staleSql).get(startEpoch, endEpoch)?.count) || 0;
1173
1612
  const summarySql = project ? `SELECT learned, completed, next_steps FROM summaries
1174
1613
  WHERE project = ? AND created_at_epoch >= ? AND created_at_epoch <= ?
1175
- ORDER BY created_at_epoch DESC` : `SELECT learned, completed, next_steps FROM summaries
1614
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT learned, completed, next_steps FROM summaries
1176
1615
  WHERE created_at_epoch >= ? AND created_at_epoch <= ?
1177
- ORDER BY created_at_epoch DESC`;
1616
+ ORDER BY created_at_epoch DESC, id DESC`;
1178
1617
  const summaryRows = project ? db.query(summarySql).all(project, startEpoch, endEpoch) : db.query(summarySql).all(startEpoch, endEpoch);
1179
1618
  const topLearnings = [];
1180
1619
  const completedTasks = [];
@@ -1239,20 +1678,61 @@ function getReportData(db, project, startEpoch, endEpoch) {
1239
1678
  // src/services/sqlite/index.ts
1240
1679
  init_Search();
1241
1680
 
1681
// src/types/worker-types.ts
// Canonical set of structured-knowledge observation types.
var KNOWLEDGE_TYPES = ["constraint", "decision", "heuristic", "rejected"];

// src/services/sqlite/Retention.ts
// Alias plus a precomputed "?, ?, ?, ?" fragment for SQL IN (...) clauses.
var KNOWLEDGE_TYPE_LIST = KNOWLEDGE_TYPES;
var KNOWLEDGE_PLACEHOLDERS = Array.from(KNOWLEDGE_TYPE_LIST, () => "?").join(", ");
1687
+
1242
1688
  // src/sdk/index.ts
1243
1689
  init_Observations();
1244
1690
  import { createHash } from "crypto";
1245
1691
  init_Search();
1246
1692
 
1247
1693
  // src/services/search/EmbeddingService.ts
1694
// Built-in embedding model presets keyed by short name.
// `modelId` is the HuggingFace identifier, `dimensions` the vector width.
var MODEL_CONFIGS = {
  "all-MiniLM-L6-v2": { modelId: "Xenova/all-MiniLM-L6-v2", dimensions: 384 },
  "jina-code-v2": { modelId: "jinaai/jina-embeddings-v2-base-code", dimensions: 768 },
  "bge-small-en": { modelId: "BAAI/bge-small-en-v1.5", dimensions: 384 }
};
// fastembed ships only a fixed model set; anything else must go through
// the @huggingface/transformers pipeline.
var FASTEMBED_COMPATIBLE_MODELS = /* @__PURE__ */ new Set(["all-MiniLM-L6-v2", "bge-small-en"]);
1248
1709
  var EmbeddingService = class {
1249
1710
  provider = null;
1250
1711
  model = null;
1251
1712
  initialized = false;
1252
1713
  initializing = null;
1714
+ config;
1715
+ configName;
1716
+ constructor() {
1717
+ const envModel = process.env.KIRO_MEMORY_EMBEDDING_MODEL || "all-MiniLM-L6-v2";
1718
+ this.configName = envModel;
1719
+ if (MODEL_CONFIGS[envModel]) {
1720
+ this.config = MODEL_CONFIGS[envModel];
1721
+ } else if (envModel.includes("/")) {
1722
+ const dimensions = parseInt(process.env.KIRO_MEMORY_EMBEDDING_DIMENSIONS || "384", 10);
1723
+ this.config = {
1724
+ modelId: envModel,
1725
+ dimensions: isNaN(dimensions) ? 384 : dimensions
1726
+ };
1727
+ } else {
1728
+ logger.warn("EMBEDDING", `Unknown model name '${envModel}', falling back to 'all-MiniLM-L6-v2'`);
1729
+ this.configName = "all-MiniLM-L6-v2";
1730
+ this.config = MODEL_CONFIGS["all-MiniLM-L6-v2"];
1731
+ }
1732
+ }
1253
1733
  /**
1254
- * Inizializza il servizio di embedding.
1255
- * Tenta fastembed, poi @huggingface/transformers, poi fallback a null.
1734
+ * Initialize the embedding service.
1735
+ * Tries fastembed (when compatible), then @huggingface/transformers, then falls back to null.
1256
1736
  */
1257
1737
  async initialize() {
1258
1738
  if (this.initialized) return this.provider !== null;
@@ -1263,45 +1743,48 @@ var EmbeddingService = class {
1263
1743
  return result;
1264
1744
  }
1265
1745
  async _doInitialize() {
1266
- try {
1267
- const fastembed = await import("fastembed");
1268
- const EmbeddingModel = fastembed.EmbeddingModel || fastembed.default?.EmbeddingModel;
1269
- const FlagEmbedding = fastembed.FlagEmbedding || fastembed.default?.FlagEmbedding;
1270
- if (FlagEmbedding && EmbeddingModel) {
1271
- this.model = await FlagEmbedding.init({
1272
- model: EmbeddingModel.BGESmallENV15
1273
- });
1274
- this.provider = "fastembed";
1275
- this.initialized = true;
1276
- logger.info("EMBEDDING", "Inizializzato con fastembed (BGE-small-en-v1.5)");
1277
- return true;
1746
+ const fastembedCompatible = FASTEMBED_COMPATIBLE_MODELS.has(this.configName);
1747
+ if (fastembedCompatible) {
1748
+ try {
1749
+ const fastembed = await import("fastembed");
1750
+ const EmbeddingModel = fastembed.EmbeddingModel || fastembed.default?.EmbeddingModel;
1751
+ const FlagEmbedding = fastembed.FlagEmbedding || fastembed.default?.FlagEmbedding;
1752
+ if (FlagEmbedding && EmbeddingModel) {
1753
+ this.model = await FlagEmbedding.init({
1754
+ model: EmbeddingModel.BGESmallENV15
1755
+ });
1756
+ this.provider = "fastembed";
1757
+ this.initialized = true;
1758
+ logger.info("EMBEDDING", `Initialized with fastembed (BGE-small-en-v1.5) for model '${this.configName}'`);
1759
+ return true;
1760
+ }
1761
+ } catch (error) {
1762
+ logger.debug("EMBEDDING", `fastembed not available: ${error}`);
1278
1763
  }
1279
- } catch (error) {
1280
- logger.debug("EMBEDDING", `fastembed non disponibile: ${error}`);
1281
1764
  }
1282
1765
  try {
1283
1766
  const transformers = await import("@huggingface/transformers");
1284
1767
  const pipeline = transformers.pipeline || transformers.default?.pipeline;
1285
1768
  if (pipeline) {
1286
- this.model = await pipeline("feature-extraction", "Xenova/all-MiniLM-L6-v2", {
1769
+ this.model = await pipeline("feature-extraction", this.config.modelId, {
1287
1770
  quantized: true
1288
1771
  });
1289
1772
  this.provider = "transformers";
1290
1773
  this.initialized = true;
1291
- logger.info("EMBEDDING", "Inizializzato con @huggingface/transformers (all-MiniLM-L6-v2)");
1774
+ logger.info("EMBEDDING", `Initialized with @huggingface/transformers (${this.config.modelId})`);
1292
1775
  return true;
1293
1776
  }
1294
1777
  } catch (error) {
1295
- logger.debug("EMBEDDING", `@huggingface/transformers non disponibile: ${error}`);
1778
+ logger.debug("EMBEDDING", `@huggingface/transformers not available: ${error}`);
1296
1779
  }
1297
1780
  this.provider = null;
1298
1781
  this.initialized = true;
1299
- logger.warn("EMBEDDING", "Nessun provider embedding disponibile, ricerca semantica disabilitata");
1782
+ logger.warn("EMBEDDING", "No embedding provider available, semantic search disabled");
1300
1783
  return false;
1301
1784
  }
1302
1785
  /**
1303
- * Genera embedding per un singolo testo.
1304
- * Ritorna Float32Array con 384 dimensioni, o null se non disponibile.
1786
+ * Generate embedding for a single text.
1787
+ * Returns Float32Array with configured dimensions, or null if not available.
1305
1788
  */
1306
1789
  async embed(text) {
1307
1790
  if (!this.initialized) await this.initialize();
@@ -1314,46 +1797,118 @@ var EmbeddingService = class {
1314
1797
  return await this._embedTransformers(truncated);
1315
1798
  }
1316
1799
  } catch (error) {
1317
- logger.error("EMBEDDING", `Errore generazione embedding: ${error}`);
1800
+ logger.error("EMBEDDING", `Error generating embedding: ${error}`);
1318
1801
  }
1319
1802
  return null;
1320
1803
  }
1321
1804
  /**
1322
- * Genera embeddings in batch.
1805
+ * Generate embeddings in batch.
1806
+ * Uses native batch support when available (fastembed, transformers),
1807
+ * falls back to serial processing on batch failure.
1323
1808
  */
1324
1809
  async embedBatch(texts) {
1325
1810
  if (!this.initialized) await this.initialize();
1326
1811
  if (!this.provider || !this.model) return texts.map(() => null);
1327
- const results = [];
1328
- for (const text of texts) {
1329
- try {
1330
- const embedding = await this.embed(text);
1331
- results.push(embedding);
1332
- } catch {
1333
- results.push(null);
1812
+ if (texts.length === 0) return [];
1813
+ const truncated = texts.map((t) => t.substring(0, 2e3));
1814
+ try {
1815
+ if (this.provider === "fastembed") {
1816
+ return await this._embedBatchFastembed(truncated);
1817
+ } else if (this.provider === "transformers") {
1818
+ return await this._embedBatchTransformers(truncated);
1334
1819
  }
1820
+ } catch (error) {
1821
+ logger.warn("EMBEDDING", `Batch embedding failed, falling back to serial: ${error}`);
1335
1822
  }
1336
- return results;
1823
+ return this._embedBatchSerial(truncated);
1337
1824
  }
1338
1825
  /**
1339
- * Verifica se il servizio è disponibile.
1826
+ * Check if the service is available.
1340
1827
  */
1341
1828
  isAvailable() {
1342
1829
  return this.initialized && this.provider !== null;
1343
1830
  }
1344
1831
  /**
1345
- * Nome del provider attivo.
1832
+ * Name of the active provider.
1346
1833
  */
1347
1834
  getProvider() {
1348
1835
  return this.provider;
1349
1836
  }
1350
1837
  /**
1351
- * Dimensioni del vettore embedding.
1838
+ * Embedding vector dimensions for the active model configuration.
1352
1839
  */
1353
1840
  getDimensions() {
1354
- return 384;
1841
+ return this.config.dimensions;
1842
+ }
1843
+ /**
1844
+ * Human-readable model name used as identifier in the observation_embeddings table.
1845
+ * Returns the short name (e.g., 'all-MiniLM-L6-v2') or the full HF model ID for custom models.
1846
+ */
1847
+ getModelName() {
1848
+ return this.configName;
1849
+ }
1850
+ // --- Batch implementations ---
1851
+ /**
1852
+ * Native batch embedding with fastembed.
1853
+ * FlagEmbedding.embed() accepts string[] and returns an async iterable of batches.
1854
+ */
1855
+ async _embedBatchFastembed(texts) {
1856
+ const results = [];
1857
+ const embeddings = this.model.embed(texts, texts.length);
1858
+ for await (const batch of embeddings) {
1859
+ if (batch) {
1860
+ for (const vec of batch) {
1861
+ results.push(vec instanceof Float32Array ? vec : new Float32Array(vec));
1862
+ }
1863
+ }
1864
+ }
1865
+ while (results.length < texts.length) {
1866
+ results.push(null);
1867
+ }
1868
+ return results;
1355
1869
  }
1356
- // --- Provider specifici ---
1870
+ /**
1871
+ * Batch embedding with @huggingface/transformers pipeline.
1872
+ * The pipeline accepts string[] and returns a Tensor with shape [N, dims].
1873
+ */
1874
+ async _embedBatchTransformers(texts) {
1875
+ const output = await this.model(texts, {
1876
+ pooling: "mean",
1877
+ normalize: true
1878
+ });
1879
+ if (!output?.data) {
1880
+ return texts.map(() => null);
1881
+ }
1882
+ const dims = this.getDimensions();
1883
+ const data = output.data instanceof Float32Array ? output.data : new Float32Array(output.data);
1884
+ const results = [];
1885
+ for (let i = 0; i < texts.length; i++) {
1886
+ const offset = i * dims;
1887
+ if (offset + dims <= data.length) {
1888
+ results.push(data.slice(offset, offset + dims));
1889
+ } else {
1890
+ results.push(null);
1891
+ }
1892
+ }
1893
+ return results;
1894
+ }
1895
+ /**
1896
+ * Serial fallback: embed texts one at a time.
1897
+ * Used when native batch fails.
1898
+ */
1899
+ async _embedBatchSerial(texts) {
1900
+ const results = [];
1901
+ for (const text of texts) {
1902
+ try {
1903
+ const embedding = await this.embed(text);
1904
+ results.push(embedding);
1905
+ } catch {
1906
+ results.push(null);
1907
+ }
1908
+ }
1909
+ return results;
1910
+ }
1911
+ // --- Single-text provider implementations ---
1357
1912
  async _embedFastembed(text) {
1358
1913
  const embeddings = this.model.embed([text], 1);
1359
1914
  for await (const batch of embeddings) {
@@ -1384,17 +1939,21 @@ function getEmbeddingService() {
1384
1939
  }
1385
1940
 
1386
1941
  // src/services/search/VectorSearch.ts
1942
+ var DEFAULT_MAX_CANDIDATES = 2e3;
1387
1943
/**
 * Cosine similarity between two equal-length numeric vectors.
 * Returns 0 on length mismatch or when either vector has zero magnitude.
 */
function cosineSimilarity(a, b) {
  const len = a.length;
  if (len !== b.length) return 0;
  let dot = 0;
  let sqA = 0;
  let sqB = 0;
  let i = 0;
  while (i < len) {
    const x = a[i];
    const y = b[i];
    dot += x * y;
    sqA += x * x;
    sqB += y * y;
    i += 1;
  }
  // Single sqrt over the product (matches the optimized form).
  const magnitude = Math.sqrt(sqA * sqB);
  if (magnitude === 0) return 0;
  return dot / magnitude;
}
@@ -1407,23 +1966,36 @@ function bufferToFloat32(buf) {
1407
1966
  }
1408
1967
  var VectorSearch = class {
1409
1968
  /**
1410
- * Ricerca semantica: calcola cosine similarity tra query e tutti gli embeddings.
1969
+ * Semantic search with SQL pre-filtering for scalability.
1970
+ *
1971
+ * 2-phase strategy:
1972
+ * 1. SQL pre-filters by project + sorts by recency (loads max N candidates)
1973
+ * 2. JS computes cosine similarity only on filtered candidates
1974
+ *
1975
+ * With 50k observations and maxCandidates=2000, loads only ~4% of data.
1411
1976
  */
1412
1977
  async search(db, queryEmbedding, options = {}) {
1413
1978
  const limit = options.limit || 10;
1414
1979
  const threshold = options.threshold || 0.3;
1980
+ const maxCandidates = options.maxCandidates || DEFAULT_MAX_CANDIDATES;
1415
1981
  try {
1416
- let sql = `
1982
+ const conditions = [];
1983
+ const params = [];
1984
+ if (options.project) {
1985
+ conditions.push("o.project = ?");
1986
+ params.push(options.project);
1987
+ }
1988
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1989
+ const sql = `
1417
1990
  SELECT e.observation_id, e.embedding,
1418
1991
  o.title, o.text, o.type, o.project, o.created_at, o.created_at_epoch
1419
1992
  FROM observation_embeddings e
1420
1993
  JOIN observations o ON o.id = e.observation_id
1994
+ ${whereClause}
1995
+ ORDER BY o.created_at_epoch DESC
1996
+ LIMIT ?
1421
1997
  `;
1422
- const params = [];
1423
- if (options.project) {
1424
- sql += " WHERE o.project = ?";
1425
- params.push(options.project);
1426
- }
1998
+ params.push(maxCandidates);
1427
1999
  const rows = db.query(sql).all(...params);
1428
2000
  const scored = [];
1429
2001
  for (const row of rows) {
@@ -1444,14 +2016,15 @@ var VectorSearch = class {
1444
2016
  }
1445
2017
  }
1446
2018
  scored.sort((a, b) => b.similarity - a.similarity);
2019
+ logger.debug("VECTOR", `Search: ${rows.length} candidates \u2192 ${scored.length} above threshold \u2192 ${Math.min(scored.length, limit)} results`);
1447
2020
  return scored.slice(0, limit);
1448
2021
  } catch (error) {
1449
- logger.error("VECTOR", `Errore ricerca vettoriale: ${error}`);
2022
+ logger.error("VECTOR", `Vector search error: ${error}`);
1450
2023
  return [];
1451
2024
  }
1452
2025
  }
1453
2026
  /**
1454
- * Salva embedding per un'osservazione.
2027
+ * Store embedding for an observation.
1455
2028
  */
1456
2029
  async storeEmbedding(db, observationId, embedding, model) {
1457
2030
  try {
@@ -1467,18 +2040,18 @@ var VectorSearch = class {
1467
2040
  embedding.length,
1468
2041
  (/* @__PURE__ */ new Date()).toISOString()
1469
2042
  );
1470
- logger.debug("VECTOR", `Embedding salvato per osservazione ${observationId}`);
2043
+ logger.debug("VECTOR", `Embedding saved for observation ${observationId}`);
1471
2044
  } catch (error) {
1472
- logger.error("VECTOR", `Errore salvataggio embedding: ${error}`);
2045
+ logger.error("VECTOR", `Error saving embedding: ${error}`);
1473
2046
  }
1474
2047
  }
1475
2048
  /**
1476
- * Genera embeddings per osservazioni che non li hanno ancora.
2049
+ * Generate embeddings for observations that don't have them yet.
1477
2050
  */
1478
2051
  async backfillEmbeddings(db, batchSize = 50) {
1479
2052
  const embeddingService2 = getEmbeddingService();
1480
2053
  if (!await embeddingService2.initialize()) {
1481
- logger.warn("VECTOR", "Embedding service non disponibile, backfill saltato");
2054
+ logger.warn("VECTOR", "Embedding service not available, backfill skipped");
1482
2055
  return 0;
1483
2056
  }
1484
2057
  const rows = db.query(`
@@ -1491,7 +2064,7 @@ var VectorSearch = class {
1491
2064
  `).all(batchSize);
1492
2065
  if (rows.length === 0) return 0;
1493
2066
  let count = 0;
1494
- const model = embeddingService2.getProvider() || "unknown";
2067
+ const model = embeddingService2.getModelName();
1495
2068
  for (const row of rows) {
1496
2069
  const parts = [row.title];
1497
2070
  if (row.text) parts.push(row.text);
@@ -1504,11 +2077,11 @@ var VectorSearch = class {
1504
2077
  count++;
1505
2078
  }
1506
2079
  }
1507
- logger.info("VECTOR", `Backfill completato: ${count}/${rows.length} embeddings generati`);
2080
+ logger.info("VECTOR", `Backfill completed: ${count}/${rows.length} embeddings generated`);
1508
2081
  return count;
1509
2082
  }
1510
2083
  /**
1511
- * Statistiche sugli embeddings.
2084
+ * Embedding statistics.
1512
2085
  */
1513
2086
  getStats(db) {
1514
2087
  try {
@@ -1581,21 +2154,21 @@ function knowledgeTypeBoost(type) {
1581
2154
  var HybridSearch = class {
1582
2155
  embeddingInitialized = false;
1583
2156
  /**
1584
- * Inizializza il servizio di embedding (lazy, non bloccante)
2157
+ * Initialize the embedding service (lazy, non-blocking)
1585
2158
  */
1586
2159
  async initialize() {
1587
2160
  try {
1588
2161
  const embeddingService2 = getEmbeddingService();
1589
2162
  await embeddingService2.initialize();
1590
2163
  this.embeddingInitialized = embeddingService2.isAvailable();
1591
- logger.info("SEARCH", `HybridSearch inizializzato (embedding: ${this.embeddingInitialized ? "attivo" : "disattivato"})`);
2164
+ logger.info("SEARCH", `HybridSearch initialized (embedding: ${this.embeddingInitialized ? "active" : "disabled"})`);
1592
2165
  } catch (error) {
1593
- logger.warn("SEARCH", "Inizializzazione embedding fallita, uso solo FTS5", {}, error);
2166
+ logger.warn("SEARCH", "Embedding initialization failed, using only FTS5", {}, error);
1594
2167
  this.embeddingInitialized = false;
1595
2168
  }
1596
2169
  }
1597
2170
  /**
1598
- * Ricerca ibrida con scoring a 4 segnali
2171
+ * Hybrid search with 4-signal scoring
1599
2172
  */
1600
2173
  async search(db, query, options = {}) {
1601
2174
  const limit = options.limit || 10;
@@ -1611,7 +2184,7 @@ var HybridSearch = class {
1611
2184
  const vectorResults = await vectorSearch2.search(db, queryEmbedding, {
1612
2185
  project: options.project,
1613
2186
  limit: limit * 2,
1614
- // Prendiamo piu risultati per il ranking
2187
+ // Fetch more results for ranking
1615
2188
  threshold: 0.3
1616
2189
  });
1617
2190
  for (const hit of vectorResults) {
@@ -1628,10 +2201,10 @@ var HybridSearch = class {
1628
2201
  source: "vector"
1629
2202
  });
1630
2203
  }
1631
- logger.debug("SEARCH", `Vector search: ${vectorResults.length} risultati`);
2204
+ logger.debug("SEARCH", `Vector search: ${vectorResults.length} results`);
1632
2205
  }
1633
2206
  } catch (error) {
1634
- logger.warn("SEARCH", "Ricerca vettoriale fallita, uso solo keyword", {}, error);
2207
+ logger.warn("SEARCH", "Vector search failed, using only keyword", {}, error);
1635
2208
  }
1636
2209
  }
1637
2210
  try {
@@ -1661,9 +2234,9 @@ var HybridSearch = class {
1661
2234
  });
1662
2235
  }
1663
2236
  }
1664
- logger.debug("SEARCH", `Keyword search: ${keywordResults.length} risultati`);
2237
+ logger.debug("SEARCH", `Keyword search: ${keywordResults.length} results`);
1665
2238
  } catch (error) {
1666
- logger.error("SEARCH", "Ricerca keyword fallita", {}, error);
2239
+ logger.error("SEARCH", "Keyword search failed", {}, error);
1667
2240
  }
1668
2241
  if (rawItems.size === 0) return [];
1669
2242
  const allFTS5Ranks = Array.from(rawItems.values()).filter((item) => item.fts5Rank !== null).map((item) => item.fts5Rank);
@@ -1715,9 +2288,6 @@ function getHybridSearch() {
1715
2288
  return hybridSearch;
1716
2289
  }
1717
2290
 
1718
- // src/types/worker-types.ts
1719
- var KNOWLEDGE_TYPES = ["constraint", "decision", "heuristic", "rejected"];
1720
-
1721
2291
  // src/sdk/index.ts
1722
2292
  var KiroMemorySDK = class {
1723
2293
  db;
@@ -1751,33 +2321,33 @@ var KiroMemorySDK = class {
1751
2321
  };
1752
2322
  }
1753
2323
  /**
1754
- * Valida input per storeObservation
2324
+ * Validate input for storeObservation
1755
2325
  */
1756
2326
  validateObservationInput(data) {
1757
2327
  if (!data.type || typeof data.type !== "string" || data.type.length > 100) {
1758
- throw new Error("type \xE8 obbligatorio (stringa, max 100 caratteri)");
2328
+ throw new Error("type is required (string, max 100 chars)");
1759
2329
  }
1760
2330
  if (!data.title || typeof data.title !== "string" || data.title.length > 500) {
1761
- throw new Error("title \xE8 obbligatorio (stringa, max 500 caratteri)");
2331
+ throw new Error("title is required (string, max 500 chars)");
1762
2332
  }
1763
2333
  if (!data.content || typeof data.content !== "string" || data.content.length > 1e5) {
1764
- throw new Error("content \xE8 obbligatorio (stringa, max 100KB)");
2334
+ throw new Error("content is required (string, max 100KB)");
1765
2335
  }
1766
2336
  }
1767
2337
  /**
1768
- * Valida input per storeSummary
2338
+ * Validate input for storeSummary
1769
2339
  */
1770
2340
  validateSummaryInput(data) {
1771
2341
  const MAX = 5e4;
1772
2342
  for (const [key, val] of Object.entries(data)) {
1773
2343
  if (val !== void 0 && val !== null) {
1774
- if (typeof val !== "string") throw new Error(`${key} deve essere una stringa`);
1775
- if (val.length > MAX) throw new Error(`${key} troppo grande (max 50KB)`);
2344
+ if (typeof val !== "string") throw new Error(`${key} must be a string`);
2345
+ if (val.length > MAX) throw new Error(`${key} too large (max 50KB)`);
1776
2346
  }
1777
2347
  }
1778
2348
  }
1779
2349
  /**
1780
- * Genera e salva embedding per un'osservazione (fire-and-forget, non blocca)
2350
+ * Generate and store embedding for an observation (fire-and-forget, non-blocking)
1781
2351
  */
1782
2352
  async generateEmbeddingAsync(observationId, title, content, concepts) {
1783
2353
  try {
@@ -1797,39 +2367,39 @@ var KiroMemorySDK = class {
1797
2367
  );
1798
2368
  }
1799
2369
  } catch (error) {
1800
- logger.debug("SDK", `Embedding generation fallita per obs ${observationId}: ${error}`);
2370
+ logger.debug("SDK", `Embedding generation failed for obs ${observationId}: ${error}`);
1801
2371
  }
1802
2372
  }
1803
2373
  /**
1804
- * Genera content hash SHA256 per deduplicazione basata su contenuto.
1805
- * Usa (project + type + title + narrative) come tupla di identità semantica.
1806
- * NON include sessionId perché è unico ad ogni invocazione.
2374
+ * Generate SHA256 content hash for content-based deduplication.
2375
+ * Uses (project + type + title + narrative) as semantic identity tuple.
2376
+ * Does NOT include sessionId since it's unique per invocation.
1807
2377
  */
1808
2378
  generateContentHash(type, title, narrative) {
1809
2379
  const payload = `${this.project}|${type}|${title}|${narrative || ""}`;
1810
2380
  return createHash("sha256").update(payload).digest("hex");
1811
2381
  }
1812
2382
  /**
1813
- * Finestre di deduplicazione per tipo (ms).
1814
- * Tipi con molte ripetizioni hanno finestre più ampie.
2383
+ * Deduplication windows per type (ms).
2384
+ * Types with many repetitions have wider windows.
1815
2385
  */
1816
2386
  getDeduplicationWindow(type) {
1817
2387
  switch (type) {
1818
2388
  case "file-read":
1819
2389
  return 6e4;
1820
- // 60s — letture frequenti sugli stessi file
2390
+ // 60s — frequent reads on the same files
1821
2391
  case "file-write":
1822
2392
  return 1e4;
1823
- // 10s — scritture rapide consecutive
2393
+ // 10s — rapid consecutive writes
1824
2394
  case "command":
1825
2395
  return 3e4;
1826
2396
  // 30s — standard
1827
2397
  case "research":
1828
2398
  return 12e4;
1829
- // 120s — web search e fetch ripetuti
2399
+ // 120s — repeated web search and fetch
1830
2400
  case "delegation":
1831
2401
  return 6e4;
1832
- // 60s — delegazioni rapide
2402
+ // 60s — rapid delegations
1833
2403
  default:
1834
2404
  return 3e4;
1835
2405
  }
@@ -1843,7 +2413,7 @@ var KiroMemorySDK = class {
1843
2413
  const contentHash = this.generateContentHash(data.type, data.title, data.narrative);
1844
2414
  const dedupWindow = this.getDeduplicationWindow(data.type);
1845
2415
  if (isDuplicateObservation(this.db.db, contentHash, dedupWindow)) {
1846
- logger.debug("SDK", `Osservazione duplicata scartata (${data.type}, ${dedupWindow}ms): ${data.title}`);
2416
+ logger.debug("SDK", `Duplicate observation discarded (${data.type}, ${dedupWindow}ms): ${data.title}`);
1847
2417
  return -1;
1848
2418
  }
1849
2419
  const filesRead = data.filesRead || (data.type === "file-read" ? data.files : void 0);
@@ -1871,12 +2441,12 @@ var KiroMemorySDK = class {
1871
2441
  return observationId;
1872
2442
  }
1873
2443
  /**
1874
- * Salva conoscenza strutturata (constraint, decision, heuristic, rejected).
1875
- * Usa il campo `type` per il knowledgeType e `facts` per i metadati JSON.
2444
+ * Store structured knowledge (constraint, decision, heuristic, rejected).
2445
+ * Uses the `type` field for knowledgeType and `facts` for JSON metadata.
1876
2446
  */
1877
2447
  async storeKnowledge(data) {
1878
2448
  if (!KNOWLEDGE_TYPES.includes(data.knowledgeType)) {
1879
- throw new Error(`knowledgeType non valido: ${data.knowledgeType}. Valori ammessi: ${KNOWLEDGE_TYPES.join(", ")}`);
2449
+ throw new Error(`Invalid knowledgeType: ${data.knowledgeType}. Allowed values: ${KNOWLEDGE_TYPES.join(", ")}`);
1880
2450
  }
1881
2451
  this.validateObservationInput({ type: data.knowledgeType, title: data.title, content: data.content });
1882
2452
  const metadata = (() => {
@@ -1908,9 +2478,9 @@ var KiroMemorySDK = class {
1908
2478
  }
1909
2479
  })();
1910
2480
  const sessionId = "sdk-" + Date.now();
1911
- const contentHash = this.generateContentHash(data.type, data.title);
2481
+ const contentHash = this.generateContentHash(data.knowledgeType, data.title);
1912
2482
  if (isDuplicateObservation(this.db.db, contentHash)) {
1913
- logger.debug("SDK", `Knowledge duplicata scartata: ${data.title}`);
2483
+ logger.debug("SDK", `Duplicate knowledge discarded: ${data.title}`);
1914
2484
  return -1;
1915
2485
  }
1916
2486
  const discoveryTokens = Math.ceil(data.content.length / 4);
@@ -1927,11 +2497,11 @@ var KiroMemorySDK = class {
1927
2497
  null,
1928
2498
  // narrative
1929
2499
  JSON.stringify(metadata),
1930
- // facts = metadati JSON
2500
+ // facts = JSON metadata
1931
2501
  data.concepts?.join(", ") || null,
1932
2502
  data.files?.join(", ") || null,
1933
2503
  null,
1934
- // filesModified: knowledge non modifica file
2504
+ // filesModified: knowledge doesn't modify files
1935
2505
  0,
1936
2506
  // prompt_number
1937
2507
  contentHash,
@@ -2042,8 +2612,8 @@ var KiroMemorySDK = class {
2042
2612
  return this.project;
2043
2613
  }
2044
2614
  /**
2045
- * Ricerca ibrida: vector search + keyword FTS5
2046
- * Richiede inizializzazione HybridSearch (embedding service)
2615
+ * Hybrid search: vector search + keyword FTS5
2616
+ * Requires HybridSearch initialization (embedding service)
2047
2617
  */
2048
2618
  async hybridSearch(query, options = {}) {
2049
2619
  const hybridSearch2 = getHybridSearch();
@@ -2053,8 +2623,8 @@ var KiroMemorySDK = class {
2053
2623
  });
2054
2624
  }
2055
2625
  /**
2056
- * Ricerca solo semantica (vector search)
2057
- * Ritorna risultati basati su similarità coseno con gli embeddings
2626
+ * Semantic-only search (vector search)
2627
+ * Returns results based on cosine similarity with embeddings
2058
2628
  */
2059
2629
  async semanticSearch(query, options = {}) {
2060
2630
  const embeddingService2 = getEmbeddingService();
@@ -2089,21 +2659,21 @@ var KiroMemorySDK = class {
2089
2659
  }));
2090
2660
  }
2091
2661
  /**
2092
- * Genera embeddings per osservazioni che non li hanno ancora
2662
+ * Generate embeddings for observations that don't have them yet
2093
2663
  */
2094
2664
  async backfillEmbeddings(batchSize = 50) {
2095
2665
  const vectorSearch2 = getVectorSearch();
2096
2666
  return vectorSearch2.backfillEmbeddings(this.db.db, batchSize);
2097
2667
  }
2098
2668
  /**
2099
- * Statistiche sugli embeddings nel database
2669
+ * Embedding statistics in the database
2100
2670
  */
2101
2671
  getEmbeddingStats() {
2102
2672
  const vectorSearch2 = getVectorSearch();
2103
2673
  return vectorSearch2.getStats(this.db.db);
2104
2674
  }
2105
2675
  /**
2106
- * Inizializza il servizio di embedding (lazy, chiamare prima di hybridSearch)
2676
+ * Initialize the embedding service (lazy, call before hybridSearch)
2107
2677
  */
2108
2678
  async initializeEmbeddings() {
2109
2679
  const hybridSearch2 = getHybridSearch();
@@ -2111,10 +2681,10 @@ var KiroMemorySDK = class {
2111
2681
  return getEmbeddingService().isAvailable();
2112
2682
  }
2113
2683
  /**
2114
- * Contesto smart con ranking a 4 segnali e budget token.
2684
+ * Smart context with 4-signal ranking and token budget.
2115
2685
  *
2116
- * Se query presente: usa HybridSearch con SEARCH_WEIGHTS.
2117
- * Se senza query: ranking per recency + project match (CONTEXT_WEIGHTS).
2686
+ * If query present: uses HybridSearch with SEARCH_WEIGHTS.
2687
+ * If no query: ranking by recency + project match (CONTEXT_WEIGHTS).
2118
2688
  */
2119
2689
  async getSmartContext(options = {}) {
2120
2690
  const tokenBudget = options.tokenBudget || parseInt(process.env.KIRO_MEMORY_CONTEXT_TOKENS || "0", 10) || 2e3;
@@ -2188,8 +2758,8 @@ var KiroMemorySDK = class {
2188
2758
  };
2189
2759
  }
2190
2760
  /**
2191
- * Rileva osservazioni stale (file modificati dopo la creazione) e le marca nel DB.
2192
- * Ritorna il numero di osservazioni marcate come stale.
2761
+ * Detect stale observations (files modified after creation) and mark them in DB.
2762
+ * Returns the number of observations marked as stale.
2193
2763
  */
2194
2764
  async detectStaleObservations() {
2195
2765
  const staleObs = getStaleObservations(this.db.db, this.project);
@@ -2200,14 +2770,14 @@ var KiroMemorySDK = class {
2200
2770
  return staleObs.length;
2201
2771
  }
2202
2772
  /**
2203
- * Consolida osservazioni duplicate sullo stesso file e tipo.
2204
- * Raggruppa per (project, type, files_modified), mantiene la piu recente.
2773
+ * Consolidate duplicate observations on the same file and type.
2774
+ * Groups by (project, type, files_modified), keeps the most recent.
2205
2775
  */
2206
2776
  async consolidateObservations(options = {}) {
2207
2777
  return consolidateObservations(this.db.db, this.project, options);
2208
2778
  }
2209
2779
  /**
2210
- * Statistiche decay: totale, stale, mai accedute, accedute di recente.
2780
+ * Decay statistics: total, stale, never accessed, recently accessed.
2211
2781
  */
2212
2782
  async getDecayStats() {
2213
2783
  const total = this.db.db.query(
@@ -2226,8 +2796,8 @@ var KiroMemorySDK = class {
2226
2796
  return { total, stale, neverAccessed, recentlyAccessed };
2227
2797
  }
2228
2798
  /**
2229
- * Crea un checkpoint strutturato per resume sessione.
2230
- * Salva automaticamente un context_snapshot con le ultime 10 osservazioni.
2799
+ * Create a structured checkpoint for session resume.
2800
+ * Automatically saves a context_snapshot with the last 10 observations.
2231
2801
  */
2232
2802
  async createCheckpoint(sessionId, data) {
2233
2803
  const recentObs = getObservationsByProject(this.db.db, this.project, 10);
@@ -2244,21 +2814,21 @@ var KiroMemorySDK = class {
2244
2814
  });
2245
2815
  }
2246
2816
  /**
2247
- * Recupera l'ultimo checkpoint di una sessione specifica.
2817
+ * Retrieve the latest checkpoint of a specific session.
2248
2818
  */
2249
2819
  async getCheckpoint(sessionId) {
2250
2820
  return getLatestCheckpoint(this.db.db, sessionId);
2251
2821
  }
2252
2822
  /**
2253
- * Recupera l'ultimo checkpoint per il progetto corrente.
2254
- * Utile per resume automatico senza specificare session ID.
2823
+ * Retrieve the latest checkpoint for the current project.
2824
+ * Useful for automatic resume without specifying session ID.
2255
2825
  */
2256
2826
  async getLatestProjectCheckpoint() {
2257
2827
  return getLatestCheckpointByProject(this.db.db, this.project);
2258
2828
  }
2259
2829
  /**
2260
- * Genera un report di attività per il progetto corrente.
2261
- * Aggrega osservazioni, sessioni, summaries e file per un periodo temporale.
2830
+ * Generate an activity report for the current project.
2831
+ * Aggregates observations, sessions, summaries and files for a time period.
2262
2832
  */
2263
2833
  async generateReport(options) {
2264
2834
  const now = /* @__PURE__ */ new Date();
@@ -2274,6 +2844,66 @@ var KiroMemorySDK = class {
2274
2844
  }
2275
2845
  return getReportData(this.db.db, this.project, startEpoch, endEpoch);
2276
2846
  }
2847
+ /**
2848
+ * Lista osservazioni con keyset pagination.
2849
+ * Restituisce un oggetto { data, next_cursor, has_more }.
2850
+ *
2851
+ * Esempio:
2852
+ * const page1 = await sdk.listObservations({ limit: 50 });
2853
+ * const page2 = await sdk.listObservations({ cursor: page1.next_cursor });
2854
+ */
2855
+ async listObservations(options = {}) {
2856
+ const limit = Math.min(Math.max(options.limit ?? 50, 1), 200);
2857
+ const project = options.project ?? this.project;
2858
+ let rows;
2859
+ if (options.cursor) {
2860
+ const decoded = decodeCursor(options.cursor);
2861
+ if (!decoded) throw new Error("Cursor non valido");
2862
+ const sql = project ? `SELECT * FROM observations
2863
+ WHERE project = ? AND (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
2864
+ ORDER BY created_at_epoch DESC, id DESC
2865
+ LIMIT ?` : `SELECT * FROM observations
2866
+ WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
2867
+ ORDER BY created_at_epoch DESC, id DESC
2868
+ LIMIT ?`;
2869
+ rows = project ? this.db.db.query(sql).all(project, decoded.epoch, decoded.epoch, decoded.id, limit) : this.db.db.query(sql).all(decoded.epoch, decoded.epoch, decoded.id, limit);
2870
+ } else {
2871
+ const sql = project ? "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?" : "SELECT * FROM observations ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
2872
+ rows = project ? this.db.db.query(sql).all(project, limit) : this.db.db.query(sql).all(limit);
2873
+ }
2874
+ const next_cursor = rows.length >= limit ? encodeCursor(rows[rows.length - 1].id, rows[rows.length - 1].created_at_epoch) : null;
2875
+ return { data: rows, next_cursor, has_more: next_cursor !== null };
2876
+ }
2877
+ /**
2878
+ * Lista sommari con keyset pagination.
2879
+ * Restituisce un oggetto { data, next_cursor, has_more }.
2880
+ *
2881
+ * Esempio:
2882
+ * const page1 = await sdk.listSummaries({ limit: 20 });
2883
+ * const page2 = await sdk.listSummaries({ cursor: page1.next_cursor });
2884
+ */
2885
+ async listSummaries(options = {}) {
2886
+ const limit = Math.min(Math.max(options.limit ?? 20, 1), 200);
2887
+ const project = options.project ?? this.project;
2888
+ let rows;
2889
+ if (options.cursor) {
2890
+ const decoded = decodeCursor(options.cursor);
2891
+ if (!decoded) throw new Error("Cursor non valido");
2892
+ const sql = project ? `SELECT * FROM summaries
2893
+ WHERE project = ? AND (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
2894
+ ORDER BY created_at_epoch DESC, id DESC
2895
+ LIMIT ?` : `SELECT * FROM summaries
2896
+ WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
2897
+ ORDER BY created_at_epoch DESC, id DESC
2898
+ LIMIT ?`;
2899
+ rows = project ? this.db.db.query(sql).all(project, decoded.epoch, decoded.epoch, decoded.id, limit) : this.db.db.query(sql).all(decoded.epoch, decoded.epoch, decoded.id, limit);
2900
+ } else {
2901
+ const sql = project ? "SELECT * FROM summaries WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?" : "SELECT * FROM summaries ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
2902
+ rows = project ? this.db.db.query(sql).all(project, limit) : this.db.db.query(sql).all(limit);
2903
+ }
2904
+ const next_cursor = rows.length >= limit ? encodeCursor(rows[rows.length - 1].id, rows[rows.length - 1].created_at_epoch) : null;
2905
+ return { data: rows, next_cursor, has_more: next_cursor !== null };
2906
+ }
2277
2907
  /**
2278
2908
  * Getter for direct database access (for API routes)
2279
2909
  */