kiro-memory 1.9.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. package/README.md +5 -1
  2. package/package.json +5 -5
  3. package/plugin/dist/cli/contextkit.js +2611 -345
  4. package/plugin/dist/hooks/agentSpawn.js +853 -223
  5. package/plugin/dist/hooks/kiro-hooks.js +841 -211
  6. package/plugin/dist/hooks/postToolUse.js +853 -222
  7. package/plugin/dist/hooks/stop.js +850 -220
  8. package/plugin/dist/hooks/userPromptSubmit.js +848 -216
  9. package/plugin/dist/index.js +843 -340
  10. package/plugin/dist/plugins/github/github-client.js +152 -0
  11. package/plugin/dist/plugins/github/index.js +412 -0
  12. package/plugin/dist/plugins/github/issue-parser.js +54 -0
  13. package/plugin/dist/plugins/slack/formatter.js +90 -0
  14. package/plugin/dist/plugins/slack/index.js +215 -0
  15. package/plugin/dist/sdk/index.js +841 -215
  16. package/plugin/dist/servers/mcp-server.js +4461 -397
  17. package/plugin/dist/services/search/EmbeddingService.js +146 -37
  18. package/plugin/dist/services/search/HybridSearch.js +564 -116
  19. package/plugin/dist/services/search/VectorSearch.js +187 -60
  20. package/plugin/dist/services/search/index.js +565 -254
  21. package/plugin/dist/services/sqlite/Backup.js +416 -0
  22. package/plugin/dist/services/sqlite/Database.js +126 -153
  23. package/plugin/dist/services/sqlite/ImportExport.js +452 -0
  24. package/plugin/dist/services/sqlite/Observations.js +314 -19
  25. package/plugin/dist/services/sqlite/Prompts.js +1 -1
  26. package/plugin/dist/services/sqlite/Search.js +41 -29
  27. package/plugin/dist/services/sqlite/Summaries.js +4 -4
  28. package/plugin/dist/services/sqlite/index.js +1428 -208
  29. package/plugin/dist/viewer.css +1 -0
  30. package/plugin/dist/viewer.html +2 -179
  31. package/plugin/dist/viewer.js +23 -24942
  32. package/plugin/dist/viewer.js.map +7 -0
  33. package/plugin/dist/worker-service.js +427 -5569
  34. package/plugin/dist/worker-service.js.map +7 -0
@@ -4,14 +4,15 @@ import { createRequire } from 'module';const require = createRequire(import.meta
4
4
  import BetterSqlite3 from "better-sqlite3";
5
5
  var Database = class {
6
6
  _db;
7
+ _stmtCache = /* @__PURE__ */ new Map();
7
8
  constructor(path, options) {
8
9
  this._db = new BetterSqlite3(path, {
9
- // better-sqlite3 crea il file di default (non serve 'create')
10
+ // better-sqlite3 creates the file by default ('create' not needed)
10
11
  readonly: options?.readwrite === false ? true : false
11
12
  });
12
13
  }
13
14
  /**
14
- * Esegui una query SQL senza risultati
15
+ * Execute a SQL query without results
15
16
  */
16
17
  run(sql, params) {
17
18
  const stmt = this._db.prepare(sql);
@@ -19,51 +20,53 @@ var Database = class {
19
20
  return result;
20
21
  }
21
22
  /**
22
- * Prepara una query con interfaccia compatibile bun:sqlite
23
+ * Prepare a query with bun:sqlite-compatible interface.
24
+ * Returns a cached prepared statement for repeated queries.
23
25
  */
24
26
  query(sql) {
25
- return new BunQueryCompat(this._db, sql);
27
+ let cached = this._stmtCache.get(sql);
28
+ if (!cached) {
29
+ cached = new BunQueryCompat(this._db, sql);
30
+ this._stmtCache.set(sql, cached);
31
+ }
32
+ return cached;
26
33
  }
27
34
  /**
28
- * Crea una transazione
35
+ * Create a transaction
29
36
  */
30
37
  transaction(fn) {
31
38
  return this._db.transaction(fn);
32
39
  }
33
40
  /**
34
- * Chiudi la connessione
41
+ * Close the connection
35
42
  */
36
43
  close() {
44
+ this._stmtCache.clear();
37
45
  this._db.close();
38
46
  }
39
47
  };
40
48
  var BunQueryCompat = class {
41
- _db;
42
- _sql;
49
+ _stmt;
43
50
  constructor(db, sql) {
44
- this._db = db;
45
- this._sql = sql;
51
+ this._stmt = db.prepare(sql);
46
52
  }
47
53
  /**
48
- * Restituisce tutte le righe
54
+ * Returns all rows
49
55
  */
50
56
  all(...params) {
51
- const stmt = this._db.prepare(this._sql);
52
- return params.length > 0 ? stmt.all(...params) : stmt.all();
57
+ return params.length > 0 ? this._stmt.all(...params) : this._stmt.all();
53
58
  }
54
59
  /**
55
- * Restituisce la prima riga o null
60
+ * Returns the first row or null
56
61
  */
57
62
  get(...params) {
58
- const stmt = this._db.prepare(this._sql);
59
- return params.length > 0 ? stmt.get(...params) : stmt.get();
63
+ return params.length > 0 ? this._stmt.get(...params) : this._stmt.get();
60
64
  }
61
65
  /**
62
- * Esegui senza risultati
66
+ * Execute without results
63
67
  */
64
68
  run(...params) {
65
- const stmt = this._db.prepare(this._sql);
66
- return params.length > 0 ? stmt.run(...params) : stmt.run();
69
+ return params.length > 0 ? this._stmt.run(...params) : this._stmt.run();
67
70
  }
68
71
  };
69
72
 
@@ -324,150 +327,63 @@ function ensureDir(dirPath) {
324
327
  // src/services/sqlite/Database.ts
325
328
  var SQLITE_MMAP_SIZE_BYTES = 256 * 1024 * 1024;
326
329
  var SQLITE_CACHE_SIZE_PAGES = 1e4;
327
- var dbInstance = null;
328
330
  var KiroMemoryDatabase = class {
329
- db;
331
+ _db;
330
332
  /**
331
- * @param dbPath - Percorso al file SQLite (default: DB_PATH)
332
- * @param skipMigrations - Se true, salta il migration runner (per hook ad alta frequenza)
333
+ * Readonly accessor for the underlying Database instance.
334
+ * Prefer using query() and run() proxy methods directly.
335
+ */
336
+ get db() {
337
+ return this._db;
338
+ }
339
+ /**
340
+ * @param dbPath - Path to the SQLite file (default: DB_PATH)
341
+ * @param skipMigrations - If true, skip the migration runner (for high-frequency hooks)
333
342
  */
334
343
  constructor(dbPath = DB_PATH, skipMigrations = false) {
335
344
  if (dbPath !== ":memory:") {
336
345
  ensureDir(DATA_DIR);
337
346
  }
338
- this.db = new Database(dbPath, { create: true, readwrite: true });
339
- this.db.run("PRAGMA journal_mode = WAL");
340
- this.db.run("PRAGMA synchronous = NORMAL");
341
- this.db.run("PRAGMA foreign_keys = ON");
342
- this.db.run("PRAGMA temp_store = memory");
343
- this.db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
344
- this.db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
347
+ this._db = new Database(dbPath, { create: true, readwrite: true });
348
+ this._db.run("PRAGMA journal_mode = WAL");
349
+ this._db.run("PRAGMA busy_timeout = 5000");
350
+ this._db.run("PRAGMA synchronous = NORMAL");
351
+ this._db.run("PRAGMA foreign_keys = ON");
352
+ this._db.run("PRAGMA temp_store = memory");
353
+ this._db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
354
+ this._db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
345
355
  if (!skipMigrations) {
346
- const migrationRunner = new MigrationRunner(this.db);
356
+ const migrationRunner = new MigrationRunner(this._db);
347
357
  migrationRunner.runAllMigrations();
348
358
  }
349
359
  }
350
360
  /**
351
- * Esegue una funzione all'interno di una transazione atomica.
352
- * Se fn() lancia un errore, la transazione viene annullata automaticamente.
353
- */
354
- withTransaction(fn) {
355
- const transaction = this.db.transaction(fn);
356
- return transaction(this.db);
357
- }
358
- /**
359
- * Close the database connection
361
+ * Prepare a query (delegates to underlying Database).
362
+ * Proxy method to avoid ctx.db.db.query() double access.
360
363
  */
361
- close() {
362
- this.db.close();
363
- }
364
- };
365
- var DatabaseManager = class _DatabaseManager {
366
- static instance;
367
- db = null;
368
- migrations = [];
369
- static getInstance() {
370
- if (!_DatabaseManager.instance) {
371
- _DatabaseManager.instance = new _DatabaseManager();
372
- }
373
- return _DatabaseManager.instance;
374
- }
375
- /**
376
- * Register a migration to be run during initialization
377
- */
378
- registerMigration(migration) {
379
- this.migrations.push(migration);
380
- this.migrations.sort((a, b) => a.version - b.version);
381
- }
382
- /**
383
- * Initialize database connection with optimized settings
384
- */
385
- async initialize() {
386
- if (this.db) {
387
- return this.db;
388
- }
389
- ensureDir(DATA_DIR);
390
- this.db = new Database(DB_PATH, { create: true, readwrite: true });
391
- this.db.run("PRAGMA journal_mode = WAL");
392
- this.db.run("PRAGMA synchronous = NORMAL");
393
- this.db.run("PRAGMA foreign_keys = ON");
394
- this.db.run("PRAGMA temp_store = memory");
395
- this.db.run(`PRAGMA mmap_size = ${SQLITE_MMAP_SIZE_BYTES}`);
396
- this.db.run(`PRAGMA cache_size = ${SQLITE_CACHE_SIZE_PAGES}`);
397
- this.initializeSchemaVersions();
398
- await this.runMigrations();
399
- dbInstance = this.db;
400
- return this.db;
364
+ query(sql) {
365
+ return this._db.query(sql);
401
366
  }
402
367
  /**
403
- * Get the current database connection
368
+ * Execute a SQL statement without results (delegates to underlying Database).
369
+ * Proxy method to avoid ctx.db.db.run() double access.
404
370
  */
405
- getConnection() {
406
- if (!this.db) {
407
- throw new Error("Database not initialized. Call initialize() first.");
408
- }
409
- return this.db;
371
+ run(sql, params) {
372
+ return this._db.run(sql, params);
410
373
  }
411
374
  /**
412
- * Execute a function within a transaction
375
+ * Executes a function within an atomic transaction.
376
+ * If fn() throws an error, the transaction is automatically rolled back.
413
377
  */
414
378
  withTransaction(fn) {
415
- const db = this.getConnection();
416
- const transaction = db.transaction(fn);
417
- return transaction(db);
379
+ const transaction = this._db.transaction(fn);
380
+ return transaction(this._db);
418
381
  }
419
382
  /**
420
383
  * Close the database connection
421
384
  */
422
385
  close() {
423
- if (this.db) {
424
- this.db.close();
425
- this.db = null;
426
- dbInstance = null;
427
- }
428
- }
429
- /**
430
- * Initialize the schema_versions table
431
- */
432
- initializeSchemaVersions() {
433
- if (!this.db) return;
434
- this.db.run(`
435
- CREATE TABLE IF NOT EXISTS schema_versions (
436
- id INTEGER PRIMARY KEY,
437
- version INTEGER UNIQUE NOT NULL,
438
- applied_at TEXT NOT NULL
439
- )
440
- `);
441
- }
442
- /**
443
- * Run all pending migrations
444
- */
445
- async runMigrations() {
446
- if (!this.db) return;
447
- const query = this.db.query("SELECT version FROM schema_versions ORDER BY version");
448
- const appliedVersions = query.all().map((row) => row.version);
449
- const maxApplied = appliedVersions.length > 0 ? Math.max(...appliedVersions) : 0;
450
- for (const migration of this.migrations) {
451
- if (migration.version > maxApplied) {
452
- logger.info("DB", `Applying migration ${migration.version}`);
453
- const transaction = this.db.transaction(() => {
454
- migration.up(this.db);
455
- const insertQuery = this.db.query("INSERT INTO schema_versions (version, applied_at) VALUES (?, ?)");
456
- insertQuery.run(migration.version, (/* @__PURE__ */ new Date()).toISOString());
457
- });
458
- transaction();
459
- logger.info("DB", `Migration ${migration.version} applied successfully`);
460
- }
461
- }
462
- }
463
- /**
464
- * Get current schema version
465
- */
466
- getCurrentVersion() {
467
- if (!this.db) return 0;
468
- const query = this.db.query("SELECT MAX(version) as version FROM schema_versions");
469
- const result = query.get();
470
- return result?.version || 0;
386
+ this._db.close();
471
387
  }
472
388
  };
473
389
  var MigrationRunner = class {
@@ -708,19 +624,108 @@ var MigrationRunner = class {
708
624
  db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_epoch ON summaries(project, created_at_epoch DESC)");
709
625
  db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_epoch ON prompts(project, created_at_epoch DESC)");
710
626
  }
627
+ },
628
+ {
629
+ version: 10,
630
+ up: (db) => {
631
+ db.run(`
632
+ CREATE TABLE IF NOT EXISTS job_queue (
633
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
634
+ type TEXT NOT NULL,
635
+ status TEXT NOT NULL DEFAULT 'pending',
636
+ payload TEXT,
637
+ result TEXT,
638
+ error TEXT,
639
+ retry_count INTEGER DEFAULT 0,
640
+ max_retries INTEGER DEFAULT 3,
641
+ priority INTEGER DEFAULT 0,
642
+ created_at TEXT NOT NULL,
643
+ created_at_epoch INTEGER NOT NULL,
644
+ started_at_epoch INTEGER,
645
+ completed_at_epoch INTEGER
646
+ )
647
+ `);
648
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_status ON job_queue(status)");
649
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_type ON job_queue(type)");
650
+ db.run("CREATE INDEX IF NOT EXISTS idx_jobs_priority ON job_queue(status, priority DESC, created_at_epoch ASC)");
651
+ }
652
+ },
653
+ {
654
+ version: 11,
655
+ up: (db) => {
656
+ db.run("ALTER TABLE observations ADD COLUMN auto_category TEXT");
657
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_category ON observations(auto_category)");
658
+ }
659
+ },
660
+ {
661
+ version: 12,
662
+ up: (db) => {
663
+ db.run(`
664
+ CREATE TABLE IF NOT EXISTS github_links (
665
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
666
+ observation_id INTEGER,
667
+ session_id TEXT,
668
+ repo TEXT NOT NULL,
669
+ issue_number INTEGER,
670
+ pr_number INTEGER,
671
+ event_type TEXT NOT NULL,
672
+ action TEXT,
673
+ title TEXT,
674
+ url TEXT,
675
+ author TEXT,
676
+ created_at TEXT NOT NULL,
677
+ created_at_epoch INTEGER NOT NULL,
678
+ FOREIGN KEY (observation_id) REFERENCES observations(id)
679
+ )
680
+ `);
681
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo ON github_links(repo)");
682
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_obs ON github_links(observation_id)");
683
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_event ON github_links(event_type)");
684
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_issue ON github_links(repo, issue_number)");
685
+ db.run("CREATE INDEX IF NOT EXISTS idx_github_links_repo_pr ON github_links(repo, pr_number)");
686
+ }
687
+ },
688
+ {
689
+ version: 13,
690
+ up: (db) => {
691
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_keyset ON observations(created_at_epoch DESC, id DESC)");
692
+ db.run("CREATE INDEX IF NOT EXISTS idx_observations_project_keyset ON observations(project, created_at_epoch DESC, id DESC)");
693
+ db.run("CREATE INDEX IF NOT EXISTS idx_summaries_keyset ON summaries(created_at_epoch DESC, id DESC)");
694
+ db.run("CREATE INDEX IF NOT EXISTS idx_summaries_project_keyset ON summaries(project, created_at_epoch DESC, id DESC)");
695
+ db.run("CREATE INDEX IF NOT EXISTS idx_prompts_keyset ON prompts(created_at_epoch DESC, id DESC)");
696
+ db.run("CREATE INDEX IF NOT EXISTS idx_prompts_project_keyset ON prompts(project, created_at_epoch DESC, id DESC)");
697
+ }
711
698
  }
712
699
  ];
713
700
  }
714
701
  };
715
- function getDatabase() {
716
- if (!dbInstance) {
717
- throw new Error("Database not initialized. Call DatabaseManager.getInstance().initialize() first.");
702
+
703
+ // src/services/sqlite/cursor.ts
704
+ function encodeCursor(id, epoch) {
705
+ const raw = `${epoch}:${id}`;
706
+ return Buffer.from(raw, "utf8").toString("base64url");
707
+ }
708
+ function decodeCursor(cursor) {
709
+ try {
710
+ const raw = Buffer.from(cursor, "base64url").toString("utf8");
711
+ const colonIdx = raw.indexOf(":");
712
+ if (colonIdx === -1) return null;
713
+ const epochStr = raw.substring(0, colonIdx);
714
+ const idStr = raw.substring(colonIdx + 1);
715
+ const epoch = parseInt(epochStr, 10);
716
+ const id = parseInt(idStr, 10);
717
+ if (!Number.isInteger(epoch) || epoch <= 0) return null;
718
+ if (!Number.isInteger(id) || id <= 0) return null;
719
+ return { epoch, id };
720
+ } catch {
721
+ return null;
718
722
  }
719
- return dbInstance;
720
723
  }
721
- async function initializeDatabase() {
722
- const manager = DatabaseManager.getInstance();
723
- return await manager.initialize();
724
+ function buildNextCursor(rows, limit) {
725
+ if (rows.length < limit) return null;
726
+ const last = rows[rows.length - 1];
727
+ if (!last) return null;
728
+ return encodeCursor(last.id, last.created_at_epoch);
724
729
  }
725
730
 
726
731
  // src/services/sqlite/Sessions.ts
@@ -778,6 +783,278 @@ function getSessionsByProject(db, project, limit = 100) {
778
783
  return query.all(project, limit);
779
784
  }
780
785
 
786
+ // src/utils/secrets.ts
787
+ var SECRET_PATTERNS = [
788
+ // AWS Access Keys (AKIA, ABIA, ACCA, ASIA prefixes + 16 alphanumeric chars)
789
+ { name: "aws-key", pattern: /(?:AKIA|ABIA|ACCA|ASIA)[A-Z0-9]{16}/g },
790
+ // JWT tokens (three base64url segments separated by dots)
791
+ { name: "jwt", pattern: /eyJ[a-zA-Z0-9_-]{10,}\.eyJ[a-zA-Z0-9_-]{10,}\.[a-zA-Z0-9_-]{10,}/g },
792
+ // Generic API keys in key=value or key: value assignments
793
+ { name: "api-key", pattern: /(?:api[_-]?key|apikey|api[_-]?secret)\s*[:=]\s*['"]?([a-zA-Z0-9_\-]{20,})['"]?/gi },
794
+ // Password/secret/token in variable assignments
795
+ { name: "credential", pattern: /(?:password|passwd|pwd|secret|token|auth[_-]?token|access[_-]?token|bearer)\s*[:=]\s*['"]?([^\s'"]{8,})['"]?/gi },
796
+ // Credentials embedded in URLs (user:pass@host)
797
+ { name: "url-credential", pattern: /(?:https?:\/\/)([^:]+):([^@]+)@/g },
798
+ // PEM-encoded private keys (RSA, EC, DSA, OpenSSH)
799
+ { name: "private-key", pattern: /-----BEGIN (?:RSA |EC |DSA |OPENSSH )?PRIVATE KEY-----/g },
800
+ // GitHub personal access tokens (ghp_, gho_, ghu_, ghs_, ghr_ prefixes)
801
+ { name: "github-token", pattern: /gh[pousr]_[a-zA-Z0-9]{36,}/g },
802
+ // Slack bot/user/app tokens
803
+ { name: "slack-token", pattern: /xox[bpoas]-[a-zA-Z0-9-]{10,}/g },
804
+ // HTTP Authorization Bearer header values
805
+ { name: "bearer-header", pattern: /\bBearer\s+([a-zA-Z0-9_\-\.]{20,})/g },
806
+ // Generic hex secrets (32+ hex chars after a key/secret/token/password label)
807
+ { name: "hex-secret", pattern: /(?:key|secret|token|password)\s*[:=]\s*['"]?([0-9a-f]{32,})['"]?/gi }
808
+ ];
809
+ function redactSecrets(text) {
810
+ if (!text) return text;
811
+ let redacted = text;
812
+ for (const { pattern } of SECRET_PATTERNS) {
813
+ pattern.lastIndex = 0;
814
+ redacted = redacted.replace(pattern, (match) => {
815
+ const prefix = match.substring(0, Math.min(4, match.length));
816
+ return `${prefix}***REDACTED***`;
817
+ });
818
+ }
819
+ return redacted;
820
+ }
821
+
822
+ // src/utils/categorizer.ts
823
+ var CATEGORY_RULES = [
824
+ {
825
+ category: "security",
826
+ keywords: [
827
+ "security",
828
+ "vulnerability",
829
+ "cve",
830
+ "xss",
831
+ "csrf",
832
+ "injection",
833
+ "sanitize",
834
+ "escape",
835
+ "auth",
836
+ "authentication",
837
+ "authorization",
838
+ "permission",
839
+ "helmet",
840
+ "cors",
841
+ "rate-limit",
842
+ "token",
843
+ "encrypt",
844
+ "decrypt",
845
+ "secret",
846
+ "redact",
847
+ "owasp"
848
+ ],
849
+ filePatterns: [/security/i, /auth/i, /secrets?\.ts/i],
850
+ weight: 10
851
+ },
852
+ {
853
+ category: "testing",
854
+ keywords: [
855
+ "test",
856
+ "spec",
857
+ "expect",
858
+ "assert",
859
+ "mock",
860
+ "stub",
861
+ "fixture",
862
+ "coverage",
863
+ "jest",
864
+ "vitest",
865
+ "bun test",
866
+ "unit test",
867
+ "integration test",
868
+ "e2e"
869
+ ],
870
+ types: ["test"],
871
+ filePatterns: [/\.test\./i, /\.spec\./i, /tests?\//i, /__tests__/i],
872
+ weight: 8
873
+ },
874
+ {
875
+ category: "debugging",
876
+ keywords: [
877
+ "debug",
878
+ "fix",
879
+ "bug",
880
+ "error",
881
+ "crash",
882
+ "stacktrace",
883
+ "stack trace",
884
+ "exception",
885
+ "breakpoint",
886
+ "investigate",
887
+ "root cause",
888
+ "troubleshoot",
889
+ "diagnose",
890
+ "bisect",
891
+ "regression"
892
+ ],
893
+ types: ["bugfix"],
894
+ weight: 8
895
+ },
896
+ {
897
+ category: "architecture",
898
+ keywords: [
899
+ "architect",
900
+ "design",
901
+ "pattern",
902
+ "modular",
903
+ "migration",
904
+ "schema",
905
+ "database",
906
+ "api design",
907
+ "abstract",
908
+ "dependency injection",
909
+ "singleton",
910
+ "factory",
911
+ "observer",
912
+ "middleware",
913
+ "pipeline",
914
+ "microservice",
915
+ "monolith"
916
+ ],
917
+ types: ["decision", "constraint"],
918
+ weight: 7
919
+ },
920
+ {
921
+ category: "refactoring",
922
+ keywords: [
923
+ "refactor",
924
+ "rename",
925
+ "extract",
926
+ "inline",
927
+ "move",
928
+ "split",
929
+ "merge",
930
+ "simplify",
931
+ "cleanup",
932
+ "clean up",
933
+ "dead code",
934
+ "consolidate",
935
+ "reorganize",
936
+ "restructure",
937
+ "decouple"
938
+ ],
939
+ weight: 6
940
+ },
941
+ {
942
+ category: "config",
943
+ keywords: [
944
+ "config",
945
+ "configuration",
946
+ "env",
947
+ "environment",
948
+ "dotenv",
949
+ ".env",
950
+ "settings",
951
+ "tsconfig",
952
+ "eslint",
953
+ "prettier",
954
+ "webpack",
955
+ "vite",
956
+ "esbuild",
957
+ "docker",
958
+ "ci/cd",
959
+ "github actions",
960
+ "deploy",
961
+ "build",
962
+ "bundle",
963
+ "package.json"
964
+ ],
965
+ filePatterns: [
966
+ /\.config\./i,
967
+ /\.env/i,
968
+ /tsconfig/i,
969
+ /\.ya?ml/i,
970
+ /Dockerfile/i,
971
+ /docker-compose/i
972
+ ],
973
+ weight: 5
974
+ },
975
+ {
976
+ category: "docs",
977
+ keywords: [
978
+ "document",
979
+ "readme",
980
+ "changelog",
981
+ "jsdoc",
982
+ "comment",
983
+ "explain",
984
+ "guide",
985
+ "tutorial",
986
+ "api doc",
987
+ "openapi",
988
+ "swagger"
989
+ ],
990
+ types: ["docs"],
991
+ filePatterns: [/\.md$/i, /docs?\//i, /readme/i, /changelog/i],
992
+ weight: 5
993
+ },
994
+ {
995
+ category: "feature-dev",
996
+ keywords: [
997
+ "feature",
998
+ "implement",
999
+ "add",
1000
+ "create",
1001
+ "new",
1002
+ "endpoint",
1003
+ "component",
1004
+ "module",
1005
+ "service",
1006
+ "handler",
1007
+ "route",
1008
+ "hook",
1009
+ "plugin",
1010
+ "integration"
1011
+ ],
1012
+ types: ["feature", "file-write"],
1013
+ weight: 3
1014
+ // lowest — generic catch-all for development
1015
+ }
1016
+ ];
1017
+ function categorize(input) {
1018
+ const scores = /* @__PURE__ */ new Map();
1019
+ const searchText = [
1020
+ input.title,
1021
+ input.text || "",
1022
+ input.narrative || "",
1023
+ input.concepts || ""
1024
+ ].join(" ").toLowerCase();
1025
+ const allFiles = [input.filesModified || "", input.filesRead || ""].join(",");
1026
+ for (const rule of CATEGORY_RULES) {
1027
+ let score = 0;
1028
+ for (const kw of rule.keywords) {
1029
+ if (searchText.includes(kw.toLowerCase())) {
1030
+ score += rule.weight;
1031
+ }
1032
+ }
1033
+ if (rule.types && rule.types.includes(input.type)) {
1034
+ score += rule.weight * 2;
1035
+ }
1036
+ if (rule.filePatterns && allFiles) {
1037
+ for (const pattern of rule.filePatterns) {
1038
+ if (pattern.test(allFiles)) {
1039
+ score += rule.weight;
1040
+ }
1041
+ }
1042
+ }
1043
+ if (score > 0) {
1044
+ scores.set(rule.category, (scores.get(rule.category) || 0) + score);
1045
+ }
1046
+ }
1047
+ let bestCategory = "general";
1048
+ let bestScore = 0;
1049
+ for (const [category, score] of scores) {
1050
+ if (score > bestScore) {
1051
+ bestScore = score;
1052
+ bestCategory = category;
1053
+ }
1054
+ }
1055
+ return bestCategory;
1056
+ }
1057
+
781
1058
  // src/services/sqlite/Observations.ts
782
1059
  function escapeLikePattern(input) {
783
1060
  return input.replace(/[%_\\]/g, "\\$&");
@@ -792,11 +1069,23 @@ function isDuplicateObservation(db, contentHash, windowMs = 3e4) {
792
1069
  }
793
1070
  function createObservation(db, memorySessionId, project, type, title, subtitle, text, narrative, facts, concepts, filesRead, filesModified, promptNumber, contentHash = null, discoveryTokens = 0) {
794
1071
  const now = /* @__PURE__ */ new Date();
1072
+ const safeTitle = redactSecrets(title);
1073
+ const safeText = text ? redactSecrets(text) : text;
1074
+ const safeNarrative = narrative ? redactSecrets(narrative) : narrative;
1075
+ const autoCategory = categorize({
1076
+ type,
1077
+ title: safeTitle,
1078
+ text: safeText,
1079
+ narrative: safeNarrative,
1080
+ concepts,
1081
+ filesModified,
1082
+ filesRead
1083
+ });
795
1084
  const result = db.run(
796
1085
  `INSERT INTO observations
797
- (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch, content_hash, discovery_tokens)
798
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
799
- [memorySessionId, project, type, title, subtitle, text, narrative, facts, concepts, filesRead, filesModified, promptNumber, now.toISOString(), now.getTime(), contentHash, discoveryTokens]
1086
+ (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts, files_read, files_modified, prompt_number, created_at, created_at_epoch, content_hash, discovery_tokens, auto_category)
1087
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
1088
+ [memorySessionId, project, type, safeTitle, subtitle, safeText, safeNarrative, facts, concepts, filesRead, filesModified, promptNumber, now.toISOString(), now.getTime(), contentHash, discoveryTokens, autoCategory]
800
1089
  );
801
1090
  return Number(result.lastInsertRowid);
802
1091
  }
@@ -808,16 +1097,16 @@ function getObservationsBySession(db, memorySessionId) {
808
1097
  }
809
1098
  function getObservationsByProject(db, project, limit = 100) {
810
1099
  const query = db.query(
811
- "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC LIMIT ?"
1100
+ "SELECT * FROM observations WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?"
812
1101
  );
813
1102
  return query.all(project, limit);
814
1103
  }
815
1104
  function searchObservations(db, searchTerm, project) {
816
1105
  const sql = project ? `SELECT * FROM observations
817
1106
  WHERE project = ? AND (title LIKE ? ESCAPE '\\' OR text LIKE ? ESCAPE '\\' OR narrative LIKE ? ESCAPE '\\')
818
- ORDER BY created_at_epoch DESC` : `SELECT * FROM observations
1107
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT * FROM observations
819
1108
  WHERE title LIKE ? ESCAPE '\\' OR text LIKE ? ESCAPE '\\' OR narrative LIKE ? ESCAPE '\\'
820
- ORDER BY created_at_epoch DESC`;
1109
+ ORDER BY created_at_epoch DESC, id DESC`;
821
1110
  const pattern = `%${escapeLikePattern(searchTerm)}%`;
822
1111
  const query = db.query(sql);
823
1112
  if (project) {
@@ -850,21 +1139,32 @@ function consolidateObservations(db, project, options = {}) {
850
1139
  ORDER BY cnt DESC
851
1140
  `).all(project, minGroupSize);
852
1141
  if (groups.length === 0) return { merged: 0, removed: 0 };
853
- let totalMerged = 0;
854
- let totalRemoved = 0;
1142
+ if (options.dryRun) {
1143
+ let totalMerged = 0;
1144
+ let totalRemoved = 0;
1145
+ for (const group of groups) {
1146
+ const obsIds = group.ids.split(",").map(Number);
1147
+ const placeholders = obsIds.map(() => "?").join(",");
1148
+ const count = db.query(
1149
+ `SELECT COUNT(*) as cnt FROM observations WHERE id IN (${placeholders})`
1150
+ ).get(...obsIds)?.cnt || 0;
1151
+ if (count >= minGroupSize) {
1152
+ totalMerged += 1;
1153
+ totalRemoved += count - 1;
1154
+ }
1155
+ }
1156
+ return { merged: totalMerged, removed: totalRemoved };
1157
+ }
855
1158
  const runConsolidation = db.transaction(() => {
1159
+ let merged = 0;
1160
+ let removed = 0;
856
1161
  for (const group of groups) {
857
1162
  const obsIds = group.ids.split(",").map(Number);
858
1163
  const placeholders = obsIds.map(() => "?").join(",");
859
1164
  const observations = db.query(
860
- `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC`
1165
+ `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC, id DESC`
861
1166
  ).all(...obsIds);
862
1167
  if (observations.length < minGroupSize) continue;
863
- if (options.dryRun) {
864
- totalMerged += 1;
865
- totalRemoved += observations.length - 1;
866
- continue;
867
- }
868
1168
  const keeper = observations[0];
869
1169
  const others = observations.slice(1);
870
1170
  const uniqueTexts = /* @__PURE__ */ new Set();
@@ -877,18 +1177,18 @@ function consolidateObservations(db, project, options = {}) {
877
1177
  const consolidatedText = Array.from(uniqueTexts).join("\n---\n").substring(0, 1e5);
878
1178
  db.run(
879
1179
  "UPDATE observations SET text = ?, title = ? WHERE id = ?",
880
- [consolidatedText, `[consolidato x${observations.length}] ${keeper.title}`, keeper.id]
1180
+ [consolidatedText, `[consolidated x${observations.length}] ${keeper.title}`, keeper.id]
881
1181
  );
882
1182
  const removeIds = others.map((o) => o.id);
883
1183
  const removePlaceholders = removeIds.map(() => "?").join(",");
884
1184
  db.run(`DELETE FROM observations WHERE id IN (${removePlaceholders})`, removeIds);
885
1185
  db.run(`DELETE FROM observation_embeddings WHERE observation_id IN (${removePlaceholders})`, removeIds);
886
- totalMerged += 1;
887
- totalRemoved += removeIds.length;
1186
+ merged += 1;
1187
+ removed += removeIds.length;
888
1188
  }
1189
+ return { merged, removed };
889
1190
  });
890
- runConsolidation();
891
- return { merged: totalMerged, removed: totalRemoved };
1191
+ return runConsolidation();
892
1192
  }
893
1193
 
894
1194
  // src/services/sqlite/Summaries.ts
@@ -906,21 +1206,21 @@ function createSummary(db, sessionId, project, request, investigated, learned, c
906
1206
  return Number(result.lastInsertRowid);
907
1207
  }
908
1208
  function getSummaryBySession(db, sessionId) {
909
- const query = db.query("SELECT * FROM summaries WHERE session_id = ? ORDER BY created_at_epoch DESC LIMIT 1");
1209
+ const query = db.query("SELECT * FROM summaries WHERE session_id = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1");
910
1210
  return query.get(sessionId);
911
1211
  }
912
1212
  function getSummariesByProject(db, project, limit = 50) {
913
1213
  const query = db.query(
914
- "SELECT * FROM summaries WHERE project = ? ORDER BY created_at_epoch DESC LIMIT ?"
1214
+ "SELECT * FROM summaries WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?"
915
1215
  );
916
1216
  return query.all(project, limit);
917
1217
  }
918
1218
  function searchSummaries(db, searchTerm, project) {
919
1219
  const sql = project ? `SELECT * FROM summaries
920
1220
  WHERE project = ? AND (request LIKE ? ESCAPE '\\' OR learned LIKE ? ESCAPE '\\' OR completed LIKE ? ESCAPE '\\' OR notes LIKE ? ESCAPE '\\')
921
- ORDER BY created_at_epoch DESC` : `SELECT * FROM summaries
1221
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT * FROM summaries
922
1222
  WHERE request LIKE ? ESCAPE '\\' OR learned LIKE ? ESCAPE '\\' OR completed LIKE ? ESCAPE '\\' OR notes LIKE ? ESCAPE '\\'
923
- ORDER BY created_at_epoch DESC`;
1223
+ ORDER BY created_at_epoch DESC, id DESC`;
924
1224
  const pattern = `%${escapeLikePattern2(searchTerm)}%`;
925
1225
  const query = db.query(sql);
926
1226
  if (project) {
@@ -951,7 +1251,7 @@ function getPromptsBySession(db, contentSessionId) {
951
1251
  }
952
1252
  function getPromptsByProject(db, project, limit = 100) {
953
1253
  const query = db.query(
954
- "SELECT * FROM prompts WHERE project = ? ORDER BY created_at_epoch DESC LIMIT ?"
1254
+ "SELECT * FROM prompts WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT ?"
955
1255
  );
956
1256
  return query.all(project, limit);
957
1257
  }
@@ -988,19 +1288,19 @@ function createCheckpoint(db, sessionId, project, data) {
988
1288
  }
989
1289
  function getLatestCheckpoint(db, sessionId) {
990
1290
  const query = db.query(
991
- "SELECT * FROM checkpoints WHERE session_id = ? ORDER BY created_at_epoch DESC LIMIT 1"
1291
+ "SELECT * FROM checkpoints WHERE session_id = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1"
992
1292
  );
993
1293
  return query.get(sessionId);
994
1294
  }
995
1295
  function getLatestCheckpointByProject(db, project) {
996
1296
  const query = db.query(
997
- "SELECT * FROM checkpoints WHERE project = ? ORDER BY created_at_epoch DESC LIMIT 1"
1297
+ "SELECT * FROM checkpoints WHERE project = ? ORDER BY created_at_epoch DESC, id DESC LIMIT 1"
998
1298
  );
999
1299
  return query.get(project);
1000
1300
  }
1001
1301
  function getCheckpointsBySession(db, sessionId) {
1002
1302
  const query = db.query(
1003
- "SELECT * FROM checkpoints WHERE session_id = ? ORDER BY created_at_epoch DESC"
1303
+ "SELECT * FROM checkpoints WHERE session_id = ? ORDER BY created_at_epoch DESC, id DESC"
1004
1304
  );
1005
1305
  return query.all(sessionId);
1006
1306
  }
@@ -1062,9 +1362,9 @@ function getReportData(db, project, startEpoch, endEpoch) {
1062
1362
  const staleCount = (project ? db.query(staleSql).get(project, startEpoch, endEpoch)?.count : db.query(staleSql).get(startEpoch, endEpoch)?.count) || 0;
1063
1363
  const summarySql = project ? `SELECT learned, completed, next_steps FROM summaries
1064
1364
  WHERE project = ? AND created_at_epoch >= ? AND created_at_epoch <= ?
1065
- ORDER BY created_at_epoch DESC` : `SELECT learned, completed, next_steps FROM summaries
1365
+ ORDER BY created_at_epoch DESC, id DESC` : `SELECT learned, completed, next_steps FROM summaries
1066
1366
  WHERE created_at_epoch >= ? AND created_at_epoch <= ?
1067
- ORDER BY created_at_epoch DESC`;
1367
+ ORDER BY created_at_epoch DESC, id DESC`;
1068
1368
  const summaryRows = project ? db.query(summarySql).all(project, startEpoch, endEpoch) : db.query(summarySql).all(startEpoch, endEpoch);
1069
1369
  const topLearnings = [];
1070
1370
  const completedTasks = [];
@@ -1134,7 +1434,7 @@ function escapeLikePattern3(input) {
1134
1434
  }
1135
1435
  function sanitizeFTS5Query(query) {
1136
1436
  const trimmed = query.length > 1e4 ? query.substring(0, 1e4) : query;
1137
- const terms = trimmed.replace(/[""]/g, "").split(/\s+/).filter((t) => t.length > 0).slice(0, 100).map((t) => `"${t}"`);
1437
+ const terms = trimmed.replace(/[""\u0022]/g, "").split(/\s+/).filter((t) => t.length > 0).slice(0, 100).map((t) => `"${t}"`);
1138
1438
  return terms.join(" ");
1139
1439
  }
1140
1440
  function searchObservationsFTS(db, query, filters = {}) {
@@ -1231,7 +1531,7 @@ function searchObservationsLIKE(db, query, filters = {}) {
1231
1531
  sql += " AND created_at_epoch <= ?";
1232
1532
  params.push(filters.dateEnd);
1233
1533
  }
1234
- sql += " ORDER BY created_at_epoch DESC LIMIT ?";
1534
+ sql += " ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
1235
1535
  params.push(limit);
1236
1536
  const stmt = db.query(sql);
1237
1537
  return stmt.all(...params);
@@ -1256,7 +1556,7 @@ function searchSummariesFiltered(db, query, filters = {}) {
1256
1556
  sql += " AND created_at_epoch <= ?";
1257
1557
  params.push(filters.dateEnd);
1258
1558
  }
1259
- sql += " ORDER BY created_at_epoch DESC LIMIT ?";
1559
+ sql += " ORDER BY created_at_epoch DESC, id DESC LIMIT ?";
1260
1560
  params.push(limit);
1261
1561
  const stmt = db.query(sql);
1262
1562
  return stmt.all(...params);
@@ -1266,7 +1566,7 @@ function getObservationsByIds(db, ids) {
1266
1566
  const validIds = ids.filter((id) => typeof id === "number" && Number.isInteger(id) && id > 0).slice(0, 500);
1267
1567
  if (validIds.length === 0) return [];
1268
1568
  const placeholders = validIds.map(() => "?").join(",");
1269
- const sql = `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC`;
1569
+ const sql = `SELECT * FROM observations WHERE id IN (${placeholders}) ORDER BY created_at_epoch DESC, id DESC`;
1270
1570
  const stmt = db.query(sql);
1271
1571
  return stmt.all(...validIds);
1272
1572
  }
@@ -1278,11 +1578,11 @@ function getTimeline(db, anchorId, depthBefore = 5, depthAfter = 5) {
1278
1578
  const beforeStmt = db.query(`
1279
1579
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
1280
1580
  FROM observations
1281
- WHERE created_at_epoch < ?
1282
- ORDER BY created_at_epoch DESC
1581
+ WHERE (created_at_epoch < ? OR (created_at_epoch = ? AND id < ?))
1582
+ ORDER BY created_at_epoch DESC, id DESC
1283
1583
  LIMIT ?
1284
1584
  `);
1285
- const before = beforeStmt.all(anchorEpoch, depthBefore).reverse();
1585
+ const before = beforeStmt.all(anchorEpoch, anchorEpoch, anchorId, depthBefore).reverse();
1286
1586
  const selfStmt = db.query(`
1287
1587
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
1288
1588
  FROM observations WHERE id = ?
@@ -1291,34 +1591,46 @@ function getTimeline(db, anchorId, depthBefore = 5, depthAfter = 5) {
1291
1591
  const afterStmt = db.query(`
1292
1592
  SELECT id, 'observation' as type, title, text as content, project, created_at, created_at_epoch
1293
1593
  FROM observations
1294
- WHERE created_at_epoch > ?
1295
- ORDER BY created_at_epoch ASC
1594
+ WHERE (created_at_epoch > ? OR (created_at_epoch = ? AND id > ?))
1595
+ ORDER BY created_at_epoch ASC, id ASC
1296
1596
  LIMIT ?
1297
1597
  `);
1298
- const after = afterStmt.all(anchorEpoch, depthAfter);
1598
+ const after = afterStmt.all(anchorEpoch, anchorEpoch, anchorId, depthAfter);
1299
1599
  return [...before, ...self, ...after];
1300
1600
  }
1301
1601
  function getProjectStats(db, project) {
1302
- const obsStmt = db.query("SELECT COUNT(*) as count FROM observations WHERE project = ?");
1303
- const sumStmt = db.query("SELECT COUNT(*) as count FROM summaries WHERE project = ?");
1304
- const sesStmt = db.query("SELECT COUNT(*) as count FROM sessions WHERE project = ?");
1305
- const prmStmt = db.query("SELECT COUNT(*) as count FROM prompts WHERE project = ?");
1306
- const discoveryStmt = db.query(
1307
- "SELECT COALESCE(SUM(discovery_tokens), 0) as total FROM observations WHERE project = ?"
1308
- );
1309
- const discoveryTokens = discoveryStmt.get(project)?.total || 0;
1310
- const readStmt = db.query(
1311
- `SELECT COALESCE(SUM(
1312
- CAST((LENGTH(COALESCE(title, '')) + LENGTH(COALESCE(narrative, ''))) / 4 AS INTEGER)
1313
- ), 0) as total FROM observations WHERE project = ?`
1314
- );
1315
- const readTokens = readStmt.get(project)?.total || 0;
1602
+ const sql = `
1603
+ WITH
1604
+ obs_stats AS (
1605
+ SELECT
1606
+ COUNT(*) as count,
1607
+ COALESCE(SUM(discovery_tokens), 0) as discovery_tokens,
1608
+ COALESCE(SUM(
1609
+ CAST((LENGTH(COALESCE(title, '')) + LENGTH(COALESCE(narrative, ''))) / 4 AS INTEGER)
1610
+ ), 0) as read_tokens
1611
+ FROM observations WHERE project = ?
1612
+ ),
1613
+ sum_count AS (SELECT COUNT(*) as count FROM summaries WHERE project = ?),
1614
+ ses_count AS (SELECT COUNT(*) as count FROM sessions WHERE project = ?),
1615
+ prm_count AS (SELECT COUNT(*) as count FROM prompts WHERE project = ?)
1616
+ SELECT
1617
+ obs_stats.count as observations,
1618
+ obs_stats.discovery_tokens,
1619
+ obs_stats.read_tokens,
1620
+ sum_count.count as summaries,
1621
+ ses_count.count as sessions,
1622
+ prm_count.count as prompts
1623
+ FROM obs_stats, sum_count, ses_count, prm_count
1624
+ `;
1625
+ const row = db.query(sql).get(project, project, project, project);
1626
+ const discoveryTokens = row?.discovery_tokens || 0;
1627
+ const readTokens = row?.read_tokens || 0;
1316
1628
  const savings = Math.max(0, discoveryTokens - readTokens);
1317
1629
  return {
1318
- observations: obsStmt.get(project)?.count || 0,
1319
- summaries: sumStmt.get(project)?.count || 0,
1320
- sessions: sesStmt.get(project)?.count || 0,
1321
- prompts: prmStmt.get(project)?.count || 0,
1630
+ observations: row?.observations || 0,
1631
+ summaries: row?.summaries || 0,
1632
+ sessions: row?.sessions || 0,
1633
+ prompts: row?.prompts || 0,
1322
1634
  tokenEconomics: { discoveryTokens, readTokens, savings }
1323
1635
  };
1324
1636
  }
@@ -1326,7 +1638,7 @@ function getStaleObservations(db, project) {
1326
1638
  const rows = db.query(`
1327
1639
  SELECT * FROM observations
1328
1640
  WHERE project = ? AND files_modified IS NOT NULL AND files_modified != ''
1329
- ORDER BY created_at_epoch DESC
1641
+ ORDER BY created_at_epoch DESC, id DESC
1330
1642
  LIMIT 500
1331
1643
  `).all(project);
1332
1644
  const staleObs = [];
@@ -1361,25 +1673,925 @@ function markObservationsStale(db, ids, stale) {
1361
1673
  [stale ? 1 : 0, ...validIds]
1362
1674
  );
1363
1675
  }
1676
+
1677
+ // src/services/sqlite/GithubLinks.ts
1678
+ function createGithubLink(db, data) {
1679
+ const now = /* @__PURE__ */ new Date();
1680
+ const result = db.run(
1681
+ `INSERT INTO github_links
1682
+ (observation_id, session_id, repo, issue_number, pr_number, event_type,
1683
+ action, title, url, author, created_at, created_at_epoch)
1684
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
1685
+ [
1686
+ data.observation_id ?? null,
1687
+ data.session_id ?? null,
1688
+ data.repo,
1689
+ data.issue_number ?? null,
1690
+ data.pr_number ?? null,
1691
+ data.event_type,
1692
+ data.action ?? null,
1693
+ data.title ?? null,
1694
+ data.url ?? null,
1695
+ data.author ?? null,
1696
+ now.toISOString(),
1697
+ now.getTime()
1698
+ ]
1699
+ );
1700
+ return Number(result.lastInsertRowid);
1701
+ }
1702
+ function getGithubLinksByObservation(db, observationId) {
1703
+ return db.query(
1704
+ `SELECT * FROM github_links
1705
+ WHERE observation_id = ?
1706
+ ORDER BY created_at_epoch DESC, id DESC`
1707
+ ).all(observationId);
1708
+ }
1709
+ function getGithubLinksByRepo(db, repo, limit = 50) {
1710
+ return db.query(
1711
+ `SELECT * FROM github_links
1712
+ WHERE repo = ?
1713
+ ORDER BY created_at_epoch DESC, id DESC
1714
+ LIMIT ?`
1715
+ ).all(repo, limit);
1716
+ }
1717
+ function getGithubLinksByIssue(db, repo, issueNumber) {
1718
+ return db.query(
1719
+ `SELECT * FROM github_links
1720
+ WHERE repo = ? AND issue_number = ?
1721
+ ORDER BY created_at_epoch DESC, id DESC`
1722
+ ).all(repo, issueNumber);
1723
+ }
1724
+ function getGithubLinksByPR(db, repo, prNumber) {
1725
+ return db.query(
1726
+ `SELECT * FROM github_links
1727
+ WHERE repo = ? AND pr_number = ?
1728
+ ORDER BY created_at_epoch DESC, id DESC`
1729
+ ).all(repo, prNumber);
1730
+ }
1731
+ function searchGithubLinks(db, query, options = {}) {
1732
+ const { repo, event_type, limit = 50 } = options;
1733
+ const safeLimit = Math.min(Math.max(1, limit), 200);
1734
+ const conditions = [];
1735
+ const params = [];
1736
+ if (query && query.trim().length > 0) {
1737
+ const pattern = `%${query.replace(/[%_\\]/g, "\\$&")}%`;
1738
+ conditions.push(`(title LIKE ? ESCAPE '\\' OR url LIKE ? ESCAPE '\\')`);
1739
+ params.push(pattern, pattern);
1740
+ }
1741
+ if (repo) {
1742
+ conditions.push("repo = ?");
1743
+ params.push(repo);
1744
+ }
1745
+ if (event_type) {
1746
+ conditions.push("event_type = ?");
1747
+ params.push(event_type);
1748
+ }
1749
+ const where = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1750
+ params.push(safeLimit);
1751
+ return db.query(
1752
+ `SELECT * FROM github_links
1753
+ ${where}
1754
+ ORDER BY created_at_epoch DESC, id DESC
1755
+ LIMIT ?`
1756
+ ).all(...params);
1757
+ }
1758
+ function listReposWithLinkCount(db) {
1759
+ return db.query(
1760
+ `SELECT repo,
1761
+ COUNT(*) as count,
1762
+ MAX(created_at) as last_event_at
1763
+ FROM github_links
1764
+ GROUP BY repo
1765
+ ORDER BY count DESC, repo ASC`
1766
+ ).all();
1767
+ }
1768
+
1769
+ // src/services/sqlite/ImportExport.ts
1770
+ import { createHash } from "crypto";
1771
+ var JSONL_SCHEMA_VERSION = "2.5.0";
1772
+ var IMPORT_BATCH_SIZE = 100;
1773
+ function countExportRecords(db, filters) {
1774
+ const { fromEpoch, toEpoch } = filtersToEpoch(filters);
1775
+ const obsConds = buildConditions({ project: filters.project, type: filters.type, fromEpoch, toEpoch });
1776
+ const sumConds = buildConditions({ project: filters.project, fromEpoch, toEpoch });
1777
+ const promptConds = buildConditions({ project: filters.project, fromEpoch, toEpoch });
1778
+ const obsCount = db.query(
1779
+ `SELECT COUNT(*) as c FROM observations WHERE ${obsConds.where}`
1780
+ ).get(...obsConds.params).c;
1781
+ const sumCount = db.query(
1782
+ `SELECT COUNT(*) as c FROM summaries WHERE ${sumConds.where}`
1783
+ ).get(...sumConds.params).c;
1784
+ const promptCount = db.query(
1785
+ `SELECT COUNT(*) as c FROM prompts WHERE ${promptConds.where}`
1786
+ ).get(...promptConds.params).c;
1787
+ return { observations: obsCount, summaries: sumCount, prompts: promptCount };
1788
+ }
1789
+ function generateMetaRecord(db, filters) {
1790
+ const counts = countExportRecords(db, filters);
1791
+ const meta = {
1792
+ _meta: {
1793
+ version: JSONL_SCHEMA_VERSION,
1794
+ exported_at: (/* @__PURE__ */ new Date()).toISOString(),
1795
+ counts,
1796
+ filters: Object.keys(filters).length > 0 ? filters : void 0
1797
+ }
1798
+ };
1799
+ return JSON.stringify(meta);
1800
+ }
1801
+ function exportObservationsStreaming(db, filters, onRow, batchSize = 200) {
1802
+ const { fromEpoch, toEpoch } = filtersToEpoch(filters);
1803
+ const conds = buildConditions({ project: filters.project, type: filters.type, fromEpoch, toEpoch });
1804
+ let offset = 0;
1805
+ let total = 0;
1806
+ while (true) {
1807
+ const rows = db.query(
1808
+ `SELECT id, memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts,
1809
+ files_read, files_modified, prompt_number, content_hash, discovery_tokens, auto_category,
1810
+ created_at, created_at_epoch
1811
+ FROM observations
1812
+ WHERE ${conds.where}
1813
+ ORDER BY created_at_epoch ASC, id ASC
1814
+ LIMIT ? OFFSET ?`
1815
+ ).all(...conds.params, batchSize, offset);
1816
+ if (rows.length === 0) break;
1817
+ for (const row of rows) {
1818
+ const record = {
1819
+ _type: "observation",
1820
+ id: row.id,
1821
+ memory_session_id: row.memory_session_id,
1822
+ project: row.project,
1823
+ type: row.type,
1824
+ title: row.title,
1825
+ subtitle: row.subtitle,
1826
+ text: row.text,
1827
+ narrative: row.narrative,
1828
+ facts: row.facts,
1829
+ concepts: row.concepts,
1830
+ files_read: row.files_read,
1831
+ files_modified: row.files_modified,
1832
+ prompt_number: row.prompt_number,
1833
+ content_hash: row.content_hash,
1834
+ discovery_tokens: row.discovery_tokens ?? 0,
1835
+ auto_category: row.auto_category,
1836
+ created_at: row.created_at,
1837
+ created_at_epoch: row.created_at_epoch
1838
+ };
1839
+ onRow(JSON.stringify(record));
1840
+ total++;
1841
+ }
1842
+ offset += rows.length;
1843
+ if (rows.length < batchSize) break;
1844
+ }
1845
+ return total;
1846
+ }
1847
+ function exportSummariesStreaming(db, filters, onRow, batchSize = 200) {
1848
+ const { fromEpoch, toEpoch } = filtersToEpoch(filters);
1849
+ const conds = buildConditions({ project: filters.project, fromEpoch, toEpoch });
1850
+ let offset = 0;
1851
+ let total = 0;
1852
+ while (true) {
1853
+ const rows = db.query(
1854
+ `SELECT id, session_id, project, request, investigated, learned, completed, next_steps, notes,
1855
+ discovery_tokens, created_at, created_at_epoch
1856
+ FROM summaries
1857
+ WHERE ${conds.where}
1858
+ ORDER BY created_at_epoch ASC, id ASC
1859
+ LIMIT ? OFFSET ?`
1860
+ ).all(...conds.params, batchSize, offset);
1861
+ if (rows.length === 0) break;
1862
+ for (const row of rows) {
1863
+ const record = {
1864
+ _type: "summary",
1865
+ id: row.id,
1866
+ session_id: row.session_id,
1867
+ project: row.project,
1868
+ request: row.request,
1869
+ investigated: row.investigated,
1870
+ learned: row.learned,
1871
+ completed: row.completed,
1872
+ next_steps: row.next_steps,
1873
+ notes: row.notes,
1874
+ discovery_tokens: row.discovery_tokens ?? 0,
1875
+ created_at: row.created_at,
1876
+ created_at_epoch: row.created_at_epoch
1877
+ };
1878
+ onRow(JSON.stringify(record));
1879
+ total++;
1880
+ }
1881
+ offset += rows.length;
1882
+ if (rows.length < batchSize) break;
1883
+ }
1884
+ return total;
1885
+ }
1886
+ function exportPromptsStreaming(db, filters, onRow, batchSize = 200) {
1887
+ const { fromEpoch, toEpoch } = filtersToEpoch(filters);
1888
+ const conds = buildConditions({ project: filters.project, fromEpoch, toEpoch });
1889
+ let offset = 0;
1890
+ let total = 0;
1891
+ while (true) {
1892
+ const rows = db.query(
1893
+ `SELECT id, content_session_id, project, prompt_number, prompt_text, created_at, created_at_epoch
1894
+ FROM prompts
1895
+ WHERE ${conds.where}
1896
+ ORDER BY created_at_epoch ASC, id ASC
1897
+ LIMIT ? OFFSET ?`
1898
+ ).all(...conds.params, batchSize, offset);
1899
+ if (rows.length === 0) break;
1900
+ for (const row of rows) {
1901
+ const record = {
1902
+ _type: "prompt",
1903
+ id: row.id,
1904
+ content_session_id: row.content_session_id,
1905
+ project: row.project,
1906
+ prompt_number: row.prompt_number,
1907
+ prompt_text: row.prompt_text,
1908
+ created_at: row.created_at,
1909
+ created_at_epoch: row.created_at_epoch
1910
+ };
1911
+ onRow(JSON.stringify(record));
1912
+ total++;
1913
+ }
1914
+ offset += rows.length;
1915
+ if (rows.length < batchSize) break;
1916
+ }
1917
+ return total;
1918
+ }
1919
+ function validateJsonlRow(raw) {
1920
+ if (!raw || typeof raw !== "object") {
1921
+ return "Il record non \xE8 un oggetto JSON valido";
1922
+ }
1923
+ const rec = raw;
1924
+ if ("_meta" in rec) return null;
1925
+ const validTypes = ["observation", "summary", "prompt"];
1926
+ if (!rec._type || typeof rec._type !== "string" || !validTypes.includes(rec._type)) {
1927
+ return `Campo "_type" obbligatorio, uno di: ${validTypes.join(", ")}`;
1928
+ }
1929
+ if (rec._type === "observation") {
1930
+ if (!rec.project || typeof rec.project !== "string") return 'observation: campo "project" obbligatorio';
1931
+ if (!rec.type || typeof rec.type !== "string") return 'observation: campo "type" obbligatorio';
1932
+ if (!rec.title || typeof rec.title !== "string") return 'observation: campo "title" obbligatorio';
1933
+ if (rec.project.length > 200) return 'observation: "project" troppo lungo (max 200)';
1934
+ if (rec.title.length > 500) return 'observation: "title" troppo lungo (max 500)';
1935
+ } else if (rec._type === "summary") {
1936
+ if (!rec.project || typeof rec.project !== "string") return 'summary: campo "project" obbligatorio';
1937
+ if (!rec.session_id || typeof rec.session_id !== "string") return 'summary: campo "session_id" obbligatorio';
1938
+ } else if (rec._type === "prompt") {
1939
+ if (!rec.project || typeof rec.project !== "string") return 'prompt: campo "project" obbligatorio';
1940
+ if (!rec.content_session_id || typeof rec.content_session_id !== "string") return 'prompt: campo "content_session_id" obbligatorio';
1941
+ if (!rec.prompt_text || typeof rec.prompt_text !== "string") return 'prompt: campo "prompt_text" obbligatorio';
1942
+ }
1943
+ return null;
1944
+ }
1945
+ function computeImportHash(rec) {
1946
+ const payload = [
1947
+ rec.project ?? "",
1948
+ rec.type ?? "",
1949
+ rec.title ?? "",
1950
+ rec.narrative ?? ""
1951
+ ].join("|");
1952
+ return createHash("sha256").update(payload).digest("hex");
1953
+ }
1954
+ function hashExistsInObservations(db, hash) {
1955
+ const result = db.query(
1956
+ "SELECT id FROM observations WHERE content_hash = ? LIMIT 1"
1957
+ ).get(hash);
1958
+ return !!result;
1959
+ }
1960
+ function importObservationBatch(db, records, dryRun) {
1961
+ let imported = 0;
1962
+ let skipped = 0;
1963
+ for (let i = 0; i < records.length; i += IMPORT_BATCH_SIZE) {
1964
+ const batch = records.slice(i, i + IMPORT_BATCH_SIZE);
1965
+ if (dryRun) {
1966
+ for (const rec of batch) {
1967
+ const hash = rec.content_hash || computeImportHash(rec);
1968
+ if (hashExistsInObservations(db, hash)) {
1969
+ skipped++;
1970
+ } else {
1971
+ imported++;
1972
+ }
1973
+ }
1974
+ continue;
1975
+ }
1976
+ const insertBatch = db.transaction(() => {
1977
+ for (const rec of batch) {
1978
+ const hash = rec.content_hash || computeImportHash(rec);
1979
+ if (hashExistsInObservations(db, hash)) {
1980
+ skipped++;
1981
+ continue;
1982
+ }
1983
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1984
+ db.run(
1985
+ `INSERT INTO observations
1986
+ (memory_session_id, project, type, title, subtitle, text, narrative, facts, concepts,
1987
+ files_read, files_modified, prompt_number, content_hash, discovery_tokens, auto_category,
1988
+ created_at, created_at_epoch)
1989
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
1990
+ [
1991
+ rec.memory_session_id || "imported",
1992
+ rec.project,
1993
+ rec.type,
1994
+ rec.title,
1995
+ rec.subtitle ?? null,
1996
+ rec.text ?? null,
1997
+ rec.narrative ?? null,
1998
+ rec.facts ?? null,
1999
+ rec.concepts ?? null,
2000
+ rec.files_read ?? null,
2001
+ rec.files_modified ?? null,
2002
+ rec.prompt_number ?? 0,
2003
+ hash,
2004
+ rec.discovery_tokens ?? 0,
2005
+ rec.auto_category ?? null,
2006
+ rec.created_at || now,
2007
+ rec.created_at_epoch || Date.now()
2008
+ ]
2009
+ );
2010
+ imported++;
2011
+ }
2012
+ });
2013
+ insertBatch();
2014
+ }
2015
+ return { imported, skipped };
2016
+ }
2017
+ function importSummaryBatch(db, records, dryRun) {
2018
+ let imported = 0;
2019
+ let skipped = 0;
2020
+ for (let i = 0; i < records.length; i += IMPORT_BATCH_SIZE) {
2021
+ const batch = records.slice(i, i + IMPORT_BATCH_SIZE);
2022
+ if (dryRun) {
2023
+ for (const rec of batch) {
2024
+ const exists = db.query(
2025
+ "SELECT id FROM summaries WHERE session_id = ? AND project = ? AND created_at_epoch = ? LIMIT 1"
2026
+ ).get(rec.session_id, rec.project, rec.created_at_epoch ?? 0);
2027
+ if (exists) skipped++;
2028
+ else imported++;
2029
+ }
2030
+ continue;
2031
+ }
2032
+ const insertBatch = db.transaction(() => {
2033
+ for (const rec of batch) {
2034
+ const exists = db.query(
2035
+ "SELECT id FROM summaries WHERE session_id = ? AND project = ? AND created_at_epoch = ? LIMIT 1"
2036
+ ).get(rec.session_id, rec.project, rec.created_at_epoch ?? 0);
2037
+ if (exists) {
2038
+ skipped++;
2039
+ continue;
2040
+ }
2041
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2042
+ db.run(
2043
+ `INSERT INTO summaries
2044
+ (session_id, project, request, investigated, learned, completed, next_steps, notes,
2045
+ discovery_tokens, created_at, created_at_epoch)
2046
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2047
+ [
2048
+ rec.session_id,
2049
+ rec.project,
2050
+ rec.request ?? null,
2051
+ rec.investigated ?? null,
2052
+ rec.learned ?? null,
2053
+ rec.completed ?? null,
2054
+ rec.next_steps ?? null,
2055
+ rec.notes ?? null,
2056
+ rec.discovery_tokens ?? 0,
2057
+ rec.created_at || now,
2058
+ rec.created_at_epoch || Date.now()
2059
+ ]
2060
+ );
2061
+ imported++;
2062
+ }
2063
+ });
2064
+ insertBatch();
2065
+ }
2066
+ return { imported, skipped };
2067
+ }
2068
+ function importPromptBatch(db, records, dryRun) {
2069
+ let imported = 0;
2070
+ let skipped = 0;
2071
+ for (let i = 0; i < records.length; i += IMPORT_BATCH_SIZE) {
2072
+ const batch = records.slice(i, i + IMPORT_BATCH_SIZE);
2073
+ if (dryRun) {
2074
+ for (const rec of batch) {
2075
+ const exists = db.query(
2076
+ "SELECT id FROM prompts WHERE content_session_id = ? AND prompt_number = ? LIMIT 1"
2077
+ ).get(rec.content_session_id, rec.prompt_number ?? 0);
2078
+ if (exists) skipped++;
2079
+ else imported++;
2080
+ }
2081
+ continue;
2082
+ }
2083
+ const insertBatch = db.transaction(() => {
2084
+ for (const rec of batch) {
2085
+ const exists = db.query(
2086
+ "SELECT id FROM prompts WHERE content_session_id = ? AND prompt_number = ? LIMIT 1"
2087
+ ).get(rec.content_session_id, rec.prompt_number ?? 0);
2088
+ if (exists) {
2089
+ skipped++;
2090
+ continue;
2091
+ }
2092
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2093
+ db.run(
2094
+ `INSERT INTO prompts
2095
+ (content_session_id, project, prompt_number, prompt_text, created_at, created_at_epoch)
2096
+ VALUES (?, ?, ?, ?, ?, ?)`,
2097
+ [
2098
+ rec.content_session_id,
2099
+ rec.project,
2100
+ rec.prompt_number ?? 0,
2101
+ rec.prompt_text,
2102
+ rec.created_at || now,
2103
+ rec.created_at_epoch || Date.now()
2104
+ ]
2105
+ );
2106
+ imported++;
2107
+ }
2108
+ });
2109
+ insertBatch();
2110
+ }
2111
+ return { imported, skipped };
2112
+ }
2113
+ function importJsonl(db, content, dryRun = false) {
2114
+ const lines = content.split("\n");
2115
+ const result = {
2116
+ imported: 0,
2117
+ skipped: 0,
2118
+ errors: 0,
2119
+ total: 0,
2120
+ errorDetails: []
2121
+ };
2122
+ const obsBuf = [];
2123
+ const sumBuf = [];
2124
+ const promptBuf = [];
2125
+ const flushBuffers = () => {
2126
+ if (obsBuf.length > 0) {
2127
+ const r = importObservationBatch(db, obsBuf.splice(0), dryRun);
2128
+ result.imported += r.imported;
2129
+ result.skipped += r.skipped;
2130
+ }
2131
+ if (sumBuf.length > 0) {
2132
+ const r = importSummaryBatch(db, sumBuf.splice(0), dryRun);
2133
+ result.imported += r.imported;
2134
+ result.skipped += r.skipped;
2135
+ }
2136
+ if (promptBuf.length > 0) {
2137
+ const r = importPromptBatch(db, promptBuf.splice(0), dryRun);
2138
+ result.imported += r.imported;
2139
+ result.skipped += r.skipped;
2140
+ }
2141
+ };
2142
+ for (let i = 0; i < lines.length; i++) {
2143
+ const raw = lines[i].trim();
2144
+ if (!raw || raw.startsWith("#")) continue;
2145
+ result.total++;
2146
+ let parsed;
2147
+ try {
2148
+ parsed = JSON.parse(raw);
2149
+ } catch {
2150
+ result.errors++;
2151
+ result.errorDetails.push({ line: i + 1, error: `JSON non valido: ${raw.substring(0, 60)}` });
2152
+ continue;
2153
+ }
2154
+ if (parsed && typeof parsed === "object" && "_meta" in parsed) {
2155
+ result.total--;
2156
+ continue;
2157
+ }
2158
+ const validErr = validateJsonlRow(parsed);
2159
+ if (validErr) {
2160
+ result.errors++;
2161
+ result.errorDetails.push({ line: i + 1, error: validErr });
2162
+ continue;
2163
+ }
2164
+ const rec = parsed;
2165
+ if (rec._type === "observation") {
2166
+ obsBuf.push(rec);
2167
+ } else if (rec._type === "summary") {
2168
+ sumBuf.push(rec);
2169
+ } else if (rec._type === "prompt") {
2170
+ promptBuf.push(rec);
2171
+ }
2172
+ const totalBuf = obsBuf.length + sumBuf.length + promptBuf.length;
2173
+ if (totalBuf >= IMPORT_BATCH_SIZE) {
2174
+ flushBuffers();
2175
+ }
2176
+ }
2177
+ flushBuffers();
2178
+ return result;
2179
+ }
2180
+ function filtersToEpoch(filters) {
2181
+ return {
2182
+ fromEpoch: filters.from ? new Date(filters.from).getTime() : void 0,
2183
+ toEpoch: filters.to ? new Date(filters.to).getTime() : void 0
2184
+ };
2185
+ }
2186
+ function buildConditions(params) {
2187
+ const conditions = ["1=1"];
2188
+ const values = [];
2189
+ if (params.project) {
2190
+ conditions.push("project = ?");
2191
+ values.push(params.project);
2192
+ }
2193
+ if (params.type) {
2194
+ conditions.push("type = ?");
2195
+ values.push(params.type);
2196
+ }
2197
+ if (params.fromEpoch !== void 0) {
2198
+ conditions.push("created_at_epoch >= ?");
2199
+ values.push(params.fromEpoch);
2200
+ }
2201
+ if (params.toEpoch !== void 0) {
2202
+ conditions.push("created_at_epoch <= ?");
2203
+ values.push(params.toEpoch);
2204
+ }
2205
+ return { where: conditions.join(" AND "), params: values };
2206
+ }
2207
+
2208
+ // src/types/worker-types.ts
2209
+ var KNOWLEDGE_TYPES = ["constraint", "decision", "heuristic", "rejected"];
2210
+
2211
+ // src/services/sqlite/Retention.ts
2212
+ var KNOWLEDGE_TYPE_LIST = KNOWLEDGE_TYPES;
2213
+ var KNOWLEDGE_PLACEHOLDERS = KNOWLEDGE_TYPE_LIST.map(() => "?").join(", ");
2214
+ function toEpochThreshold(maxAgeDays) {
2215
+ if (maxAgeDays <= 0) return null;
2216
+ return Date.now() - maxAgeDays * 864e5;
2217
+ }
2218
+ function buildKnowledgeImportanceExemptionClause() {
2219
+ return `AND NOT (
2220
+ facts IS NOT NULL AND (
2221
+ facts LIKE '%"importance":4%'
2222
+ OR facts LIKE '%"importance": 4%'
2223
+ OR facts LIKE '%"importance":5%'
2224
+ OR facts LIKE '%"importance": 5%'
2225
+ )
2226
+ )`;
2227
+ }
2228
+ function getRetentionStats(db, config) {
2229
+ const obsThreshold = toEpochThreshold(config.observationsMaxAgeDays);
2230
+ const sumThreshold = toEpochThreshold(config.summariesMaxAgeDays);
2231
+ const promptThreshold = toEpochThreshold(config.promptsMaxAgeDays);
2232
+ const knowledgeThreshold = toEpochThreshold(config.knowledgeMaxAgeDays);
2233
+ const importanceExemption = buildKnowledgeImportanceExemptionClause();
2234
+ let observations = 0;
2235
+ if (obsThreshold !== null) {
2236
+ const row = db.query(
2237
+ `SELECT COUNT(*) as c FROM observations
2238
+ WHERE created_at_epoch < ?
2239
+ AND type NOT IN (${KNOWLEDGE_PLACEHOLDERS})`
2240
+ ).get(obsThreshold, ...KNOWLEDGE_TYPE_LIST);
2241
+ observations = row?.c ?? 0;
2242
+ }
2243
+ let summaries = 0;
2244
+ if (sumThreshold !== null) {
2245
+ const row = db.query(
2246
+ "SELECT COUNT(*) as c FROM summaries WHERE created_at_epoch < ?"
2247
+ ).get(sumThreshold);
2248
+ summaries = row?.c ?? 0;
2249
+ }
2250
+ let prompts = 0;
2251
+ if (promptThreshold !== null) {
2252
+ const row = db.query(
2253
+ "SELECT COUNT(*) as c FROM prompts WHERE created_at_epoch < ?"
2254
+ ).get(promptThreshold);
2255
+ prompts = row?.c ?? 0;
2256
+ }
2257
+ let knowledge = 0;
2258
+ if (knowledgeThreshold !== null) {
2259
+ const row = db.query(
2260
+ `SELECT COUNT(*) as c FROM observations
2261
+ WHERE created_at_epoch < ?
2262
+ AND type IN (${KNOWLEDGE_PLACEHOLDERS})
2263
+ ${importanceExemption}`
2264
+ ).get(knowledgeThreshold, ...KNOWLEDGE_TYPE_LIST);
2265
+ knowledge = row?.c ?? 0;
2266
+ }
2267
+ const total = observations + summaries + prompts + knowledge;
2268
+ return { observations, summaries, prompts, knowledge, total };
2269
+ }
2270
+ function countRows(db, sql, params) {
2271
+ const row = db.query(sql).get(...params);
2272
+ return row?.c ?? 0;
2273
+ }
2274
/**
 * Deletes rows that exceed their configured retention age, all inside one
 * transaction. Four categories are handled independently:
 *  - observations: rows in `observations` whose type is NOT a knowledge type
 *  - summaries
 *  - prompts
 *  - knowledge: rows in `observations` whose type IS a knowledge type,
 *    further filtered by an importance-exemption clause
 *
 * @param db     database handle exposing .query/.run/.transaction
 * @param config retention thresholds as produced by buildRetentionConfig
 * @returns {{observations:number, summaries:number, prompts:number,
 *            knowledge:number, total:number, executedAt:string}}
 *          per-category deleted counts, their total, and an ISO timestamp
 */
function applyRetention(db, config) {
  // A null threshold disables retention for that category (presumably
  // toEpochThreshold returns null for non-positive ages — confirm).
  const obsThreshold = toEpochThreshold(config.observationsMaxAgeDays);
  const sumThreshold = toEpochThreshold(config.summariesMaxAgeDays);
  const promptThreshold = toEpochThreshold(config.promptsMaxAgeDays);
  const knowledgeThreshold = toEpochThreshold(config.knowledgeMaxAgeDays);
  const importanceExemption = buildKnowledgeImportanceExemptionClause();
  const deleteAll = db.transaction(() => {
    let observations = 0;
    let summaries = 0;
    let prompts = 0;
    let knowledge = 0;
    if (obsThreshold !== null) {
      // Same params/WHERE reused for count, embedding delete, and row delete
      // so all three target exactly the same set of rows.
      const obsParams = [obsThreshold, ...KNOWLEDGE_TYPE_LIST];
      const obsWhere = `WHERE created_at_epoch < ? AND type NOT IN (${KNOWLEDGE_PLACEHOLDERS})`;
      // Count first so the DELETEs are skipped entirely when nothing matches.
      observations = countRows(
        db,
        `SELECT COUNT(*) as c FROM observations ${obsWhere}`,
        obsParams
      );
      if (observations > 0) {
        // Delete embeddings BEFORE their parent observations: the subquery
        // must still be able to resolve the doomed observation ids.
        db.run(
          `DELETE FROM observation_embeddings
          WHERE observation_id IN (
            SELECT id FROM observations ${obsWhere}
          )`,
          obsParams
        );
        db.run(
          `DELETE FROM observations ${obsWhere}`,
          obsParams
        );
      }
    }
    if (sumThreshold !== null) {
      summaries = countRows(
        db,
        "SELECT COUNT(*) as c FROM summaries WHERE created_at_epoch < ?",
        [sumThreshold]
      );
      if (summaries > 0) {
        db.run("DELETE FROM summaries WHERE created_at_epoch < ?", [sumThreshold]);
      }
    }
    if (promptThreshold !== null) {
      prompts = countRows(
        db,
        "SELECT COUNT(*) as c FROM prompts WHERE created_at_epoch < ?",
        [promptThreshold]
      );
      if (prompts > 0) {
        db.run("DELETE FROM prompts WHERE created_at_epoch < ?", [promptThreshold]);
      }
    }
    if (knowledgeThreshold !== null) {
      const kParams = [knowledgeThreshold, ...KNOWLEDGE_TYPE_LIST];
      // Knowledge rows matching the importance exemption are spared.
      const kWhere = `WHERE created_at_epoch < ? AND type IN (${KNOWLEDGE_PLACEHOLDERS}) ${importanceExemption}`;
      knowledge = countRows(
        db,
        `SELECT COUNT(*) as c FROM observations ${kWhere}`,
        kParams
      );
      if (knowledge > 0) {
        // Embeddings first, then the observation rows (same ordering
        // rationale as above).
        db.run(
          `DELETE FROM observation_embeddings
          WHERE observation_id IN (
            SELECT id FROM observations ${kWhere}
          )`,
          kParams
        );
        db.run(
          `DELETE FROM observations ${kWhere}`,
          kParams
        );
      }
    }
    return { observations, summaries, prompts, knowledge };
  });
  // Execute the transaction and attach total + timestamp for the caller.
  const counts = deleteAll();
  const total = counts.observations + counts.summaries + counts.prompts + counts.knowledge;
  return {
    ...counts,
    total,
    executedAt: (/* @__PURE__ */ new Date()).toISOString()
  };
}
2359
/**
 * Builds the retention configuration from a flat key/value config map,
 * falling back to defaults when a key is missing or not numeric.
 *
 * @param {Record<string, unknown>} config - flat configuration map keyed by
 *   dotted setting names (e.g. "retention.prompts.maxAgeDays")
 * @returns {{observationsMaxAgeDays:number, summariesMaxAgeDays:number,
 *            promptsMaxAgeDays:number, knowledgeMaxAgeDays:number}}
 */
function buildRetentionConfig(config) {
  // Reads `key` and coerces it to a number; returns `fallback` when the
  // value is missing (null/undefined) or does not parse as a number.
  function getNum(key, fallback) {
    const v = config[key];
    if (v === null || v === void 0) return fallback;
    const n = Number(v);
    // Number.isNaN instead of the coercing global isNaN (idiomatic; `n` is
    // already a number here so behavior is unchanged).
    return Number.isNaN(n) ? fallback : n;
  }
  return {
    observationsMaxAgeDays: getNum("retention.observations.maxAgeDays", 90),
    summariesMaxAgeDays: getNum("retention.summaries.maxAgeDays", 365),
    promptsMaxAgeDays: getNum("retention.prompts.maxAgeDays", 30),
    // Default 0 — presumably disables knowledge expiry via toEpochThreshold;
    // confirm against that helper's semantics.
    knowledgeMaxAgeDays: getNum("retention.knowledge.maxAgeDays", 0)
  };
}
2373
+
2374
+ // src/services/sqlite/Backup.ts
2375
+ import {
2376
+ existsSync as existsSync4,
2377
+ mkdirSync as mkdirSync3,
2378
+ copyFileSync,
2379
+ readdirSync,
2380
+ statSync as statSync2,
2381
+ unlinkSync,
2382
+ readFileSync as readFileSync2,
2383
+ writeFileSync
2384
+ } from "fs";
2385
+ import { join as join3, basename as basename2 } from "path";
2386
/**
 * Formats a Date as a filesystem-safe local timestamp:
 * "YYYY-MM-DD-HHMMSS-mmm" (milliseconds zero-padded to 3 digits).
 */
function formatTimestamp(date) {
  const zero = (value, width = 2) => `${value}`.padStart(width, "0");
  const datePart = [
    date.getFullYear(),
    zero(date.getMonth() + 1),
    zero(date.getDate())
  ].join("-");
  const timePart = `${zero(date.getHours())}${zero(date.getMinutes())}${zero(date.getSeconds())}`;
  return `${datePart}-${timePart}-${zero(date.getMilliseconds(), 3)}`;
}
2397
/**
 * Gathers row counts for the main tables plus the database file size on
 * disk. A table whose COUNT query throws (e.g. table missing) counts as 0;
 * a missing database file yields dbSizeBytes 0.
 */
function collectStats(db, dbPath) {
  function safeCount(table) {
    try {
      const result = db.query(`SELECT COUNT(*) as c FROM ${table}`).get();
      return result?.c ?? 0;
    } catch {
      return 0;
    }
  }
  const stats = {};
  for (const table of ["observations", "sessions", "summaries", "prompts"]) {
    stats[table] = safeCount(table);
  }
  stats.dbSizeBytes = existsSync4(dbPath) ? statSync2(dbPath).size : 0;
  return stats;
}
2415
/**
 * Returns the highest migration version recorded in `schema_versions`,
 * or 0 when the query fails (e.g. fresh DB without the table) or the
 * table is empty.
 */
function getSchemaVersion(db) {
  let version = 0;
  try {
    const result = db.query("SELECT MAX(version) as v FROM schema_versions").get();
    version = result?.v ?? 0;
  } catch {
    // Table missing: treat as schema version 0.
  }
  return version;
}
2423
/**
 * Creates a timestamped copy of the SQLite database inside `backupDir`
 * (including the WAL/SHM side files when present) and writes a companion
 * `.meta.json` file describing the snapshot.
 *
 * @param dbPath    path of the live database file
 * @param backupDir destination directory (created if missing)
 * @param db        database handle, used to collect stats/schema version
 * @returns {{filePath:string, metaPath:string, metadata:object}}
 * @throws when the source database file does not exist
 */
function createBackup(dbPath, backupDir, db) {
  mkdirSync3(backupDir, { recursive: true });
  const startedAt = /* @__PURE__ */ new Date();
  const stamp = formatTimestamp(startedAt);
  const backupName = `backup-${stamp}.db`;
  const backupPath = join3(backupDir, backupName);
  const metaName = `backup-${stamp}.meta.json`;
  const metadataPath = join3(backupDir, metaName);
  if (!existsSync4(dbPath)) {
    throw new Error(`Database non trovato: ${dbPath}`);
  }
  copyFileSync(dbPath, backupPath);
  logger.info("BACKUP", `File DB copiato: ${dbPath} \u2192 ${backupPath}`);
  // NOTE(review): plain file copy of a live SQLite DB; consistency appears
  // to rely on also copying the WAL/SHM sidecars below — confirm callers
  // quiesce writes or that WAL mode tolerates this.
  const walSource = `${dbPath}-wal`;
  const shmSource = `${dbPath}-shm`;
  if (existsSync4(walSource)) {
    copyFileSync(walSource, `${backupPath}-wal`);
    logger.debug("BACKUP", "File WAL copiato");
  }
  if (existsSync4(shmSource)) {
    copyFileSync(shmSource, `${backupPath}-shm`);
    logger.debug("BACKUP", "File SHM copiato");
  }
  // Snapshot stats/schema after the copy, mirroring what was backed up.
  const stats = collectStats(db, dbPath);
  const schemaVersion = getSchemaVersion(db);
  const metadata = {
    timestamp: startedAt.toISOString(),
    timestampEpoch: startedAt.getTime(),
    schemaVersion,
    stats,
    sourcePath: dbPath,
    filename: backupName
  };
  writeFileSync(metadataPath, JSON.stringify(metadata, null, 2), "utf8");
  logger.info("BACKUP", `Metadata scritto: ${metadataPath}`);
  return {
    filePath: backupPath,
    metaPath: metadataPath,
    metadata
  };
}
2464
/**
 * Scans `backupDir` for backup metadata files ("backup-*.meta.json") and
 * returns the matching backup entries, newest first. Entries with
 * unreadable metadata or a missing .db file are skipped with a warning;
 * a missing/unreadable directory yields an empty list.
 */
function listBackups(backupDir) {
  if (!existsSync4(backupDir)) {
    return [];
  }
  let dirContents;
  try {
    dirContents = readdirSync(backupDir);
  } catch (err) {
    logger.warn("BACKUP", `Impossibile leggere la directory backup: ${backupDir}`, {}, err);
    return [];
  }
  const results = [];
  for (const name of dirContents) {
    if (!name.startsWith("backup-") || !name.endsWith(".meta.json")) continue;
    const metaPath = join3(backupDir, name);
    const filePath = join3(backupDir, name.replace(/\.meta\.json$/, ".db"));
    let metadata;
    try {
      metadata = JSON.parse(readFileSync2(metaPath, "utf8"));
    } catch (err) {
      logger.warn("BACKUP", `Metadata non leggibile: ${metaPath}`, {}, err);
      continue;
    }
    if (!existsSync4(filePath)) {
      logger.warn("BACKUP", `File backup mancante per metadata: ${filePath}`);
      continue;
    }
    results.push({ filePath, metaPath, metadata });
  }
  // Newest backups first.
  results.sort((a, b) => b.metadata.timestampEpoch - a.metadata.timestampEpoch);
  return results;
}
2498
/**
 * Restores the database from a backup file, overwriting `dbPath`.
 * WAL/SHM sidecars are restored when present in the backup; otherwise any
 * current sidecar is removed so stale journal data cannot survive the
 * restore.
 *
 * @throws when `backupFile` does not exist
 */
function restoreBackup(backupFile, dbPath) {
  if (!existsSync4(backupFile)) {
    throw new Error(`File backup non trovato: ${backupFile}`);
  }
  copyFileSync(backupFile, dbPath);
  logger.info("BACKUP", `Database ripristinato: ${backupFile} \u2192 ${dbPath}`);
  const sidecars = [
    {
      src: `${backupFile}-wal`,
      dest: `${dbPath}-wal`,
      restoredMsg: "File WAL ripristinato",
      removedMsg: "File WAL corrente rimosso (non presente nel backup)"
    },
    {
      src: `${backupFile}-shm`,
      dest: `${dbPath}-shm`,
      restoredMsg: "File SHM ripristinato",
      removedMsg: "File SHM corrente rimosso (non presente nel backup)"
    }
  ];
  for (const { src, dest, restoredMsg, removedMsg } of sidecars) {
    if (existsSync4(src)) {
      copyFileSync(src, dest);
      logger.debug("BACKUP", restoredMsg);
    } else if (existsSync4(dest)) {
      unlinkSync(dest);
      logger.debug("BACKUP", removedMsg);
    }
  }
}
2523
/**
 * Removes the oldest backups (and their WAL/SHM sidecars and metadata
 * files) so that at most `maxKeep` remain. Relies on listBackups returning
 * entries newest-first. Individual unlink failures are logged and do not
 * abort the rotation.
 *
 * @returns number of backups removed
 * @throws when maxKeep is not > 0
 */
function rotateBackups(backupDir, maxKeep) {
  if (maxKeep <= 0) {
    throw new Error(`maxKeep deve essere > 0, ricevuto: ${maxKeep}`);
  }
  const entries = listBackups(backupDir);
  if (entries.length <= maxKeep) {
    logger.debug("BACKUP", `Rotazione non necessaria: ${entries.length}/${maxKeep} backup presenti`);
    return 0;
  }
  // Best-effort unlink; warns only when a message is supplied (sidecar
  // removal failures stay silent, as before).
  const tryUnlink = (targetPath, warnMessage) => {
    try {
      if (existsSync4(targetPath)) unlinkSync(targetPath);
    } catch (err) {
      if (warnMessage) logger.warn("BACKUP", warnMessage, {}, err);
    }
  };
  let removedCount = 0;
  for (const entry of entries.slice(maxKeep)) {
    tryUnlink(entry.filePath, `Impossibile eliminare: ${entry.filePath}`);
    tryUnlink(`${entry.filePath}-wal`);
    tryUnlink(`${entry.filePath}-shm`);
    tryUnlink(entry.metaPath, `Impossibile eliminare metadata: ${entry.metaPath}`);
    logger.info("BACKUP", `Backup rimosso (rotazione): ${basename2(entry.filePath)}`);
    removedCount++;
  }
  logger.info("BACKUP", `Rotazione completata: ${removedCount} backup eliminati, ${maxKeep} mantenuti`);
  return removedCount;
}
1364
2561
  export {
1365
- KiroMemoryDatabase as ContextKitDatabase,
1366
- DatabaseManager,
2562
+ JSONL_SCHEMA_VERSION,
1367
2563
  KiroMemoryDatabase,
2564
+ applyRetention,
2565
+ buildNextCursor,
2566
+ buildRetentionConfig,
1368
2567
  completeSession,
2568
+ computeImportHash,
1369
2569
  consolidateObservations,
2570
+ countExportRecords,
2571
+ createBackup,
1370
2572
  createCheckpoint,
2573
+ createGithubLink,
1371
2574
  createObservation,
1372
2575
  createPrompt,
1373
2576
  createSession,
1374
2577
  createSummary,
2578
+ decodeCursor,
1375
2579
  deleteObservation,
1376
2580
  deletePrompt,
1377
2581
  deleteSummary,
2582
+ encodeCursor,
2583
+ exportObservationsStreaming,
2584
+ exportPromptsStreaming,
2585
+ exportSummariesStreaming,
1378
2586
  failSession,
2587
+ generateMetaRecord,
1379
2588
  getActiveSessions,
1380
2589
  getAllSessions,
1381
2590
  getCheckpointsBySession,
1382
- getDatabase,
2591
+ getGithubLinksByIssue,
2592
+ getGithubLinksByObservation,
2593
+ getGithubLinksByPR,
2594
+ getGithubLinksByRepo,
1383
2595
  getLatestCheckpoint,
1384
2596
  getLatestCheckpointByProject,
1385
2597
  getLatestPrompt,
@@ -1390,6 +2602,7 @@ export {
1390
2602
  getPromptsByProject,
1391
2603
  getPromptsBySession,
1392
2604
  getReportData,
2605
+ getRetentionStats,
1393
2606
  getSessionByContentId,
1394
2607
  getSessionById,
1395
2608
  getSessionsByProject,
@@ -1397,9 +2610,15 @@ export {
1397
2610
  getSummariesByProject,
1398
2611
  getSummaryBySession,
1399
2612
  getTimeline,
1400
- initializeDatabase,
2613
+ hashExistsInObservations,
2614
+ importJsonl,
1401
2615
  isDuplicateObservation,
2616
+ listBackups,
2617
+ listReposWithLinkCount,
1402
2618
  markObservationsStale,
2619
+ restoreBackup,
2620
+ rotateBackups,
2621
+ searchGithubLinks,
1403
2622
  searchObservations,
1404
2623
  searchObservationsFTS,
1405
2624
  searchObservationsFTSWithRank,
@@ -1407,5 +2626,6 @@ export {
1407
2626
  searchSummaries,
1408
2627
  searchSummariesFiltered,
1409
2628
  updateLastAccessed,
1410
- updateSessionMemoryId
2629
+ updateSessionMemoryId,
2630
+ validateJsonlRow
1411
2631
  };