@loreai/core 0.15.0 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +11 -0
- package/dist/bun/agents-file.d.ts +13 -1
- package/dist/bun/agents-file.d.ts.map +1 -1
- package/dist/bun/config.d.ts +20 -1
- package/dist/bun/config.d.ts.map +1 -1
- package/dist/bun/data.d.ts +174 -0
- package/dist/bun/data.d.ts.map +1 -0
- package/dist/bun/db.d.ts +65 -0
- package/dist/bun/db.d.ts.map +1 -1
- package/dist/bun/distillation.d.ts +49 -6
- package/dist/bun/distillation.d.ts.map +1 -1
- package/dist/bun/embedding-vendor.d.ts +66 -0
- package/dist/bun/embedding-vendor.d.ts.map +1 -0
- package/dist/bun/embedding-worker-types.d.ts +66 -0
- package/dist/bun/embedding-worker-types.d.ts.map +1 -0
- package/dist/bun/embedding-worker.d.ts +16 -0
- package/dist/bun/embedding-worker.d.ts.map +1 -0
- package/dist/bun/embedding-worker.js +100 -0
- package/dist/bun/embedding-worker.js.map +7 -0
- package/dist/bun/embedding.d.ts +91 -8
- package/dist/bun/embedding.d.ts.map +1 -1
- package/dist/bun/git.d.ts +47 -0
- package/dist/bun/git.d.ts.map +1 -0
- package/dist/bun/gradient.d.ts +19 -1
- package/dist/bun/gradient.d.ts.map +1 -1
- package/dist/bun/index.d.ts +9 -6
- package/dist/bun/index.d.ts.map +1 -1
- package/dist/bun/index.js +13205 -11259
- package/dist/bun/index.js.map +4 -4
- package/dist/bun/lat-reader.d.ts +1 -1
- package/dist/bun/lat-reader.d.ts.map +1 -1
- package/dist/bun/ltm.d.ts.map +1 -1
- package/dist/bun/markdown.d.ts +11 -0
- package/dist/bun/markdown.d.ts.map +1 -1
- package/dist/bun/prompt.d.ts +1 -1
- package/dist/bun/prompt.d.ts.map +1 -1
- package/dist/bun/recall.d.ts +53 -0
- package/dist/bun/recall.d.ts.map +1 -1
- package/dist/bun/search.d.ts +29 -0
- package/dist/bun/search.d.ts.map +1 -1
- package/dist/bun/temporal.d.ts +2 -0
- package/dist/bun/temporal.d.ts.map +1 -1
- package/dist/bun/types.d.ts +15 -0
- package/dist/bun/types.d.ts.map +1 -1
- package/dist/bun/worker-model.d.ts +15 -80
- package/dist/bun/worker-model.d.ts.map +1 -1
- package/dist/node/agents-file.d.ts +13 -1
- package/dist/node/agents-file.d.ts.map +1 -1
- package/dist/node/config.d.ts +20 -1
- package/dist/node/config.d.ts.map +1 -1
- package/dist/node/data.d.ts +174 -0
- package/dist/node/data.d.ts.map +1 -0
- package/dist/node/db.d.ts +65 -0
- package/dist/node/db.d.ts.map +1 -1
- package/dist/node/distillation.d.ts +49 -6
- package/dist/node/distillation.d.ts.map +1 -1
- package/dist/node/embedding-vendor.d.ts +66 -0
- package/dist/node/embedding-vendor.d.ts.map +1 -0
- package/dist/node/embedding-worker-types.d.ts +66 -0
- package/dist/node/embedding-worker-types.d.ts.map +1 -0
- package/dist/node/embedding-worker.d.ts +16 -0
- package/dist/node/embedding-worker.d.ts.map +1 -0
- package/dist/node/embedding-worker.js +100 -0
- package/dist/node/embedding-worker.js.map +7 -0
- package/dist/node/embedding.d.ts +91 -8
- package/dist/node/embedding.d.ts.map +1 -1
- package/dist/node/git.d.ts +47 -0
- package/dist/node/git.d.ts.map +1 -0
- package/dist/node/gradient.d.ts +19 -1
- package/dist/node/gradient.d.ts.map +1 -1
- package/dist/node/index.d.ts +9 -6
- package/dist/node/index.d.ts.map +1 -1
- package/dist/node/index.js +13205 -11259
- package/dist/node/index.js.map +4 -4
- package/dist/node/lat-reader.d.ts +1 -1
- package/dist/node/lat-reader.d.ts.map +1 -1
- package/dist/node/ltm.d.ts.map +1 -1
- package/dist/node/markdown.d.ts +11 -0
- package/dist/node/markdown.d.ts.map +1 -1
- package/dist/node/prompt.d.ts +1 -1
- package/dist/node/prompt.d.ts.map +1 -1
- package/dist/node/recall.d.ts +53 -0
- package/dist/node/recall.d.ts.map +1 -1
- package/dist/node/search.d.ts +29 -0
- package/dist/node/search.d.ts.map +1 -1
- package/dist/node/temporal.d.ts +2 -0
- package/dist/node/temporal.d.ts.map +1 -1
- package/dist/node/types.d.ts +15 -0
- package/dist/node/types.d.ts.map +1 -1
- package/dist/node/worker-model.d.ts +15 -80
- package/dist/node/worker-model.d.ts.map +1 -1
- package/dist/types/agents-file.d.ts +13 -1
- package/dist/types/agents-file.d.ts.map +1 -1
- package/dist/types/config.d.ts +20 -1
- package/dist/types/config.d.ts.map +1 -1
- package/dist/types/data.d.ts +174 -0
- package/dist/types/data.d.ts.map +1 -0
- package/dist/types/db.d.ts +65 -0
- package/dist/types/db.d.ts.map +1 -1
- package/dist/types/distillation.d.ts +49 -6
- package/dist/types/distillation.d.ts.map +1 -1
- package/dist/types/embedding-vendor.d.ts +66 -0
- package/dist/types/embedding-vendor.d.ts.map +1 -0
- package/dist/types/embedding-worker-types.d.ts +66 -0
- package/dist/types/embedding-worker-types.d.ts.map +1 -0
- package/dist/types/embedding-worker.d.ts +16 -0
- package/dist/types/embedding-worker.d.ts.map +1 -0
- package/dist/types/embedding.d.ts +91 -8
- package/dist/types/embedding.d.ts.map +1 -1
- package/dist/types/git.d.ts +47 -0
- package/dist/types/git.d.ts.map +1 -0
- package/dist/types/gradient.d.ts +19 -1
- package/dist/types/gradient.d.ts.map +1 -1
- package/dist/types/index.d.ts +9 -6
- package/dist/types/index.d.ts.map +1 -1
- package/dist/types/lat-reader.d.ts +1 -1
- package/dist/types/lat-reader.d.ts.map +1 -1
- package/dist/types/ltm.d.ts.map +1 -1
- package/dist/types/markdown.d.ts +11 -0
- package/dist/types/markdown.d.ts.map +1 -1
- package/dist/types/prompt.d.ts +1 -1
- package/dist/types/prompt.d.ts.map +1 -1
- package/dist/types/recall.d.ts +53 -0
- package/dist/types/recall.d.ts.map +1 -1
- package/dist/types/search.d.ts +29 -0
- package/dist/types/search.d.ts.map +1 -1
- package/dist/types/temporal.d.ts +2 -0
- package/dist/types/temporal.d.ts.map +1 -1
- package/dist/types/types.d.ts +15 -0
- package/dist/types/types.d.ts.map +1 -1
- package/dist/types/worker-model.d.ts +15 -80
- package/dist/types/worker-model.d.ts.map +1 -1
- package/package.json +5 -2
- package/src/agents-file.ts +87 -4
- package/src/config.ts +68 -5
- package/src/curator.ts +2 -2
- package/src/data.ts +768 -0
- package/src/db.ts +386 -7
- package/src/distillation.ts +178 -35
- package/src/embedding-vendor.ts +102 -0
- package/src/embedding-worker-types.ts +82 -0
- package/src/embedding-worker.ts +185 -0
- package/src/embedding.ts +607 -61
- package/src/git.ts +144 -0
- package/src/gradient.ts +174 -17
- package/src/index.ts +20 -0
- package/src/lat-reader.ts +5 -11
- package/src/ltm.ts +17 -44
- package/src/markdown.ts +15 -0
- package/src/prompt.ts +1 -2
- package/src/recall.ts +401 -70
- package/src/search.ts +71 -1
- package/src/temporal.ts +42 -35
- package/src/types.ts +15 -0
- package/src/worker-model.ts +17 -363
package/src/db.ts
CHANGED
|
@@ -2,8 +2,26 @@ import { Database } from "#db/driver";
|
|
|
2
2
|
import { join, dirname } from "path";
|
|
3
3
|
import { mkdirSync } from "fs";
|
|
4
4
|
import { homedir } from "os";
|
|
5
|
+
import { getGitRemote } from "./git";
|
|
5
6
|
|
|
6
|
-
|
|
7
|
+
/**
|
|
8
|
+
* Extract the repository name from a normalized git remote URL.
|
|
9
|
+
*
|
|
10
|
+
* Examples:
|
|
11
|
+
* "github.com/BYK/LoreAI" → "LoreAI"
|
|
12
|
+
* "github.com/org/repo" → "repo"
|
|
13
|
+
* "github.com" → null (no path components)
|
|
14
|
+
* null → null
|
|
15
|
+
*/
|
|
16
|
+
export function repoNameFromRemote(remote: string | null): string | null {
|
|
17
|
+
if (!remote) return null;
|
|
18
|
+
const lastSlash = remote.lastIndexOf("/");
|
|
19
|
+
if (lastSlash < 0) return null;
|
|
20
|
+
const name = remote.slice(lastSlash + 1);
|
|
21
|
+
return name.length > 0 ? name : null;
|
|
22
|
+
}
|
|
23
|
+
|
|
24
|
+
// Current schema version the migration runner targets.
// NOTE(review): the MIGRATIONS array below contains entries commented as
// "Version 17" and "Version 18", yet this constant is 16 — confirm whether
// it should have been bumped to 18 (or whether the version is derived from
// MIGRATIONS.length elsewhere and this constant is vestigial).
const SCHEMA_VERSION = 16;
|
|
7
25
|
|
|
8
26
|
const MIGRATIONS: string[] = [
|
|
9
27
|
`
|
|
@@ -362,6 +380,79 @@ const MIGRATIONS: string[] = [
|
|
|
362
380
|
updated_at INTEGER NOT NULL
|
|
363
381
|
);
|
|
364
382
|
`,
|
|
383
|
+
`
|
|
384
|
+
-- Version 14: Git-based project identification.
|
|
385
|
+
--
|
|
386
|
+
-- Projects can now be identified by their git remote URL in addition to
|
|
387
|
+
-- filesystem path. This enables worktree, clone, and fork awareness:
|
|
388
|
+
-- the same repository accessed from different paths shares one project.
|
|
389
|
+
--
|
|
390
|
+
-- git_remote: Normalized canonical remote URL (e.g. "github.com/user/repo").
|
|
391
|
+
-- NULL for non-git directories or repos with no remotes.
|
|
392
|
+
--
|
|
393
|
+
-- project_path_aliases: Maps additional filesystem paths to existing
|
|
394
|
+
-- projects. When ensureProject() finds a match by git_remote, the
|
|
395
|
+
-- alternate path is registered here for O(1) subsequent lookups.
|
|
396
|
+
ALTER TABLE projects ADD COLUMN git_remote TEXT;
|
|
397
|
+
CREATE INDEX IF NOT EXISTS idx_projects_git_remote ON projects(git_remote);
|
|
398
|
+
|
|
399
|
+
CREATE TABLE IF NOT EXISTS project_path_aliases (
|
|
400
|
+
path TEXT PRIMARY KEY,
|
|
401
|
+
project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE
|
|
402
|
+
);
|
|
403
|
+
`,
|
|
404
|
+
|
|
405
|
+
`
|
|
406
|
+
-- Version 15: Cache warming survival histograms.
|
|
407
|
+
--
|
|
408
|
+
-- Persists global (per-project, per-time-slot) inter-turn gap histograms
|
|
409
|
+
-- across gateway restarts. These histograms feed the survival analysis
|
|
410
|
+
-- model that decides whether to send speculative cache-warming pings.
|
|
411
|
+
-- Without persistence, the model has no data until enough turns rebuild
|
|
412
|
+
-- the histogram from scratch (cold start problem).
|
|
413
|
+
--
|
|
414
|
+
-- counts: JSON array of bin counts (21 elements: 20 bins + 1 overflow).
|
|
415
|
+
-- total: Sum of counts (denormalized for fast reads).
|
|
416
|
+
CREATE TABLE IF NOT EXISTS warmup_histograms (
|
|
417
|
+
project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE,
|
|
418
|
+
time_slot TEXT NOT NULL,
|
|
419
|
+
counts TEXT NOT NULL DEFAULT '[]',
|
|
420
|
+
total INTEGER NOT NULL DEFAULT 0,
|
|
421
|
+
updated_at INTEGER NOT NULL DEFAULT 0,
|
|
422
|
+
PRIMARY KEY (project_id, time_slot)
|
|
423
|
+
);
|
|
424
|
+
`,
|
|
425
|
+
`
|
|
426
|
+
-- Version 16: Embedding BLOB column for temporal message vector search.
|
|
427
|
+
-- Same pattern as knowledge (v8) and distillation (v9) embeddings.
|
|
428
|
+
-- Only undistilled messages are embedded; the column is NULLed when
|
|
429
|
+
-- a message is marked as distilled (its semantic content is captured
|
|
430
|
+
-- by the distillation embedding at that point).
|
|
431
|
+
-- No backfill — new messages get embedded lazily at write time.
|
|
432
|
+
ALTER TABLE temporal_messages ADD COLUMN embedding BLOB;
|
|
433
|
+
`,
|
|
434
|
+
`
|
|
435
|
+
-- Version 17: Track whether distillation used batch API pricing.
|
|
436
|
+
-- NULL for pre-migration rows (treated as 'direct' for conservative estimates).
|
|
437
|
+
-- 'batch' = 50% discount on input+output, 'direct' = full price.
|
|
438
|
+
ALTER TABLE distillations ADD COLUMN call_type TEXT;
|
|
439
|
+
`,
|
|
440
|
+
`
|
|
441
|
+
-- Version 18: Persist live session cost data so historical estimates
|
|
442
|
+
-- include cache warming, 1h TTL savings, and batch API savings — metrics
|
|
443
|
+
-- that were previously lost on gateway restart.
|
|
444
|
+
-- All cost columns are in USD. Token columns are raw counts.
|
|
445
|
+
ALTER TABLE session_state ADD COLUMN conversation_cost REAL NOT NULL DEFAULT 0;
|
|
446
|
+
ALTER TABLE session_state ADD COLUMN worker_cost REAL NOT NULL DEFAULT 0;
|
|
447
|
+
ALTER TABLE session_state ADD COLUMN conversation_turns INTEGER NOT NULL DEFAULT 0;
|
|
448
|
+
ALTER TABLE session_state ADD COLUMN cache_read_tokens INTEGER NOT NULL DEFAULT 0;
|
|
449
|
+
ALTER TABLE session_state ADD COLUMN cache_write_tokens INTEGER NOT NULL DEFAULT 0;
|
|
450
|
+
ALTER TABLE session_state ADD COLUMN warmup_savings REAL NOT NULL DEFAULT 0;
|
|
451
|
+
ALTER TABLE session_state ADD COLUMN warmup_hits INTEGER NOT NULL DEFAULT 0;
|
|
452
|
+
ALTER TABLE session_state ADD COLUMN ttl_savings REAL NOT NULL DEFAULT 0;
|
|
453
|
+
ALTER TABLE session_state ADD COLUMN ttl_hits INTEGER NOT NULL DEFAULT 0;
|
|
454
|
+
ALTER TABLE session_state ADD COLUMN batch_savings REAL NOT NULL DEFAULT 0;
|
|
455
|
+
`,
|
|
365
456
|
];
|
|
366
457
|
|
|
367
458
|
function dataDir() {
|
|
@@ -370,6 +461,13 @@ function dataDir() {
|
|
|
370
461
|
return join(base, "opencode-lore");
|
|
371
462
|
}
|
|
372
463
|
|
|
464
|
+
/** Return the resolved path of the SQLite database file. */
|
|
465
|
+
export function dbPath(): string {
|
|
466
|
+
const envPath = process.env.LORE_DB_PATH;
|
|
467
|
+
if (envPath) return envPath;
|
|
468
|
+
return join(dataDir(), "lore.db");
|
|
469
|
+
}
|
|
470
|
+
|
|
373
471
|
let instance: Database | undefined;
|
|
374
472
|
|
|
375
473
|
export function db(): Database {
|
|
@@ -380,6 +478,18 @@ export function db(): Database {
|
|
|
380
478
|
mkdirSync(dirname(envPath), { recursive: true });
|
|
381
479
|
path = envPath;
|
|
382
480
|
} else {
|
|
481
|
+
// Guard: refuse to open the production DB during test runs.
|
|
482
|
+
// The test preload (setup.ts) sets LORE_DB_PATH to a temp directory.
|
|
483
|
+
// If we reach here with NODE_ENV=test, the preload didn't fire
|
|
484
|
+
// (e.g. bun test invoked from outside the repo). Throw instead of
|
|
485
|
+
// silently writing test fixtures into the user's live database.
|
|
486
|
+
if (process.env.NODE_ENV === "test") {
|
|
487
|
+
throw new Error(
|
|
488
|
+
"LORE_DB_PATH is not set but NODE_ENV=test. " +
|
|
489
|
+
"Run tests via `bun test` from the repo root, or set " +
|
|
490
|
+
"LORE_DB_PATH to a temp path to avoid polluting the production DB.",
|
|
491
|
+
);
|
|
492
|
+
}
|
|
383
493
|
const dir = dataDir();
|
|
384
494
|
mkdirSync(dir, { recursive: true });
|
|
385
495
|
path = join(dir, "lore.db");
|
|
@@ -507,7 +617,72 @@ function recoverMissingObjects(database: Database) {
|
|
|
507
617
|
value TEXT NOT NULL,
|
|
508
618
|
updated_at INTEGER NOT NULL
|
|
509
619
|
);
|
|
620
|
+
CREATE TABLE IF NOT EXISTS project_path_aliases (
|
|
621
|
+
path TEXT PRIMARY KEY,
|
|
622
|
+
project_id TEXT NOT NULL REFERENCES projects(id) ON DELETE CASCADE
|
|
623
|
+
);
|
|
510
624
|
`);
|
|
625
|
+
|
|
626
|
+
// Recover missing columns from partial migration runs.
|
|
627
|
+
// Version 17 added call_type to distillations but the ALTER could have been
|
|
628
|
+
// skipped if the version was bumped without the column being created.
|
|
629
|
+
const cols = database
|
|
630
|
+
.query("PRAGMA table_info(distillations)")
|
|
631
|
+
.all() as Array<{ name: string }>;
|
|
632
|
+
if (!cols.some((c) => c.name === "call_type")) {
|
|
633
|
+
database.exec("ALTER TABLE distillations ADD COLUMN call_type TEXT;");
|
|
634
|
+
}
|
|
635
|
+
}
|
|
636
|
+
|
|
637
|
+
/**
 * Merge all data from `sourceId` project into `targetId` project.
 *
 * Moves knowledge, temporal messages, distillations, LAT sections, and
 * path aliases from source to target. Registers the source project's path
 * as an alias of the target. Deletes the source project row.
 *
 * Used internally during lazy git-remote backfill when two path-only
 * projects are discovered to share the same git remote.
 *
 * Runs inside a single transaction: either every table is re-pointed at
 * the target and the source row is deleted, or the whole merge rolls back
 * and the error is rethrown.
 */
export function mergeProjectInternal(
  sourceId: string,
  targetId: string,
): void {
  const d = db();
  // BEGIN IMMEDIATE takes the write lock up front so no other writer can
  // interleave between the UPDATEs below.
  d.exec("BEGIN IMMEDIATE");
  try {
    d.query("UPDATE knowledge SET project_id = ? WHERE project_id = ?").run(
      targetId,
      sourceId,
    );
    d.query(
      "UPDATE temporal_messages SET project_id = ? WHERE project_id = ?",
    ).run(targetId, sourceId);
    d.query(
      "UPDATE distillations SET project_id = ? WHERE project_id = ?",
    ).run(targetId, sourceId);
    d.query("UPDATE lat_sections SET project_id = ? WHERE project_id = ?").run(
      targetId,
      sourceId,
    );
    // OR IGNORE: an alias path may already point at the target; the PRIMARY
    // KEY on `path` would otherwise abort the whole merge.
    d.query(
      "UPDATE OR IGNORE project_path_aliases SET project_id = ? WHERE project_id = ?",
    ).run(targetId, sourceId);
    // Register source's path as alias of target
    const sourceRow = d
      .query("SELECT path FROM projects WHERE id = ?")
      .get(sourceId) as { path: string } | null;
    if (sourceRow) {
      d.query(
        "INSERT OR IGNORE INTO project_path_aliases (path, project_id) VALUES (?, ?)",
      ).run(sourceRow.path, targetId);
    }
    d.query("DELETE FROM projects WHERE id = ?").run(sourceId);
    d.exec("COMMIT");
  } catch (e) {
    d.exec("ROLLBACK");
    throw e;
  }
}
|
|
512
687
|
|
|
513
688
|
export function close() {
|
|
@@ -518,17 +693,85 @@ export function close() {
|
|
|
518
693
|
}
|
|
519
694
|
|
|
520
695
|
// Project management
|
|
696
|
+
|
|
697
|
+
/**
 * Look up or create a project by filesystem path, with git-remote awareness.
 *
 * Resolution order:
 * 1. Exact path match in `projects` table (fast path, O(1) index scan)
 * 2. Path alias match in `project_path_aliases` (worktree/clone re-visits)
 * 3. Git remote match — runs `git remote -v` (once per unique path, cached),
 *    finds an existing project with the same normalized remote URL
 * 4. Create a new project row
 *
 * When a git-remote match is found (step 3), the new path is registered as
 * an alias so subsequent calls skip the subprocess. If the matched project's
 * git_remote was not yet populated (pre-v14 rows), it is backfilled lazily.
 */
export function ensureProject(path: string, name?: string): string {
  // 1. Exact path match (fast path)
  const existing = db()
    .query("SELECT id, git_remote FROM projects WHERE path = ?")
    .get(path) as { id: string; git_remote: string | null } | null;
  if (existing) {
    // Lazy backfill: populate git_remote on pre-v14 rows
    if (!existing.git_remote) {
      const gitRemote = getGitRemote(path);
      if (gitRemote) {
        // Check for conflict: another project already has this git_remote.
        // If so, merge the conflicting project into this one (one-time).
        const conflict = db()
          .query(
            "SELECT id FROM projects WHERE git_remote = ? AND id != ? LIMIT 1",
          )
          .get(gitRemote, existing.id) as { id: string } | null;
        if (conflict) {
          mergeProjectInternal(conflict.id, existing.id);
        }
        db()
          .query("UPDATE projects SET git_remote = ? WHERE id = ?")
          .run(gitRemote, existing.id);
      }
    }
    return existing.id;
  }

  // 2. Check path aliases (worktree/clone re-visits)
  const alias = db()
    .query("SELECT project_id FROM project_path_aliases WHERE path = ?")
    .get(path) as { project_id: string } | null;
  if (alias) return alias.project_id;

  // 3. Git remote identification
  const gitRemote = getGitRemote(path);
  if (gitRemote) {
    const byRemote = db()
      .query("SELECT id FROM projects WHERE git_remote = ? LIMIT 1")
      .get(gitRemote) as { id: string } | null;
    if (byRemote) {
      // Register this path as an alias for O(1) future lookups
      db()
        .query(
          "INSERT OR IGNORE INTO project_path_aliases (path, project_id) VALUES (?, ?)",
        )
        .run(path, byRemote.id);
      return byRemote.id;
    }
  }

  // 4. Create new project
  const id = crypto.randomUUID();
  db()
    .query(
      "INSERT INTO projects (id, path, name, git_remote, created_at) VALUES (?, ?, ?, ?, ?)",
    )
    .run(
      id,
      path,
      // Display-name preference: caller-supplied name → repo name derived
      // from the git remote → last path segment → "unknown".
      name ?? repoNameFromRemote(gitRemote) ?? path.split("/").pop() ?? "unknown",
      gitRemote,
      Date.now(),
    );
  return id;
}
|
|
534
777
|
|
|
@@ -536,7 +779,13 @@ export function projectId(path: string): string | undefined {
|
|
|
536
779
|
const row = db()
|
|
537
780
|
.query("SELECT id FROM projects WHERE path = ?")
|
|
538
781
|
.get(path) as { id: string } | null;
|
|
539
|
-
return row
|
|
782
|
+
if (row) return row.id;
|
|
783
|
+
|
|
784
|
+
// Check path aliases (worktree/clone paths registered by ensureProject)
|
|
785
|
+
const alias = db()
|
|
786
|
+
.query("SELECT project_id FROM project_path_aliases WHERE path = ?")
|
|
787
|
+
.get(path) as { project_id: string } | null;
|
|
788
|
+
return alias?.project_id;
|
|
540
789
|
}
|
|
541
790
|
|
|
542
791
|
/** Look up a project's display name by its internal ID. */
|
|
@@ -590,6 +839,136 @@ export function saveForceMinLayer(sessionID: string, layer: number): void {
|
|
|
590
839
|
}
|
|
591
840
|
}
|
|
592
841
|
|
|
842
|
+
/**
 * Persisted cost snapshot for a session.
 *
 * Mirrors the v18 cost columns on `session_state`: per the v18 migration
 * note, cost/savings fields are in USD and token fields are raw counts.
 */
export type SessionCostSnapshot = {
  conversationCost: number; // session_state.conversation_cost
  workerCost: number; // session_state.worker_cost
  conversationTurns: number; // session_state.conversation_turns
  cacheReadTokens: number; // session_state.cache_read_tokens
  cacheWriteTokens: number; // session_state.cache_write_tokens
  warmupSavings: number; // session_state.warmup_savings
  warmupHits: number; // session_state.warmup_hits
  ttlSavings: number; // session_state.ttl_savings
  ttlHits: number; // session_state.ttl_hits
  batchSavings: number; // session_state.batch_savings
};
|
|
855
|
+
|
|
856
|
+
/**
 * Persist a session's cost snapshot.
 *
 * Uses an upsert (INSERT ... ON CONFLICT DO UPDATE) so it works whether or
 * not a row already exists (forceMinLayer may have created one). On insert,
 * the COALESCE subselect carries over any force_min_layer a prior row held;
 * on conflict, force_min_layer is deliberately absent from the SET list so
 * this write never clobbers it.
 */
export function saveSessionCosts(sessionID: string, costs: SessionCostSnapshot): void {
  db()
    .query(
      `INSERT INTO session_state (session_id, force_min_layer, updated_at,
        conversation_cost, worker_cost, conversation_turns,
        cache_read_tokens, cache_write_tokens,
        warmup_savings, warmup_hits, ttl_savings, ttl_hits, batch_savings)
      VALUES (?, COALESCE((SELECT force_min_layer FROM session_state WHERE session_id = ?), 0), ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
      ON CONFLICT(session_id) DO UPDATE SET
        conversation_cost = excluded.conversation_cost,
        worker_cost = excluded.worker_cost,
        conversation_turns = excluded.conversation_turns,
        cache_read_tokens = excluded.cache_read_tokens,
        cache_write_tokens = excluded.cache_write_tokens,
        warmup_savings = excluded.warmup_savings,
        warmup_hits = excluded.warmup_hits,
        ttl_savings = excluded.ttl_savings,
        ttl_hits = excluded.ttl_hits,
        batch_savings = excluded.batch_savings,
        updated_at = excluded.updated_at`,
    )
    // Bind order: session_id, session_id again (for the COALESCE subselect),
    // updated_at, then the cost fields in the INSERT column-list order.
    .run(
      sessionID, sessionID, Date.now(),
      costs.conversationCost, costs.workerCost, costs.conversationTurns,
      costs.cacheReadTokens, costs.cacheWriteTokens,
      costs.warmupSavings, costs.warmupHits, costs.ttlSavings, costs.ttlHits, costs.batchSavings,
    );
}
|
|
888
|
+
|
|
889
|
+
/**
 * Load the persisted cost snapshot for a session.
 *
 * Returns null only when no session_state row exists for the session.
 * A row created by forceMinLayer alone (cost columns still at their zero
 * defaults) is returned as an all-zero snapshot.
 * NOTE(review): an earlier comment claimed all-zero rows return null, but
 * the code performs no such filtering (compare loadAllSessionCosts, whose
 * WHERE clause does) — confirm which behavior is intended.
 */
export function loadSessionCosts(sessionID: string): SessionCostSnapshot | null {
  const row = db()
    .query(
      `SELECT conversation_cost, worker_cost, conversation_turns,
        cache_read_tokens, cache_write_tokens,
        warmup_savings, warmup_hits, ttl_savings, ttl_hits, batch_savings
      FROM session_state WHERE session_id = ?`,
    )
    .get(sessionID) as {
    conversation_cost: number;
    worker_cost: number;
    conversation_turns: number;
    cache_read_tokens: number;
    cache_write_tokens: number;
    warmup_savings: number;
    warmup_hits: number;
    ttl_savings: number;
    ttl_hits: number;
    batch_savings: number;
  } | null;
  if (!row) return null;
  // Map snake_case columns onto the camelCase snapshot shape.
  return {
    conversationCost: row.conversation_cost,
    workerCost: row.worker_cost,
    conversationTurns: row.conversation_turns,
    cacheReadTokens: row.cache_read_tokens,
    cacheWriteTokens: row.cache_write_tokens,
    warmupSavings: row.warmup_savings,
    warmupHits: row.warmup_hits,
    ttlSavings: row.ttl_savings,
    ttlHits: row.ttl_hits,
    batchSavings: row.batch_savings,
  };
}
|
|
927
|
+
|
|
928
|
+
/**
 * Load cost snapshots for all sessions that have non-zero cost data.
 * Rows that exist only for forceMinLayer (cost columns all zero) are
 * filtered out by the WHERE clause.
 * NOTE(review): worker_cost alone does not qualify a row here — a session
 * with only worker activity would be excluded; confirm that is intentional.
 * Returns a map of sessionID → SessionCostSnapshot.
 */
export function loadAllSessionCosts(): Map<string, SessionCostSnapshot> {
  const rows = db()
    .query(
      `SELECT session_id, conversation_cost, worker_cost, conversation_turns,
        cache_read_tokens, cache_write_tokens,
        warmup_savings, warmup_hits, ttl_savings, ttl_hits, batch_savings
      FROM session_state
      WHERE conversation_turns > 0 OR warmup_savings > 0 OR ttl_savings > 0 OR batch_savings > 0`,
    )
    .all() as Array<{
    session_id: string;
    conversation_cost: number;
    worker_cost: number;
    conversation_turns: number;
    cache_read_tokens: number;
    cache_write_tokens: number;
    warmup_savings: number;
    warmup_hits: number;
    ttl_savings: number;
    ttl_hits: number;
    batch_savings: number;
  }>;
  const result = new Map<string, SessionCostSnapshot>();
  // Convert each snake_case row into the camelCase snapshot shape.
  for (const row of rows) {
    result.set(row.session_id, {
      conversationCost: row.conversation_cost,
      workerCost: row.worker_cost,
      conversationTurns: row.conversation_turns,
      cacheReadTokens: row.cache_read_tokens,
      cacheWriteTokens: row.cache_write_tokens,
      warmupSavings: row.warmup_savings,
      warmupHits: row.warmup_hits,
      ttlSavings: row.ttl_savings,
      ttlHits: row.ttl_hits,
      batchSavings: row.batch_savings,
    });
  }
  return result;
}
|
|
971
|
+
|
|
593
972
|
// ---------------------------------------------------------------------------
|
|
594
973
|
// Installation metadata (metadata table)
|
|
595
974
|
// ---------------------------------------------------------------------------
|