@tekmidian/pai 0.5.6 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/ARCHITECTURE.md +72 -1
- package/README.md +107 -3
- package/dist/{auto-route-BG6I_4B1.mjs → auto-route-C-DrW6BL.mjs} +3 -3
- package/dist/{auto-route-BG6I_4B1.mjs.map → auto-route-C-DrW6BL.mjs.map} +1 -1
- package/dist/cli/index.mjs +1897 -1569
- package/dist/cli/index.mjs.map +1 -1
- package/dist/clusters-JIDQW65f.mjs +201 -0
- package/dist/clusters-JIDQW65f.mjs.map +1 -0
- package/dist/{config-Cf92lGX_.mjs → config-BuhHWyOK.mjs} +21 -6
- package/dist/config-BuhHWyOK.mjs.map +1 -0
- package/dist/daemon/index.mjs +12 -9
- package/dist/daemon/index.mjs.map +1 -1
- package/dist/{daemon-D9evGlgR.mjs → daemon-D3hYb5_C.mjs} +670 -219
- package/dist/daemon-D3hYb5_C.mjs.map +1 -0
- package/dist/daemon-mcp/index.mjs +4597 -4
- package/dist/daemon-mcp/index.mjs.map +1 -1
- package/dist/{db-4lSqLFb8.mjs → db-BtuN768f.mjs} +9 -2
- package/dist/db-BtuN768f.mjs.map +1 -0
- package/dist/db-DdUperSl.mjs +110 -0
- package/dist/db-DdUperSl.mjs.map +1 -0
- package/dist/{detect-BU3Nx_2L.mjs → detect-CdaA48EI.mjs} +1 -1
- package/dist/{detect-BU3Nx_2L.mjs.map → detect-CdaA48EI.mjs.map} +1 -1
- package/dist/{detector-Bp-2SM3x.mjs → detector-jGBuYQJM.mjs} +2 -2
- package/dist/{detector-Bp-2SM3x.mjs.map → detector-jGBuYQJM.mjs.map} +1 -1
- package/dist/{factory-Bzcy70G9.mjs → factory-Ygqe_bVZ.mjs} +7 -5
- package/dist/{factory-Bzcy70G9.mjs.map → factory-Ygqe_bVZ.mjs.map} +1 -1
- package/dist/helpers-BEST-4Gx.mjs +420 -0
- package/dist/helpers-BEST-4Gx.mjs.map +1 -0
- package/dist/hooks/capture-all-events.mjs +19 -4
- package/dist/hooks/capture-all-events.mjs.map +4 -4
- package/dist/hooks/capture-session-summary.mjs +38 -0
- package/dist/hooks/capture-session-summary.mjs.map +3 -3
- package/dist/hooks/cleanup-session-files.mjs +6 -12
- package/dist/hooks/cleanup-session-files.mjs.map +4 -4
- package/dist/hooks/context-compression-hook.mjs +105 -111
- package/dist/hooks/context-compression-hook.mjs.map +4 -4
- package/dist/hooks/initialize-session.mjs +26 -17
- package/dist/hooks/initialize-session.mjs.map +4 -4
- package/dist/hooks/inject-observations.mjs +220 -0
- package/dist/hooks/inject-observations.mjs.map +7 -0
- package/dist/hooks/load-core-context.mjs +18 -2
- package/dist/hooks/load-core-context.mjs.map +4 -4
- package/dist/hooks/load-project-context.mjs +102 -97
- package/dist/hooks/load-project-context.mjs.map +4 -4
- package/dist/hooks/observe.mjs +354 -0
- package/dist/hooks/observe.mjs.map +7 -0
- package/dist/hooks/stop-hook.mjs +174 -90
- package/dist/hooks/stop-hook.mjs.map +4 -4
- package/dist/hooks/sync-todo-to-md.mjs +31 -33
- package/dist/hooks/sync-todo-to-md.mjs.map +4 -4
- package/dist/index.d.mts +32 -9
- package/dist/index.d.mts.map +1 -1
- package/dist/index.mjs +6 -9
- package/dist/indexer-D53l5d1U.mjs +1 -0
- package/dist/{indexer-backend-CIMXedqk.mjs → indexer-backend-jcJFsmB4.mjs} +37 -127
- package/dist/indexer-backend-jcJFsmB4.mjs.map +1 -0
- package/dist/{ipc-client-Bjg_a1dc.mjs → ipc-client-CoyUHPod.mjs} +2 -7
- package/dist/{ipc-client-Bjg_a1dc.mjs.map → ipc-client-CoyUHPod.mjs.map} +1 -1
- package/dist/latent-ideas-bTJo6Omd.mjs +191 -0
- package/dist/latent-ideas-bTJo6Omd.mjs.map +1 -0
- package/dist/neighborhood-BYYbEkUJ.mjs +135 -0
- package/dist/neighborhood-BYYbEkUJ.mjs.map +1 -0
- package/dist/note-context-BK24bX8Y.mjs +126 -0
- package/dist/note-context-BK24bX8Y.mjs.map +1 -0
- package/dist/postgres-CKf-EDtS.mjs +846 -0
- package/dist/postgres-CKf-EDtS.mjs.map +1 -0
- package/dist/{reranker-D7bRAHi6.mjs → reranker-CMNZcfVx.mjs} +1 -1
- package/dist/{reranker-D7bRAHi6.mjs.map → reranker-CMNZcfVx.mjs.map} +1 -1
- package/dist/{search-_oHfguA5.mjs → search-DC1qhkKn.mjs} +2 -58
- package/dist/search-DC1qhkKn.mjs.map +1 -0
- package/dist/{sqlite-WWBq7_2C.mjs → sqlite-l-s9xPjY.mjs} +160 -3
- package/dist/sqlite-l-s9xPjY.mjs.map +1 -0
- package/dist/state-C6_vqz7w.mjs +102 -0
- package/dist/state-C6_vqz7w.mjs.map +1 -0
- package/dist/stop-words-BaMEGVeY.mjs +326 -0
- package/dist/stop-words-BaMEGVeY.mjs.map +1 -0
- package/dist/{indexer-CMPOiY1r.mjs → sync-BOsnEj2-.mjs} +14 -216
- package/dist/sync-BOsnEj2-.mjs.map +1 -0
- package/dist/themes-BvYF0W8T.mjs +148 -0
- package/dist/themes-BvYF0W8T.mjs.map +1 -0
- package/dist/{tools-DV_lsiCc.mjs → tools-DcaJlYDN.mjs} +162 -273
- package/dist/tools-DcaJlYDN.mjs.map +1 -0
- package/dist/trace-CRx9lPuc.mjs +137 -0
- package/dist/trace-CRx9lPuc.mjs.map +1 -0
- package/dist/{vault-indexer-DXWs9pDn.mjs → vault-indexer-Bi2cRmn7.mjs} +174 -138
- package/dist/vault-indexer-Bi2cRmn7.mjs.map +1 -0
- package/dist/zettelkasten-cdajbnPr.mjs +708 -0
- package/dist/zettelkasten-cdajbnPr.mjs.map +1 -0
- package/package.json +1 -2
- package/src/hooks/ts/capture-all-events.ts +6 -0
- package/src/hooks/ts/lib/project-utils/index.ts +50 -0
- package/src/hooks/ts/lib/project-utils/notify.ts +75 -0
- package/src/hooks/ts/lib/project-utils/paths.ts +218 -0
- package/src/hooks/ts/lib/project-utils/session-notes.ts +363 -0
- package/src/hooks/ts/lib/project-utils/todo.ts +178 -0
- package/src/hooks/ts/lib/project-utils/tokens.ts +39 -0
- package/src/hooks/ts/lib/project-utils.ts +40 -999
- package/src/hooks/ts/post-tool-use/observe.ts +327 -0
- package/src/hooks/ts/pre-compact/context-compression-hook.ts +6 -0
- package/src/hooks/ts/session-end/capture-session-summary.ts +41 -0
- package/src/hooks/ts/session-start/initialize-session.ts +7 -1
- package/src/hooks/ts/session-start/inject-observations.ts +254 -0
- package/src/hooks/ts/session-start/load-core-context.ts +7 -0
- package/src/hooks/ts/session-start/load-project-context.ts +8 -1
- package/src/hooks/ts/stop/stop-hook.ts +28 -0
- package/templates/claude-md.template.md +7 -74
- package/templates/skills/user/.gitkeep +0 -0
- package/dist/chunker-CbnBe0s0.mjs +0 -191
- package/dist/chunker-CbnBe0s0.mjs.map +0 -1
- package/dist/config-Cf92lGX_.mjs.map +0 -1
- package/dist/daemon-D9evGlgR.mjs.map +0 -1
- package/dist/db-4lSqLFb8.mjs.map +0 -1
- package/dist/db-Dp8VXIMR.mjs +0 -212
- package/dist/db-Dp8VXIMR.mjs.map +0 -1
- package/dist/indexer-CMPOiY1r.mjs.map +0 -1
- package/dist/indexer-backend-CIMXedqk.mjs.map +0 -1
- package/dist/mcp/index.d.mts +0 -1
- package/dist/mcp/index.mjs +0 -500
- package/dist/mcp/index.mjs.map +0 -1
- package/dist/postgres-FXrHDPcE.mjs +0 -358
- package/dist/postgres-FXrHDPcE.mjs.map +0 -1
- package/dist/schemas-BFIgGntb.mjs +0 -3405
- package/dist/schemas-BFIgGntb.mjs.map +0 -1
- package/dist/search-_oHfguA5.mjs.map +0 -1
- package/dist/sqlite-WWBq7_2C.mjs.map +0 -1
- package/dist/tools-DV_lsiCc.mjs.map +0 -1
- package/dist/vault-indexer-DXWs9pDn.mjs.map +0 -1
- package/dist/zettelkasten-e-a4rW_6.mjs +0 -901
- package/dist/zettelkasten-e-a4rW_6.mjs.map +0 -1
- package/templates/README.md +0 -181
- package/templates/skills/createskill-skill.template.md +0 -78
- package/templates/skills/history-system.template.md +0 -371
- package/templates/skills/hook-system.template.md +0 -913
- package/templates/skills/sessions-skill.template.md +0 -102
- package/templates/skills/skill-system.template.md +0 -214
- package/templates/skills/terminal-tabs.template.md +0 -120
- package/templates/templates.md +0 -20
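The largest source-level change is the split of the monolithic `project-utils.ts` (+40 −999) into the `project-utils/` directory listed above (`notify`, `paths`, `session-notes`, `todo`, `tokens`, plus a 50-line `index.ts`). Since the hooks below still import from `'../lib/project-utils'`, the slimmed-down `project-utils.ts` (or the new `index.ts`) presumably re-exports the submodules so existing import specifiers keep working. A sketch of that shape — the submodule names are real, but which export lives where is inferred from the file names, not taken from the diff:

```typescript
// Hypothetical shape of the new barrel in
// package/src/hooks/ts/lib/project-utils/ — the export-to-module
// mapping below is an educated guess based on the file names.
export * from "./notify";         // e.g. sendNtfyNotification
export * from "./paths";          // e.g. findTodoPath, findAllClaudeMdPaths
export * from "./session-notes";  // e.g. createSessionNote, finalizeSessionNote
export * from "./todo";           // e.g. updateTodoContinue
export * from "./tokens";
```

The hunks below show the hook-side changes that consume these exports.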
package/src/hooks/ts/session-start/load-project-context.ts:

```diff
@@ -27,7 +27,8 @@ import {
   createSessionNote,
   findTodoPath,
   findAllClaudeMdPaths,
-  sendNtfyNotification
+  sendNtfyNotification,
+  isProbeSession
 } from '../lib/project-utils';
 
 /**
@@ -80,6 +81,12 @@ interface HookInput {
 async function main() {
   console.error('\nload-project-context.ts starting...');
 
+  // Skip probe/health-check sessions (e.g. CodexBar ClaudeProbe)
+  if (isProbeSession()) {
+    console.error('Probe session detected - skipping project context loading');
+    process.exit(0);
+  }
+
   // Read hook input from stdin
   let hookInput: HookInput | null = null;
   try {
```
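`isProbeSession()` is one of the new `project-utils` exports; its implementation is not part of this diff. As a rough sketch of the pattern, assuming probe sessions are detectable from an environment marker or prompt text (`PAI_PROBE` and `CLAUDE_PROBE_PROMPT` are invented names for illustration):

```typescript
// Hypothetical sketch only — the real isProbeSession() lives in
// package/src/hooks/ts/lib/project-utils/ and may use entirely different
// signals. PAI_PROBE and CLAUDE_PROBE_PROMPT are illustrative, not real.
export function isProbeSession(): boolean {
  // An explicit marker set by a health-check harness.
  if (process.env.PAI_PROBE === "1") return true;
  // Fall back to sniffing for known probe fingerprints,
  // e.g. CodexBar's ClaudeProbe mentioned in the hook comment.
  const prompt = process.env.CLAUDE_PROBE_PROMPT ?? "";
  return /\bClaudeProbe\b|health[- ]?check/i.test(prompt);
}
```

Whatever the real detection logic is, the guard exits with status 0 before the hook reads stdin, so probes skip all project context loading.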
package/src/hooks/ts/stop/stop-hook.ts:

```diff
@@ -9,6 +9,8 @@ import {
   moveSessionFilesToSessionsDir,
   addWorkToSessionNote,
   findNotesDir,
+  isProbeSession,
+  updateTodoContinue,
   WorkItem
 } from '../lib/project-utils';
 
@@ -207,6 +209,11 @@ function contentToText(content: any): string {
 }
 
 async function main() {
+  // Skip probe/health-check sessions (e.g. CodexBar ClaudeProbe)
+  if (isProbeSession()) {
+    process.exit(0);
+  }
+
   const timestamp = new Date().toISOString();
   console.error(`\nSTOP-HOOK TRIGGERED AT ${timestamp}`);
 
@@ -385,6 +392,27 @@ async function main() {
       const summary = message || 'Session completed.';
       finalizeSessionNote(currentNotePath, summary);
       console.error(`Session note finalized: ${basename(currentNotePath)}`);
+
+      // Update TODO.md ## Continue section so next session has context
+      try {
+        const stateLines: string[] = [];
+        stateLines.push(`Working directory: ${cwd}`);
+        if (workItems.length > 0) {
+          stateLines.push('');
+          stateLines.push('Work completed:');
+          for (const item of workItems.slice(0, 5)) {
+            stateLines.push(`- ${item.title}`);
+          }
+        }
+        if (message) {
+          stateLines.push('');
+          stateLines.push(`Last completed: ${message}`);
+        }
+        const state = stateLines.join('\n');
+        updateTodoContinue(cwd, basename(currentNotePath), state, 'session-end');
+      } catch (todoError) {
+        console.error(`Could not update TODO.md: ${todoError}`);
+      }
     }
   } catch (noteError) {
     console.error(`Could not finalize session note: ${noteError}`);
```
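To see what the new stop-hook block actually writes, here is its state assembly extracted into a standalone snippet with sample data (the work items and path are made up; `updateTodoContinue` itself, per the in-diff comment, places this text under the `## Continue` heading of TODO.md):

```typescript
// Mirrors the state assembly added to stop-hook.ts; sample values only.
interface WorkItem { title: string; }

const cwd = "/Users/me/projects/demo";
const workItems: WorkItem[] = [
  { title: "Add probe-session guards" },
  { title: "Split project-utils into submodules" },
];
const message = "Refactored session hooks";

const stateLines: string[] = [];
stateLines.push(`Working directory: ${cwd}`);
if (workItems.length > 0) {
  stateLines.push("", "Work completed:");
  for (const item of workItems.slice(0, 5)) {  // at most 5 items carried over
    stateLines.push(`- ${item.title}`);
  }
}
if (message) {
  stateLines.push("", `Last completed: ${message}`);
}
console.log(stateLines.join("\n"));
// Working directory: /Users/me/projects/demo
//
// Work completed:
// - Add probe-session guards
// - Split project-utils into submodules
//
// Last completed: Refactored session hooks
```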
package/templates/claude-md.template.md:

````diff
@@ -635,82 +635,13 @@ IMPORTANT:
 
 ---
 
-## 
+## Session Lifecycle
 
-** 
+**Session commands (pause, end, continue, go, cpp) are defined in the CORE skill.**
 
-
+The CORE skill auto-loads at session start and contains the full session lifecycle: pause checkpoints, end session with commit/push, continuation protocol, and session note naming rules.
 
-
-# 1. Update session notes with summary of work done
-# 2. Update TODO.md (mark completed, add discovered tasks)
-# 3. Rename session note if it has placeholder name
-# 4. Commit and push in Obsidian/Notes project
-# 5. Commit and push in related code folder (if any)
-```
-
-### Automatic Execution
-
-When user says "end session" (or "end", "done", "finish session"):
-
-**Step 1: Summarize and Update Session Note**
-- Write summary of what was accomplished
-- List any blockers or open questions
-- Ensure session note has meaningful name (NOT "New Session" or project name)
-
-**Step 2: Update TODO.md**
-- Mark completed tasks with `[x]`
-- Keep in-progress tasks with `[ ]`
-- Add any newly discovered tasks
-
-**Step 3: Commit and Push Obsidian Project**
-```bash
-cd [OBSIDIAN_PROJECT_DIR]
-git add .
-git status  # Check what's being committed
-git commit -m "docs: Session NNNN complete - [brief description]"
-git push
-```
-
-**Step 4: Commit and Push Code Repository (if applicable)**
-```bash
-cd [CODE_PROJECT_DIR]
-git add .
-git status
-git commit -m "feat/fix/refactor: [description of changes]"
-git push
-```
-
-**Step 5: Confirm Completion**
-Report back:
-- Session note filename (should be descriptive)
-- Commits made (both repos if applicable)
-- Any uncommitted changes that were skipped
-- Next session starting point
-
-### Project-Code Directory Mapping
-
-Add your project mappings to `~/.config/pai/project-mappings.json`:
-
-```json
-{
-  "mappings": [
-    { "obsidian": "My Project", "code_dir": "~/path/to/code" }
-  ]
-}
-```
-
-PAI reads this file during `end session` to know which code repositories to commit alongside their Obsidian project notes. Run `pai setup` to configure your mappings interactively.
-
-For personal preferences (notification channels, voice settings, agent defaults), see `~/.config/pai/agent-prefs.md`.
-
-### Important Rules
-
-- **NEVER skip the code repo commit** if code changes were made
-- **ALWAYS rename placeholder session notes** before committing
-- **ALWAYS use clean commit messages** (no AI signatures)
-- **CHECK for uncommitted changes** in both locations
-- **REPORT what was pushed** so user knows the state
+**Key commands:** "pause session", "end session", "go"/"continue", "cpp" (commit-push-publish).
 
 ---
 
@@ -722,7 +653,7 @@ For personal preferences (notification channels, voice settings, agent defaults)
 4. **Always spotcheck** - verification is mandatory
 5. **Never search home** - use specific subdirectories
 6. **Autonomous swarm mode** - spawn orchestrator, let agents do the work, no questions
-7. **
+7. **Session lifecycle in CORE skill** - pause, end, continue, cpp commands
 8. **Plan mode for 3+ steps** - write specs, evaluate approaches, execute with verification
 9. **Verify before done** - prove it works, don't just claim it
 10. **Learn from corrections** - update tasks/lessons.md after every user correction
@@ -730,4 +661,6 @@ For personal preferences (notification channels, voice settings, agent defaults)
 12. **Fix bugs autonomously** - investigate, resolve, verify without hand-holding
 13. **Task management** - plan to tasks/todo.md first, track progress, document results
 
+**CLAUDE.md + CORE skill = complete PAI configuration.** CLAUDE.md covers agent architecture and engineering standards. CORE skill covers identity, session lifecycle, notifications, and compaction resilience.
+
 This is constitutional. Violations waste time, money, and context.
````

package/templates/skills/user/.gitkeep: file without changes.
package/dist/chunker-CbnBe0s0.mjs (deleted):

```diff
@@ -1,191 +0,0 @@
-import { createHash } from "node:crypto";
-
-//#region src/memory/chunker.ts
-/**
- * Markdown text chunker for the PAI memory engine.
- *
- * Splits markdown files into overlapping text segments suitable for BM25
- * full-text indexing. Respects heading boundaries where possible, falling
- * back to paragraph and sentence splitting when sections are large.
- */
-const DEFAULT_MAX_TOKENS = 400;
-const DEFAULT_OVERLAP = 80;
-/**
- * Approximate token count using a words * 1.3 heuristic.
- * Matches the OpenClaw estimate approach.
- */
-function estimateTokens(text) {
-  const wordCount = text.split(/\s+/).filter(Boolean).length;
-  return Math.ceil(wordCount * 1.3);
-}
-/**
- * Compute SHA-256 hash of a string, returning a hex string.
- */
-function sha256(text) {
-  return createHash("sha256").update(text).digest("hex");
-}
-/**
- * Split content into sections delimited by ## or ### headings.
- * Each section starts at its heading line (or at line 1 for a preamble).
- */
-function splitBySections(lines) {
-  const sections = [];
-  let current = [];
-  for (const line of lines) {
-    if (/^#{1,3}\s/.test(line.text) && current.length > 0) {
-      const text = current.map((l) => l.text).join("\n");
-      sections.push({
-        lines: current,
-        tokens: estimateTokens(text)
-      });
-      current = [];
-    }
-    current.push(line);
-  }
-  if (current.length > 0) {
-    const text = current.map((l) => l.text).join("\n");
-    sections.push({
-      lines: current,
-      tokens: estimateTokens(text)
-    });
-  }
-  return sections;
-}
-/**
- * Split a LineBlock by double-newline paragraph boundaries.
- */
-function splitByParagraphs(block) {
-  const paragraphs = [];
-  let current = [];
-  for (const line of block.lines) if (line.text.trim() === "" && current.length > 0) {
-    const text = current.map((l) => l.text).join("\n");
-    paragraphs.push({
-      lines: [...current],
-      tokens: estimateTokens(text)
-    });
-    current = [];
-  } else current.push(line);
-  if (current.length > 0) {
-    const text = current.map((l) => l.text).join("\n");
-    paragraphs.push({
-      lines: current,
-      tokens: estimateTokens(text)
-    });
-  }
-  return paragraphs.length > 0 ? paragraphs : [block];
-}
-/**
- * Split a LineBlock by sentence boundaries (. ! ?) when even paragraphs are
- * too large. Works character-by-character within joined lines.
- */
-function splitBySentences(block, maxTokens) {
-  const sentences = block.lines.map((l) => l.text).join(" ").split(/(?<=[.!?])\s+(?=[A-Z"'])/g);
-  const result = [];
-  let accText = "";
-  const startLine = block.lines[0]?.lineNo ?? 1;
-  const endLine = block.lines[block.lines.length - 1]?.lineNo ?? startLine;
-  const totalLines = endLine - startLine + 1;
-  const linesPerSentence = Math.max(1, Math.floor(totalLines / Math.max(1, sentences.length)));
-  let sentenceIdx = 0;
-  let approxLine = startLine;
-  const flush = () => {
-    if (!accText.trim()) return;
-    const endApprox = Math.min(approxLine + linesPerSentence - 1, endLine);
-    result.push({
-      lines: [{
-        text: accText.trim(),
-        lineNo: approxLine
-      }],
-      tokens: estimateTokens(accText)
-    });
-    approxLine = endApprox + 1;
-    accText = "";
-  };
-  for (const sentence of sentences) {
-    sentenceIdx++;
-    const candidateText = accText ? accText + " " + sentence : sentence;
-    if (estimateTokens(candidateText) > maxTokens && accText) {
-      flush();
-      accText = sentence;
-    } else accText = candidateText;
-  }
-  flush();
-  return result.length > 0 ? result : [block];
-}
-/**
- * Extract the last `overlapTokens` worth of text from a list of previously
- * emitted chunks to prepend to the next chunk.
- */
-function buildOverlapPrefix(chunks, overlapTokens) {
-  if (overlapTokens <= 0 || chunks.length === 0) return [];
-  const lastChunk = chunks[chunks.length - 1];
-  if (!lastChunk) return [];
-  const lines = lastChunk.text.split("\n");
-  const kept = [];
-  let acc = 0;
-  for (let i = lines.length - 1; i >= 0; i--) {
-    const lineTokens = estimateTokens(lines[i] ?? "");
-    acc += lineTokens;
-    kept.unshift(lines[i] ?? "");
-    if (acc >= overlapTokens) break;
-  }
-  const startLine = lastChunk.endLine - kept.length + 1;
-  return kept.map((text, idx) => ({
-    text,
-    lineNo: Math.max(lastChunk.startLine, startLine + idx)
-  }));
-}
-/**
- * Chunk a markdown file into overlapping segments for BM25 indexing.
- *
- * Strategy:
- * 1. Split by headings (##, ###) as natural boundaries.
- * 2. If a section exceeds maxTokens, split by paragraphs.
- * 3. If a paragraph still exceeds maxTokens, split by sentences.
- * 4. Apply overlap: each chunk includes the last `overlap` tokens from the
- *    previous chunk.
- */
-function chunkMarkdown(content, opts) {
-  const maxTokens = opts?.maxTokens ?? DEFAULT_MAX_TOKENS;
-  const overlapTokens = opts?.overlap ?? DEFAULT_OVERLAP;
-  if (!content.trim()) return [];
-  const sections = splitBySections(content.split("\n").map((text, idx) => ({
-    text,
-    lineNo: idx + 1
-  })));
-  const finalBlocks = [];
-  for (const section of sections) {
-    if (section.tokens <= maxTokens) {
-      finalBlocks.push(section);
-      continue;
-    }
-    const paras = splitByParagraphs(section);
-    for (const para of paras) {
-      if (para.tokens <= maxTokens) {
-        finalBlocks.push(para);
-        continue;
-      }
-      const sentences = splitBySentences(para, maxTokens);
-      finalBlocks.push(...sentences);
-    }
-  }
-  const chunks = [];
-  for (const block of finalBlocks) {
-    if (block.lines.length === 0) continue;
-    const text = [...buildOverlapPrefix(chunks, overlapTokens), ...block.lines].map((l) => l.text).join("\n").trim();
-    if (!text) continue;
-    const startLine = block.lines[0]?.lineNo ?? 1;
-    const endLine = block.lines[block.lines.length - 1]?.lineNo ?? startLine;
-    chunks.push({
-      text,
-      startLine,
-      endLine,
-      hash: sha256(text)
-    });
-  }
-  return chunks;
-}
-
-//#endregion
-export { estimateTokens as n, chunkMarkdown as t };
-//# sourceMappingURL=chunker-CbnBe0s0.mjs.map
```
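The deleted chunker documented its own strategy: split on `##`/`###` headings, then paragraphs, then sentences, with an ~80-token overlap between chunks and a words × 1.3 token estimate (so a 10-word line counts as ceil(10 × 1.3) = 13 tokens). For anyone pinned to 0.5.x, a short usage sketch; the import path is illustrative, since the published build only exported these functions from an internal dist chunk under mangled names:

```typescript
// Illustrative use of the removed 0.5.x chunker. The "./memory/chunker"
// path assumes you are working from the package's TypeScript source.
import { chunkMarkdown } from "./memory/chunker";

const doc = [
  "# Notes",
  "",
  "## Section A",
  "A paragraph of prose that gets indexed for BM25 full-text search.",
  "",
  "## Section B",
  "Short sections stay whole; oversized ones are split by paragraph,",
  "then by sentence, and each chunk repeats ~80 tokens of its predecessor.",
].join("\n");

for (const chunk of chunkMarkdown(doc, { maxTokens: 400, overlap: 80 })) {
  // Each chunk carries 1-indexed line bounds plus a SHA-256 content hash
  // (presumably used by the indexer to skip unchanged chunks).
  console.log(chunk.startLine, chunk.endLine, chunk.hash.slice(0, 8));
}
```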
package/dist/chunker-CbnBe0s0.mjs.map (deleted): single-line source map for the chunker above; its `sourcesContent` embedded the original `src/memory/chunker.ts`.
package/dist/config-Cf92lGX_.mjs.map (deleted): single-line source map whose `sourcesContent` embedded `src/notifications/types.ts` (channel IDs, event types, routing table, and default notification config) and `src/daemon/config.ts` (the `~/.config/pai/config.json` loader with deep-merged defaults).
package/dist/daemon-D9evGlgR.mjs.map (deleted): single-line source map whose `sourcesContent` embedded `src/notifications/config.ts` (notification config persistence), the ntfy, WhatsApp (Whazaa IPC), macOS (`osascript`), and CLI providers, `src/notifications/router.ts`, and `src/daemon/daemon.ts`.
cli: \"cli\",\n };\n\n if (mode !== \"auto\") {\n const ch = modeToChannel[mode];\n if (!ch) return [];\n // Check the channel is enabled\n const cfg = channels[ch];\n if (cfg && !cfg.enabled) return [ch]; // Still send — mode override bypasses enabled check\n return [ch];\n }\n\n // Auto mode: use routing table, filter to enabled channels\n const candidates = routing[event] ?? [];\n return candidates.filter((ch) => {\n const cfg = channels[ch];\n // \"voice\" channel is virtual — it overlaps with whatsapp.\n // Skip \"voice\" as an independent channel; voice is handled by checking config.mode.\n if (ch === \"voice\") return false;\n return cfg?.enabled === true;\n });\n}\n\n// ---------------------------------------------------------------------------\n// Router\n// ---------------------------------------------------------------------------\n\n/**\n * Route a notification to the appropriate channels.\n *\n * Sends to all resolved channels in parallel.\n * Individual channel failures are non-fatal and logged to stderr.\n *\n * @param payload The notification to send\n * @param config The current notification config (from daemon state)\n */\nexport async function routeNotification(\n payload: NotificationPayload,\n config: NotificationConfig\n): Promise<SendResult> {\n const channels = resolveChannels(config, payload.event);\n\n if (channels.length === 0) {\n return {\n channelsAttempted: [],\n channelsSucceeded: [],\n channelsFailed: [],\n mode: config.mode,\n };\n }\n\n const results = await Promise.allSettled(\n channels.map(async (ch) => {\n const provider = PROVIDERS[ch];\n const ok = await provider.send(payload, config);\n if (!ok) {\n process.stderr.write(\n `[pai-notify] Channel ${ch} failed for event ${payload.event}\\n`\n );\n }\n return { ch, ok };\n })\n );\n\n const succeeded: ChannelId[] = [];\n const failed: ChannelId[] = [];\n\n for (const r of results) {\n if (r.status === \"fulfilled\") {\n if (r.value.ok) {\n succeeded.push(r.value.ch);\n } else {\n failed.push(r.value.ch);\n }\n } else {\n // Provider threw — treat as failure\n failed.push(channels[results.indexOf(r)]);\n }\n }\n\n return {\n channelsAttempted: channels,\n channelsSucceeded: succeeded,\n channelsFailed: failed,\n mode: config.mode,\n };\n}\n","/**\n * daemon.ts — The persistent PAI Daemon\n *\n * Provides shared database access, tool dispatch, and periodic index scheduling\n * for multiple concurrent Claude Code sessions via a Unix Domain Socket.\n *\n * Architecture:\n * MCP shims (Claude sessions) → Unix socket → PAI Daemon\n * ├── registry.db (shared, WAL, always SQLite)\n * ├── federation (SQLite or Postgres/pgvector)\n * ├── Embedding model (singleton)\n * └── Index scheduler (periodic)\n *\n * IPC protocol: NDJSON over Unix Domain Socket\n *\n * Request (shim → daemon):\n * { \"id\": \"uuid\", \"method\": \"tool_name_or_special\", \"params\": {} }\n *\n * Response (daemon → shim):\n * { \"id\": \"uuid\", \"ok\": true, \"result\": <any> }\n * { \"id\": \"uuid\", \"ok\": false, \"error\": \"message\" }\n *\n * Special methods:\n * status — Return daemon status (uptime, index state, db stats)\n * index_now — Trigger immediate index run (non-blocking)\n *\n * All other methods are dispatched to the corresponding PAI tool function.\n *\n * Design notes:\n * - Registry stays in SQLite (small, simple metadata).\n * - Federation backend is configurable: SQLite (default) or Postgres/pgvector.\n * - Auto-fallback: if Postgres is configured but unavailable, falls back to SQLite.\n * - Index writes guarded by 
indexInProgress flag (not a mutex — index is idempotent).\n * - Embedding model loaded lazily on first semantic/hybrid request, then kept alive.\n * - Scheduler runs indexAll() every indexIntervalSecs (default 5 minutes).\n */\n\nimport { existsSync, unlinkSync } from \"node:fs\";\nimport { createServer, connect, Socket, Server } from \"node:net\";\nimport { setPriority } from \"node:os\";\nimport { openRegistry } from \"../registry/db.js\";\nimport type { Database } from \"better-sqlite3\";\nimport { indexAll } from \"../memory/indexer.js\";\nimport {\n toolMemorySearch,\n toolMemoryGet,\n toolProjectInfo,\n toolProjectList,\n toolSessionList,\n toolRegistrySearch,\n toolProjectDetect,\n toolProjectHealth,\n toolProjectTodo,\n toolSessionRoute,\n} from \"../mcp/tools.js\";\nimport { detectTopicShift } from \"../topics/detector.js\";\nimport type { PaiDaemonConfig } from \"./config.js\";\nimport { createStorageBackend } from \"../storage/factory.js\";\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport { configureEmbeddingModel } from \"../memory/embeddings.js\";\nimport type { NotificationConfig, NotificationMode } from \"../notifications/types.js\";\nimport {\n loadNotificationConfig,\n patchNotificationConfig,\n} from \"../notifications/config.js\";\nimport { routeNotification } from \"../notifications/router.js\";\n\n// ---------------------------------------------------------------------------\n// Protocol types\n// ---------------------------------------------------------------------------\n\ninterface IpcRequest {\n id: string;\n method: string;\n params: Record<string, unknown>;\n}\n\ninterface IpcResponse {\n id: string;\n ok: boolean;\n result?: unknown;\n error?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Daemon state\n// ---------------------------------------------------------------------------\n\nlet registryDb: ReturnType<typeof openRegistry>;\nlet storageBackend: StorageBackend;\nlet daemonConfig: PaiDaemonConfig;\nlet startTime = Date.now();\n\n// Index scheduler state\nlet indexInProgress = false;\nlet lastIndexTime = 0;\nlet indexSchedulerTimer: ReturnType<typeof setInterval> | null = null;\n\n// Embed scheduler state\nlet embedInProgress = false;\nlet lastEmbedTime = 0;\nlet embedSchedulerTimer: ReturnType<typeof setInterval> | null = null;\n\n// Vault index scheduler state\nlet vaultIndexInProgress = false;\nlet lastVaultIndexTime = 0;\n\n// ---------------------------------------------------------------------------\n// Notification state\n// ---------------------------------------------------------------------------\n\n/** Mutable notification config — loaded from disk at startup, patchable at runtime */\nlet notificationConfig: NotificationConfig;\n\n// ---------------------------------------------------------------------------\n// Graceful shutdown flag\n// ---------------------------------------------------------------------------\n\n/**\n * Set to true when a SIGTERM/SIGINT is received so that long-running loops\n * (embed, index) can detect the signal and exit their inner loops before the\n * pool/backend is closed. Checked by embedChunksWithBackend() via the\n * `shouldStop` callback passed from runEmbed().\n */\nlet shutdownRequested = false;\n\n// ---------------------------------------------------------------------------\n// Index scheduler\n// ---------------------------------------------------------------------------\n\n/**\n * Run a full index pass. 
Guards against overlapping runs with indexInProgress.\n * Called both by the scheduler and by the index_now IPC method.\n *\n * NOTE: We pass the raw SQLite federation DB to indexAll() for SQLite backend,\n * or skip and use the backend interface for Postgres. The indexer currently\n * uses better-sqlite3 directly; it will be refactored in a future phase.\n * For now, we keep the SQLite indexer path and add a Postgres-aware path.\n */\nasync function runIndex(): Promise<void> {\n if (indexInProgress) {\n process.stderr.write(\"[pai-daemon] Index already in progress, skipping.\\n\");\n return;\n }\n\n if (embedInProgress) {\n process.stderr.write(\"[pai-daemon] Embed in progress, deferring index run.\\n\");\n return;\n }\n\n indexInProgress = true;\n const t0 = Date.now();\n\n try {\n process.stderr.write(\"[pai-daemon] Starting scheduled index run...\\n\");\n\n if (storageBackend.backendType === \"sqlite\") {\n // SQLite: use existing indexAll() which operates on the raw DB handle\n // We need the raw DB — extract it from the SQLite backend\n const { SQLiteBackend } = await import(\"../storage/sqlite.js\");\n if (storageBackend instanceof SQLiteBackend) {\n const db = (storageBackend as SQLiteBackendWithDb).getRawDb();\n const { projects, result } = await indexAll(db, registryDb);\n const elapsed = Date.now() - t0;\n lastIndexTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Index complete: ${projects} projects, ` +\n `${result.filesProcessed} files, ${result.chunksCreated} chunks ` +\n `(${elapsed}ms)\\n`\n );\n }\n } else {\n // Postgres: use the backend-aware indexer\n const { indexAllWithBackend } = await import(\"../memory/indexer-backend.js\");\n const { projects, result } = await indexAllWithBackend(storageBackend, registryDb);\n const elapsed = Date.now() - t0;\n lastIndexTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Index complete (postgres): ${projects} projects, ` +\n `${result.filesProcessed} files, ${result.chunksCreated} chunks ` +\n `(${elapsed}ms)\\n`\n );\n }\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n process.stderr.write(`[pai-daemon] Index error: ${msg}\\n`);\n } finally {\n indexInProgress = false;\n }\n}\n\n/**\n * Internal interface for accessing the raw DB from SQLiteBackend.\n * This avoids a circular dep while keeping type safety.\n */\ninterface SQLiteBackendWithDb {\n getRawDb(): Database;\n}\n\n/**\n * Run a vault index pass. 
Guards against overlapping runs with vaultIndexInProgress.\n * Skips if no vaultPath is configured, or if project index/embed is in progress.\n * Called both by the scheduler (chained after runIndex) and by the vault_index_now IPC method.\n */\nasync function runVaultIndex(): Promise<void> {\n // Skip if no vault path configured\n if (!daemonConfig.vaultPath) return;\n\n if (vaultIndexInProgress) {\n process.stderr.write(\"[pai-daemon] Vault index already in progress, skipping.\\n\");\n return;\n }\n\n // Don't run concurrently with project index or embed\n if (indexInProgress || embedInProgress) {\n process.stderr.write(\"[pai-daemon] Index/embed in progress, deferring vault index.\\n\");\n return;\n }\n\n vaultIndexInProgress = true;\n const t0 = Date.now();\n\n try {\n process.stderr.write(\"[pai-daemon] Starting vault index run...\\n\");\n\n if (storageBackend.backendType === \"sqlite\") {\n const { SQLiteBackend } = await import(\"../storage/sqlite.js\");\n if (storageBackend instanceof SQLiteBackend) {\n const db = (storageBackend as SQLiteBackendWithDb).getRawDb();\n\n // Auto-detect vault project ID if not configured\n let vaultProjectId = daemonConfig.vaultProjectId;\n if (!vaultProjectId) {\n // Look for a project registered at the vault path\n const row = registryDb\n .prepare(\"SELECT id FROM projects WHERE root_path = ?\")\n .get(daemonConfig.vaultPath) as { id: number } | undefined;\n vaultProjectId = row?.id ?? 0;\n }\n\n if (!vaultProjectId) {\n process.stderr.write(\"[pai-daemon] Vault project ID not found. Register the vault as a project first.\\n\");\n return;\n }\n\n const { indexVault } = await import(\"../memory/vault-indexer.js\");\n const result = await indexVault(db, vaultProjectId, daemonConfig.vaultPath);\n const elapsed = Date.now() - t0;\n lastVaultIndexTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Vault index complete: ${result.filesIndexed} files, ` +\n `${result.linksExtracted} links, ${result.deadLinksFound} dead, ` +\n `${result.orphansFound} orphans (${elapsed}ms)\\n`\n );\n }\n } else {\n process.stderr.write(\"[pai-daemon] Vault indexing only supported on SQLite backend.\\n\");\n }\n } catch (e) {\n const msg = e instanceof Error ? 
e.message : String(e);\n process.stderr.write(`[pai-daemon] Vault index error: ${msg}\\n`);\n } finally {\n vaultIndexInProgress = false;\n }\n}\n\n/**\n * Start the periodic index scheduler.\n */\nfunction startIndexScheduler(): void {\n const intervalMs = daemonConfig.indexIntervalSecs * 1_000;\n\n process.stderr.write(\n `[pai-daemon] Index scheduler: every ${daemonConfig.indexIntervalSecs}s\\n`\n );\n\n // Run an initial index at startup (non-blocking — let the socket come up first)\n setTimeout(() => {\n runIndex()\n .then(() => runVaultIndex())\n .catch((e) => {\n process.stderr.write(`[pai-daemon] Startup index error: ${e}\\n`);\n });\n }, 2_000);\n\n indexSchedulerTimer = setInterval(() => {\n runIndex()\n .then(() => runVaultIndex())\n .catch((e) => {\n process.stderr.write(`[pai-daemon] Scheduled index error: ${e}\\n`);\n });\n }, intervalMs);\n\n // Don't let the interval keep the process alive if all else exits\n if (indexSchedulerTimer.unref) {\n indexSchedulerTimer.unref();\n }\n}\n\n// ---------------------------------------------------------------------------\n// Embed scheduler\n// ---------------------------------------------------------------------------\n\n/**\n * Run an embedding pass for all unembedded chunks (Postgres backend only).\n * Guards against overlapping runs with embedInProgress.\n * Skips if an index run is currently in progress to avoid contention.\n */\nasync function runEmbed(): Promise<void> {\n if (embedInProgress) {\n process.stderr.write(\"[pai-daemon] Embed already in progress, skipping.\\n\");\n return;\n }\n\n // Don't compete with the indexer — it writes new chunks that will need embedding\n if (indexInProgress) {\n process.stderr.write(\"[pai-daemon] Index in progress, deferring embed pass.\\n\");\n return;\n }\n\n // Embedding is only supported on the Postgres backend.\n // The SQLite path uses embedChunks() in indexer.ts directly (manual CLI only).\n if (storageBackend.backendType !== \"postgres\") {\n return;\n }\n\n embedInProgress = true;\n const t0 = Date.now();\n\n try {\n process.stderr.write(\"[pai-daemon] Starting scheduled embed pass...\\n\");\n\n const { embedChunksWithBackend } = await import(\"../memory/indexer-backend.js\");\n const count = await embedChunksWithBackend(storageBackend, () => shutdownRequested);\n\n const elapsed = Date.now() - t0;\n lastEmbedTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Embed pass complete: ${count} chunks embedded (${elapsed}ms)\\n`\n );\n } catch (e) {\n const msg = e instanceof Error ? 
e.message : String(e);\n process.stderr.write(`[pai-daemon] Embed error: ${msg}\\n`);\n } finally {\n embedInProgress = false;\n }\n}\n\n/**\n * Start the periodic embed scheduler.\n * Initial run is 30 seconds after startup (after the 2-second index startup run).\n */\nfunction startEmbedScheduler(): void {\n const intervalMs = daemonConfig.embedIntervalSecs * 1_000;\n\n process.stderr.write(\n `[pai-daemon] Embed scheduler: every ${daemonConfig.embedIntervalSecs}s\\n`\n );\n\n // Initial embed run 30 seconds after startup (lets the first index run finish)\n setTimeout(() => {\n runEmbed().catch((e) => {\n process.stderr.write(`[pai-daemon] Startup embed error: ${e}\\n`);\n });\n }, 30_000);\n\n embedSchedulerTimer = setInterval(() => {\n runEmbed().catch((e) => {\n process.stderr.write(`[pai-daemon] Scheduled embed error: ${e}\\n`);\n });\n }, intervalMs);\n\n // Don't let the interval keep the process alive if all else exits\n if (embedSchedulerTimer.unref) {\n embedSchedulerTimer.unref();\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool dispatcher\n// ---------------------------------------------------------------------------\n\n/**\n * Dispatch an IPC tool call to the appropriate tool function.\n * Returns the tool result or throws.\n */\nasync function dispatchTool(\n method: string,\n params: Record<string, unknown>\n): Promise<unknown> {\n // Cast through unknown to satisfy TypeScript's strict overlap check on\n // Record<string, unknown> → specific param types. Runtime validation is\n // the responsibility of each tool function (they surface errors gracefully).\n const p = params as unknown;\n\n switch (method) {\n case \"memory_search\":\n return toolMemorySearch(registryDb, storageBackend, p as Parameters<typeof toolMemorySearch>[2]);\n\n case \"memory_get\":\n return toolMemoryGet(registryDb, p as Parameters<typeof toolMemoryGet>[1]);\n\n case \"project_info\":\n return toolProjectInfo(registryDb, p as Parameters<typeof toolProjectInfo>[1]);\n\n case \"project_list\":\n return toolProjectList(registryDb, p as Parameters<typeof toolProjectList>[1]);\n\n case \"session_list\":\n return toolSessionList(registryDb, p as Parameters<typeof toolSessionList>[1]);\n\n case \"registry_search\":\n return toolRegistrySearch(registryDb, p as Parameters<typeof toolRegistrySearch>[1]);\n\n case \"project_detect\":\n return toolProjectDetect(registryDb, p as Parameters<typeof toolProjectDetect>[1]);\n\n case \"project_health\":\n return toolProjectHealth(registryDb, p as Parameters<typeof toolProjectHealth>[1]);\n\n case \"project_todo\":\n return toolProjectTodo(registryDb, p as Parameters<typeof toolProjectTodo>[1]);\n\n case \"topic_check\":\n return detectTopicShift(\n registryDb,\n storageBackend,\n p as Parameters<typeof detectTopicShift>[2]\n );\n\n case \"session_auto_route\":\n return toolSessionRoute(\n registryDb,\n storageBackend,\n p as Parameters<typeof toolSessionRoute>[2]\n );\n\n case \"zettel_explore\":\n case \"zettel_health\":\n case \"zettel_surprise\":\n case \"zettel_suggest\":\n case \"zettel_converse\":\n case \"zettel_themes\": {\n // Zettel tools need the raw federation DB\n const { toolZettelExplore, toolZettelHealth, toolZettelSurprise, toolZettelSuggest, toolZettelConverse, toolZettelThemes } = await import(\"../mcp/tools.js\");\n\n if (storageBackend.backendType !== \"sqlite\") {\n throw new Error(\"Zettel tools require SQLite backend\");\n }\n const { SQLiteBackend } = await import(\"../storage/sqlite.js\");\n if 
(!(storageBackend instanceof SQLiteBackend)) {\n throw new Error(\"Zettel tools require SQLite backend\");\n }\n const fedDb = (storageBackend as SQLiteBackendWithDb).getRawDb();\n\n switch (method) {\n case \"zettel_explore\": return toolZettelExplore(fedDb, p as Parameters<typeof toolZettelExplore>[1]);\n case \"zettel_health\": return toolZettelHealth(fedDb, p as Parameters<typeof toolZettelHealth>[1]);\n case \"zettel_surprise\": return toolZettelSurprise(fedDb, p as Parameters<typeof toolZettelSurprise>[1]);\n case \"zettel_suggest\": return toolZettelSuggest(fedDb, p as Parameters<typeof toolZettelSuggest>[1]);\n case \"zettel_converse\": return toolZettelConverse(fedDb, p as Parameters<typeof toolZettelConverse>[1]);\n case \"zettel_themes\": return toolZettelThemes(fedDb, p as Parameters<typeof toolZettelThemes>[1]);\n }\n break;\n }\n\n default:\n throw new Error(`Unknown method: ${method}`);\n }\n}\n\n// ---------------------------------------------------------------------------\n// IPC server\n// ---------------------------------------------------------------------------\n\nfunction sendResponse(socket: Socket, response: IpcResponse): void {\n try {\n socket.write(JSON.stringify(response) + \"\\n\");\n } catch {\n // Socket may already be closed\n }\n}\n\n/**\n * Handle a single IPC request.\n */\nasync function handleRequest(\n request: IpcRequest,\n socket: Socket\n): Promise<void> {\n const { id, method, params } = request;\n\n // Special: status\n if (method === \"status\") {\n const dbStats = await (async () => {\n try {\n const fedStats = await storageBackend.getStats();\n const projects = (\n registryDb\n .prepare(\"SELECT COUNT(*) AS n FROM projects\")\n .get() as { n: number }\n ).n;\n return { files: fedStats.files, chunks: fedStats.chunks, projects };\n } catch {\n return null;\n }\n })();\n\n sendResponse(socket, {\n id,\n ok: true,\n result: {\n uptime: Math.floor((Date.now() - startTime) / 1000),\n indexInProgress,\n lastIndexTime: lastIndexTime ? new Date(lastIndexTime).toISOString() : null,\n indexIntervalSecs: daemonConfig.indexIntervalSecs,\n embedInProgress,\n lastEmbedTime: lastEmbedTime ? new Date(lastEmbedTime).toISOString() : null,\n embedIntervalSecs: daemonConfig.embedIntervalSecs,\n socketPath: daemonConfig.socketPath,\n storageBackend: storageBackend.backendType,\n db: dbStats,\n vaultIndexInProgress,\n lastVaultIndexTime: lastVaultIndexTime ? new Date(lastVaultIndexTime).toISOString() : null,\n vaultPath: daemonConfig.vaultPath ?? 
null,\n },\n });\n socket.end();\n return;\n }\n\n // Special: index_now — trigger immediate index (non-blocking response)\n if (method === \"index_now\") {\n // Fire and forget — don't await\n runIndex().catch((e) => {\n process.stderr.write(`[pai-daemon] index_now error: ${e}\\n`);\n });\n sendResponse(socket, { id, ok: true, result: { triggered: true } });\n socket.end();\n return;\n }\n\n // Special: vault_index_now — trigger immediate vault index (non-blocking response)\n if (method === \"vault_index_now\") {\n runVaultIndex().catch((e) => {\n process.stderr.write(`[pai-daemon] vault_index_now error: ${e}\\n`);\n });\n sendResponse(socket, { id, ok: true, result: { triggered: true } });\n socket.end();\n return;\n }\n\n // Special: notification_get_config — return current notification config\n if (method === \"notification_get_config\") {\n sendResponse(socket, {\n id,\n ok: true,\n result: {\n config: notificationConfig,\n activeChannels: Object.entries(notificationConfig.channels)\n .filter(([ch, cfg]) => ch !== \"voice\" && (cfg as { enabled: boolean }).enabled)\n .map(([ch]) => ch),\n },\n });\n socket.end();\n return;\n }\n\n // Special: notification_set_config — patch the notification config\n if (method === \"notification_set_config\") {\n try {\n const p = params as {\n mode?: NotificationMode;\n channels?: Record<string, unknown>;\n routing?: Record<string, unknown>;\n };\n notificationConfig = patchNotificationConfig({\n mode: p.mode,\n channels: p.channels as Parameters<typeof patchNotificationConfig>[0][\"channels\"],\n routing: p.routing as Parameters<typeof patchNotificationConfig>[0][\"routing\"],\n });\n sendResponse(socket, {\n id,\n ok: true,\n result: { config: notificationConfig },\n });\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n sendResponse(socket, { id, ok: false, error: msg });\n }\n socket.end();\n return;\n }\n\n // Special: notification_send — route a notification to configured channels\n if (method === \"notification_send\") {\n const p = params as {\n event?: string;\n message?: string;\n title?: string;\n };\n\n if (!p.message) {\n sendResponse(socket, { id, ok: false, error: \"notification_send: message is required\" });\n socket.end();\n return;\n }\n\n const event = (p.event as NotificationConfig[\"routing\"] extends Record<infer K, unknown> ? K : string) ?? \"info\";\n\n routeNotification(\n {\n event: event as Parameters<typeof routeNotification>[0][\"event\"],\n message: p.message,\n title: p.title,\n },\n notificationConfig\n ).then((result) => {\n sendResponse(socket, { id, ok: true, result });\n socket.end();\n }).catch((e) => {\n const msg = e instanceof Error ? e.message : String(e);\n sendResponse(socket, { id, ok: false, error: msg });\n socket.end();\n });\n return;\n }\n\n // All other methods: PAI tool dispatch\n try {\n const result = await dispatchTool(method, params);\n sendResponse(socket, { id, ok: true, result });\n } catch (e) {\n const msg = e instanceof Error ? 
e.message : String(e);\n sendResponse(socket, { id, ok: false, error: msg });\n }\n socket.end();\n}\n\n/**\n * Check whether an existing socket file is actually being served by a live process.\n * Returns true if a daemon is already accepting connections, false otherwise.\n */\nfunction isSocketLive(path: string): Promise<boolean> {\n return new Promise((resolve) => {\n const client = connect(path);\n const timer = setTimeout(() => { client.destroy(); resolve(false); }, 500);\n client.on(\"connect\", () => { clearTimeout(timer); client.end(); resolve(true); });\n client.on(\"error\", () => { clearTimeout(timer); resolve(false); });\n });\n}\n\n/**\n * Start the Unix Domain Socket IPC server.\n */\nasync function startIpcServer(socketPath: string): Promise<Server> {\n // Before removing the socket file, check whether another daemon is already live\n if (existsSync(socketPath)) {\n const live = await isSocketLive(socketPath);\n if (live) {\n throw new Error(\"Another daemon is already running — socket is live. Aborting startup.\");\n }\n try {\n unlinkSync(socketPath);\n process.stderr.write(\"[pai-daemon] Removed stale socket file.\\n\");\n } catch {\n // If we can't remove it, bind will fail with a clear error\n }\n }\n\n const server = createServer((socket: Socket) => {\n let buffer = \"\";\n\n socket.on(\"data\", (chunk: Buffer) => {\n buffer += chunk.toString();\n let nl: number;\n // Process every complete newline-delimited frame in this chunk\n while ((nl = buffer.indexOf(\"\\n\")) !== -1) {\n const line = buffer.slice(0, nl);\n buffer = buffer.slice(nl + 1);\n\n if (line.trim() === \"\") continue; // skip blank lines between frames\n\n let request: IpcRequest;\n try {\n request = JSON.parse(line) as IpcRequest;\n } catch {\n sendResponse(socket, { id: \"?\", ok: false, error: \"Invalid JSON\" });\n socket.destroy();\n return;\n }\n\n handleRequest(request, socket).catch((e: unknown) => {\n const msg = e instanceof Error ? 
e.message : String(e);\n sendResponse(socket, { id: request.id, ok: false, error: msg });\n socket.destroy();\n });\n }\n });\n\n socket.on(\"error\", () => {\n // Client disconnected — nothing to do\n });\n });\n\n server.on(\"error\", (e) => {\n process.stderr.write(`[pai-daemon] IPC server error: ${e}\\n`);\n });\n\n server.listen(socketPath, () => {\n process.stderr.write(\n `[pai-daemon] IPC server listening on ${socketPath}\\n`\n );\n });\n\n return server;\n}\n\n// ---------------------------------------------------------------------------\n// Main daemon entry point\n// ---------------------------------------------------------------------------\n\nexport async function serve(config: PaiDaemonConfig): Promise<void> {\n daemonConfig = config;\n startTime = Date.now();\n\n // Load notification config from disk (merged with defaults)\n notificationConfig = loadNotificationConfig();\n\n process.stderr.write(\"[pai-daemon] Starting daemon...\\n\");\n process.stderr.write(`[pai-daemon] Socket: ${config.socketPath}\\n`);\n process.stderr.write(`[pai-daemon] Storage backend: ${config.storageBackend}\\n`);\n process.stderr.write(\n `[pai-daemon] Notification mode: ${notificationConfig.mode}\\n`\n );\n\n // Lower the daemon's scheduling priority so it yields CPU to interactive\n // Claude Code sessions and editor processes during indexing and embedding.\n // niceness 10 = noticeably lower priority without making it unresponsive.\n // Non-fatal: some environments (containers, restricted sandboxes) may deny it.\n try { setPriority(process.pid, 10); } catch { /* non-fatal */ }\n\n // Configure embedding model from config (before any embed work starts)\n configureEmbeddingModel(config.embeddingModel);\n\n // Open registry (always SQLite)\n try {\n registryDb = openRegistry();\n process.stderr.write(\"[pai-daemon] Registry database opened.\\n\");\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n process.stderr.write(`[pai-daemon] Fatal: Could not open registry: ${msg}\\n`);\n process.exit(1);\n }\n\n // Open federation storage (SQLite or Postgres with auto-fallback)\n try {\n storageBackend = await createStorageBackend(config);\n process.stderr.write(\n `[pai-daemon] Federation backend: ${storageBackend.backendType}\\n`\n );\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n process.stderr.write(`[pai-daemon] Fatal: Could not open federation storage: ${msg}\\n`);\n process.exit(1);\n }\n\n // Start index scheduler\n startIndexScheduler();\n\n // Start embed scheduler (Postgres backend only)\n if (storageBackend.backendType === \"postgres\") {\n startEmbedScheduler();\n } else {\n process.stderr.write(\n \"[pai-daemon] Embed scheduler: disabled (SQLite backend)\\n\"\n );\n }\n\n // Start IPC server (async: checks for a live daemon before unlinking socket)\n const server = await startIpcServer(config.socketPath);\n\n const shutdown = async (signal: string): Promise<void> => {\n process.stderr.write(`\\n[pai-daemon] ${signal} received. 
Stopping.\\n`);\n\n // Signal all long-running loops to stop between batches\n shutdownRequested = true;\n\n // Stop schedulers so no new runs are launched\n if (indexSchedulerTimer) {\n clearInterval(indexSchedulerTimer);\n }\n\n if (embedSchedulerTimer) {\n clearInterval(embedSchedulerTimer);\n }\n\n // Stop accepting new IPC connections\n server.close();\n\n // Wait for any in-progress index or embed pass to finish, up to 10 s.\n // Without this wait, closing the pool while an async query is running\n // causes \"Cannot use a pool after calling end on the pool\" and a dirty crash.\n const SHUTDOWN_TIMEOUT_MS = 10_000;\n const POLL_INTERVAL_MS = 100;\n const deadline = Date.now() + SHUTDOWN_TIMEOUT_MS;\n\n if (indexInProgress || embedInProgress) {\n process.stderr.write(\n `[pai-daemon] Waiting for in-progress operations to finish ` +\n `(index=${indexInProgress}, embed=${embedInProgress})...\\n`\n );\n\n while ((indexInProgress || embedInProgress) && Date.now() < deadline) {\n await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS));\n }\n\n if (indexInProgress || embedInProgress) {\n process.stderr.write(\n \"[pai-daemon] Shutdown timeout reached — forcing exit.\\n\"\n );\n } else {\n process.stderr.write(\"[pai-daemon] In-progress operations finished.\\n\");\n }\n }\n\n try {\n await storageBackend.close();\n } catch {\n // ignore\n }\n\n try {\n unlinkSync(config.socketPath);\n } catch {\n // ignore\n }\n\n process.exit(0);\n };\n\n process.on(\"SIGINT\", () => { shutdown(\"SIGINT\").catch(() => process.exit(0)); });\n process.on(\"SIGTERM\", () => { shutdown(\"SIGTERM\").catch(() => process.exit(0)); });\n\n // Keep process alive\n await new Promise(() => {});\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAqCA,SAAS,UACP,QACA,QACG;CACH,MAAM,SAAS,EAAE,GAAG,QAAQ;AAC5B,MAAK,MAAM,OAAO,OAAO,KAAK,OAAO,EAAE;EACrC,MAAM,SAAS,OAAO;AACtB,MAAI,WAAW,UAAa,WAAW,KAAM;EAC7C,MAAM,SAAU,OAAmC;AACnD,MACE,OAAO,WAAW,YAClB,CAAC,MAAM,QAAQ,OAAO,IACtB,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,OAAO,CAEtB,CAAC,OAAmC,OAAO,UACzC,QACA,OACD;MAED,CAAC,OAAmC,OAAO;;AAG/C,QAAO;;;;;;AAWT,SAAgB,yBAA6C;AAC3D,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO,EAAE,GAAG,6BAA6B;CAG3C,IAAI;AACJ,KAAI;AACF,QAAM,aAAa,aAAa,QAAQ;SAClC;AACN,SAAO,EAAE,GAAG,6BAA6B;;CAG3C,IAAI;AACJ,KAAI;AACF,WAAS,KAAK,MAAM,IAAI;SAClB;AACN,SAAO,EAAE,GAAG,6BAA6B;;CAG3C,MAAM,SAAS,OAAO;AACtB,KAAI,CAAC,UAAU,OAAO,WAAW,SAC/B,QAAO,EAAE,GAAG,6BAA6B;AAG3C,QAAO,UACL,6BACA,OACD;;;;;;AAWH,SAAgB,uBAAuB,QAAkC;AAEvE,KAAI,CAAC,WAAW,WAAW,CACzB,WAAU,YAAY,EAAE,WAAW,MAAM,CAAC;CAI5C,IAAI,OAAgC,EAAE;AACtC,KAAI,WAAW,YAAY,CACzB,KAAI;AACF,SAAO,KAAK,MAAM,aAAa,aAAa,QAAQ,CAAC;SAI/C;AAMV,MAAK,mBAAmB;AAExB,eAAc,aAAa,KAAK,UAAU,MAAM,MAAM,EAAE,GAAG,MAAM,QAAQ;;;;;;AAW3E,SAAgB,wBAAwB,OAIjB;CACrB,MAAM,UAAU,wBAAwB;AAExC,KAAI,MAAM,SAAS,OACjB,SAAQ,OAAO,MAAM;AAGvB,KAAI,MAAM,SACR,SAAQ,WAAW,UACjB,QAAQ,UACR,MAAM,SACP;AAGH,KAAI,MAAM,QACR,SAAQ,UAAU,UAChB,QAAQ,SACR,MAAM,QACP;AAGH,wBAAuB,QAAQ;AAC/B,QAAO;;;;;AC7JT,IAAa,eAAb,MAA0D;CACxD,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,QACkB;EAClB,MAAM,MAAM,OAAO,SAAS;AAC5B,MAAI,CAAC,IAAI,WAAW,CAAC,IAAI,IAAK,QAAO;AAErC,MAAI;GACF,MAAM,UAAkC,EACtC,gBAAgB,6BACjB;AAED,OAAI,QAAQ,MACV,SAAQ,WAAW,QAAQ;AAG7B,OAAI,IAAI,YAAY,IAAI,aAAa,UACnC,SAAQ,cAAc,IAAI;AAS5B,WANiB,MAAM,MAAM,IAAI,KAAK;IACpC,QAAQ;IACR;IACA,MAAM,QAAQ;IACf,CAAC,EAEc;UACV;AACN,UAAO;;;;;;;;;;;;;;;;;;ACvBb,MAAM,gBAAgB;AACtB,MAAM,oBAAoB;;;;;AAM1B,SAAS,WACP,QACA,QACkB;AAClB,QAAO,IAAI,SAAS,YAAY;EAC9B,IAAI,OAAO;EACX,IAAI,SAAS;EACb,IAAI,QAA8C;EAElD,SAAS,OAAO,IAAmB;AACjC,OAAI,KAAM;AACV,UAAO;AACP,OAAI,OAAO;AAAE,iBAAa,MAAM;AAAE,YAAQ;;AAC
1C,OAAI;AAAE,YAAQ,SAAS;WAAU;AACjC,WAAQ,GAAG;;EAGb,MAAM,SAAS,QAAQ,qBAAqB;GAC1C,MAAM,UAAU;IACd,SAAS;IACT,IAAI,YAAY;IAChB;IACA;IACD;AACD,UAAO,MAAM,KAAK,UAAU,QAAQ,GAAG,KAAK;IAC5C;AAEF,SAAO,GAAG,SAAS,UAAkB;AACnC,aAAU,MAAM,UAAU;GAC1B,MAAM,KAAK,OAAO,QAAQ,KAAK;AAC/B,OAAI,OAAO,GAAI;AACf,OAAI;AAEF,WAAO,CADM,KAAK,MAAM,OAAO,MAAM,GAAG,GAAG,CAAC,CAC/B,MAAM;WACb;AACN,WAAO,MAAM;;IAEf;AAEF,SAAO,GAAG,eAAe,OAAO,MAAM,CAAC;AACvC,SAAO,GAAG,aAAa,OAAO,MAAM,CAAC;AAErC,UAAQ,iBAAiB,OAAO,MAAM,EAAE,kBAAkB;GAC1D;;AAGJ,IAAa,mBAAb,MAA8D;CAC5D,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,QACkB;EAClB,MAAM,MAAM,OAAO,SAAS;AAC5B,MAAI,CAAC,IAAI,QAAS,QAAO;EAEzB,MAAM,cAAc,OAAO,SAAS,WAAW,OAAO,SAAS,MAAM;EAErE,MAAM,SAAkC,EACtC,SAAS,QAAQ,SAClB;AAED,MAAI,IAAI,UACN,QAAO,YAAY,IAAI;AAGzB,MAAI,eAAe,OAAO,SAAS,QAEjC,QAAO,QADW,OAAO,SAAS,MAAM,aAAa;AAIvD,SAAO,WAAW,iBAAiB,OAAO;;;;;;;;;;;;ACpF9C,IAAa,gBAAb,MAA2D;CACzD,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,QACkB;AAElB,MAAI,CADQ,OAAO,SAAS,MACnB,QAAS,QAAO;AAEzB,MAAI;GAGF,MAAM,aAFQ,QAAQ,SAAS,OAEP,QAAQ,MAAM,MAAM;GAG5C,MAAM,SAAS,yBAFK,QAAQ,QAAQ,QAAQ,MAAM,MAAM,CAEJ,gBAAgB,UAAU;AAE9E,UAAO,IAAI,SAAS,YAAY;IAC9B,MAAM,QAAQ,MAAM,aAAa,CAAC,MAAM,OAAO,EAAE;KAC/C,UAAU;KACV,OAAO;KACR,CAAC;AACF,UAAM,OAAO;AAIb,UAAM,GAAG,eAAe,QAAQ,MAAM,CAAC;AAGvC,qBAAiB,QAAQ,KAAK,EAAE,IAAI;KACpC;UACI;AACN,UAAO;;;;;;;AClCb,IAAa,cAAb,MAAyD;CACvD,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,SACkB;EAClB,MAAM,SAAS,eAAe,QAAQ,MAAM;EAC5C,MAAM,QAAQ,QAAQ,QAAQ,IAAI,QAAQ,MAAM,KAAK;AACrD,UAAQ,OAAO,MAAM,GAAG,SAAS,MAAM,GAAG,QAAQ,QAAQ,IAAI;AAC9D,SAAO;;;;;;ACGX,MAAM,YAAqD;CACzD,MAAW,IAAI,cAAc;CAC7B,UAAW,IAAI,kBAAkB;CACjC,OAAW,IAAI,eAAe;CAC9B,OAAW,IAAI,kBAAkB;CACjC,KAAW,IAAI,aAAa;CAC7B;;;;;;;;;;;;;;AAmBD,SAAS,gBACP,QACA,OACa;CACb,MAAM,EAAE,MAAM,UAAU,YAAY;AAEpC,KAAI,SAAS,MAAO,QAAO,EAAE;CAG7B,MAAM,gBAA8D;EAClE,OAAW;EACX,UAAW;EACX,MAAW;EACX,OAAW;EACX,KAAW;EACZ;AAED,KAAI,SAAS,QAAQ;EACnB,MAAM,KAAK,cAAc;AACzB,MAAI,CAAC,GAAI,QAAO,EAAE;EAElB,MAAM,MAAM,SAAS;AACrB,MAAI,OAAO,CAAC,IAAI,QAAS,QAAO,CAAC,GAAG;AACpC,SAAO,CAAC,GAAG;;AAKb,SADmB,QAAQ,UAAU,EAAE,EACrB,QAAQ,OAAO;EAC/B,MAAM,MAAM,SAAS;AAGrB,MAAI,OAAO,QAAS,QAAO;AAC3B,SAAO,KAAK,YAAY;GACxB;;;;;;;;;;;AAgBJ,eAAsB,kBACpB,SACA,QACqB;CACrB,MAAM,WAAW,gBAAgB,QAAQ,QAAQ,MAAM;AAEvD,KAAI,SAAS,WAAW,EACtB,QAAO;EACL,mBAAmB,EAAE;EACrB,mBAAmB,EAAE;EACrB,gBAAgB,EAAE;EAClB,MAAM,OAAO;EACd;CAGH,MAAM,UAAU,MAAM,QAAQ,WAC5B,SAAS,IAAI,OAAO,OAAO;EAEzB,MAAM,KAAK,MADM,UAAU,IACD,KAAK,SAAS,OAAO;AAC/C,MAAI,CAAC,GACH,SAAQ,OAAO,MACb,wBAAwB,GAAG,oBAAoB,QAAQ,MAAM,IAC9D;AAEH,SAAO;GAAE;GAAI;GAAI;GACjB,CACH;CAED,MAAM,YAAyB,EAAE;CACjC,MAAM,SAAsB,EAAE;AAE9B,MAAK,MAAM,KAAK,QACd,KAAI,EAAE,WAAW,YACf,KAAI,EAAE,MAAM,GACV,WAAU,KAAK,EAAE,MAAM,GAAG;KAE1B,QAAO,KAAK,EAAE,MAAM,GAAG;KAIzB,QAAO,KAAK,SAAS,QAAQ,QAAQ,EAAE,EAAE;AAI7C,QAAO;EACL,mBAAmB;EACnB,mBAAmB;EACnB,gBAAgB;EAChB,MAAM,OAAO;EACd;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9DH,IAAI;AACJ,IAAI;AACJ,IAAI;AACJ,IAAI,YAAY,KAAK,KAAK;AAG1B,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,sBAA6D;AAGjE,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,sBAA6D;AAGjE,IAAI,uBAAuB;AAC3B,IAAI,qBAAqB;;AAOzB,IAAI;;;;;;;AAYJ,IAAI,oBAAoB;;;;;;;;;;AAexB,eAAe,WAA0B;AACvC,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,sDAAsD;AAC3E;;AAGF,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,yDAAyD;AAC9E;;AAGF,mBAAkB;CAClB,MAAM,KAAK,KAAK,KAAK;AAErB,KAAI;AACF,UAAQ,OAAO,MAAM,iDAAiD;AAEtE,MAAI,eAAe,gBAAgB,UAAU;GAG3C,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,OAAI,0BAA0B,eAAe;IAE3C,MAAM,EAAE,UAAU,WAAW,MAAM,SADvB,eAAuC,UAAU,EACb,WAAW;IAC3D,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,oBAAgB,KAAK,KAAK;AAC1B,YAAQ,OAAO,MACb,gCAAgC,SAAS,aACpC,OAAO,eAAe,UAAU,OAAO,cAAc,WACpD,QAAQ,OACf;;SAEE;GAEL,MAAM,EAAE,wBAAwB,MAAM,OAAO;GAC7C,MAAM,EAAE,UAAU,WA
AW,MAAM,oBAAoB,gBAAgB,WAAW;GAClF,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,mBAAgB,KAAK,KAAK;AAC1B,WAAQ,OAAO,MACb,2CAA2C,SAAS,aAC/C,OAAO,eAAe,UAAU,OAAO,cAAc,WACpD,QAAQ,OACf;;UAEI,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,6BAA6B,IAAI,IAAI;WAClD;AACR,oBAAkB;;;;;;;;AAiBtB,eAAe,gBAA+B;AAE5C,KAAI,CAAC,aAAa,UAAW;AAE7B,KAAI,sBAAsB;AACxB,UAAQ,OAAO,MAAM,4DAA4D;AACjF;;AAIF,KAAI,mBAAmB,iBAAiB;AACtC,UAAQ,OAAO,MAAM,iEAAiE;AACtF;;AAGF,wBAAuB;CACvB,MAAM,KAAK,KAAK,KAAK;AAErB,KAAI;AACF,UAAQ,OAAO,MAAM,6CAA6C;AAElE,MAAI,eAAe,gBAAgB,UAAU;GAC3C,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,OAAI,0BAA0B,eAAe;IAC3C,MAAM,KAAM,eAAuC,UAAU;IAG7D,IAAI,iBAAiB,aAAa;AAClC,QAAI,CAAC,eAKH,kBAHY,WACT,QAAQ,8CAA8C,CACtD,IAAI,aAAa,UAAU,EACR,MAAM;AAG9B,QAAI,CAAC,gBAAgB;AACnB,aAAQ,OAAO,MAAM,oFAAoF;AACzG;;IAGF,MAAM,EAAE,eAAe,MAAM,OAAO;IACpC,MAAM,SAAS,MAAM,WAAW,IAAI,gBAAgB,aAAa,UAAU;IAC3E,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,yBAAqB,KAAK,KAAK;AAC/B,YAAQ,OAAO,MACb,sCAAsC,OAAO,aAAa,UACvD,OAAO,eAAe,UAAU,OAAO,eAAe,SACtD,OAAO,aAAa,YAAY,QAAQ,OAC5C;;QAGH,SAAQ,OAAO,MAAM,kEAAkE;UAElF,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,mCAAmC,IAAI,IAAI;WACxD;AACR,yBAAuB;;;;;;AAO3B,SAAS,sBAA4B;CACnC,MAAM,aAAa,aAAa,oBAAoB;AAEpD,SAAQ,OAAO,MACb,uCAAuC,aAAa,kBAAkB,KACvE;AAGD,kBAAiB;AACf,YAAU,CACP,WAAW,eAAe,CAAC,CAC3B,OAAO,MAAM;AACZ,WAAQ,OAAO,MAAM,qCAAqC,EAAE,IAAI;IAChE;IACH,IAAM;AAET,uBAAsB,kBAAkB;AACtC,YAAU,CACP,WAAW,eAAe,CAAC,CAC3B,OAAO,MAAM;AACZ,WAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;IAClE;IACH,WAAW;AAGd,KAAI,oBAAoB,MACtB,qBAAoB,OAAO;;;;;;;AAa/B,eAAe,WAA0B;AACvC,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,sDAAsD;AAC3E;;AAIF,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,0DAA0D;AAC/E;;AAKF,KAAI,eAAe,gBAAgB,WACjC;AAGF,mBAAkB;CAClB,MAAM,KAAK,KAAK,KAAK;AAErB,KAAI;AACF,UAAQ,OAAO,MAAM,kDAAkD;EAEvE,MAAM,EAAE,2BAA2B,MAAM,OAAO;EAChD,MAAM,QAAQ,MAAM,uBAAuB,sBAAsB,kBAAkB;EAEnF,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,kBAAgB,KAAK,KAAK;AAC1B,UAAQ,OAAO,MACb,qCAAqC,MAAM,oBAAoB,QAAQ,OACxE;UACM,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,6BAA6B,IAAI,IAAI;WAClD;AACR,oBAAkB;;;;;;;AAQtB,SAAS,sBAA4B;CACnC,MAAM,aAAa,aAAa,oBAAoB;AAEpD,SAAQ,OAAO,MACb,uCAAuC,aAAa,kBAAkB,KACvE;AAGD,kBAAiB;AACf,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,qCAAqC,EAAE,IAAI;IAChE;IACD,IAAO;AAEV,uBAAsB,kBAAkB;AACtC,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;IAClE;IACD,WAAW;AAGd,KAAI,oBAAoB,MACtB,qBAAoB,OAAO;;;;;;AAY/B,eAAe,aACb,QACA,QACkB;CAIlB,MAAM,IAAI;AAEV,SAAQ,QAAR;EACE,KAAK,gBACH,QAAO,iBAAiB,YAAY,gBAAgB,EAA4C;EAElG,KAAK,aACH,QAAO,cAAc,YAAY,EAAyC;EAE5E,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,kBACH,QAAO,mBAAmB,YAAY,EAA8C;EAEtF,KAAK,iBACH,QAAO,kBAAkB,YAAY,EAA6C;EAEpF,KAAK,iBACH,QAAO,kBAAkB,YAAY,EAA6C;EAEpF,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,cACH,QAAO,iBACL,YACA,gBACA,EACD;EAEH,KAAK,qBACH,QAAO,iBACL,YACA,gBACA,EACD;EAEH,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK;EACL,KAAK,iBAAiB;GAEpB,MAAM,EAAE,mBAAmB,kBAAkB,oBAAoB,mBAAmB,oBAAoB,qBAAqB,MAAM,OAAO;AAE1I,OAAI,eAAe,gBAAgB,SACjC,OAAM,IAAI,MAAM,sCAAsC;GAExD,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,OAAI,EAAE,0BAA0B,eAC9B,OAAM,IAAI,MAAM,sCAAsC;GAExD,MAAM,QAAS,eAAuC,UAAU;AAEhE,WAAQ,QAAR;IACE,KAAK,iBAAkB,QAAO,kBAAkB,OAAO,EAA6C;IACpG,KAAK,gBAAiB,QAAO,iBAAiB,OAAO,EAA4C;IACjG,KAAK,kBAAmB,QAAO,mBAAmB,OAAO,EAA8C;IACvG,KAAK,iBAAkB,QAAO,kBAAkB,OAAO,EAA6C;IACpG,KAAK,kBAAmB,QAAO,mBAAmB,OAAO,EAA8C;IACvG,KAAK,gBAAiB,QAAO,iBAAiB,OAAO,EAA4C;;AAEnG;;EAGF,QACE,OAAM,IAAI,MAAM,mBAAmB,SAAS;;;AAQlD,SAAS,aAAa,QAAgB,UAA6B;AACjE,KAAI;AACF,SAAO,MAAM,KAAK,UAAU,SAAS,GAAG,KAAK;SA
CvC;;;;;AAQV,eAAe,cACb,SACA,QACe;CACf,MAAM,EAAE,IAAI,QAAQ,WAAW;AAG/B,KAAI,WAAW,UAAU;EACvB,MAAM,UAAU,OAAO,YAAY;AACjC,OAAI;IACF,MAAM,WAAW,MAAM,eAAe,UAAU;IAChD,MAAM,WACJ,WACG,QAAQ,qCAAqC,CAC7C,KAAK,CACR;AACF,WAAO;KAAE,OAAO,SAAS;KAAO,QAAQ,SAAS;KAAQ;KAAU;WAC7D;AACN,WAAO;;MAEP;AAEJ,eAAa,QAAQ;GACnB;GACA,IAAI;GACJ,QAAQ;IACN,QAAQ,KAAK,OAAO,KAAK,KAAK,GAAG,aAAa,IAAK;IACnD;IACA,eAAe,gBAAgB,IAAI,KAAK,cAAc,CAAC,aAAa,GAAG;IACvE,mBAAmB,aAAa;IAChC;IACA,eAAe,gBAAgB,IAAI,KAAK,cAAc,CAAC,aAAa,GAAG;IACvE,mBAAmB,aAAa;IAChC,YAAY,aAAa;IACzB,gBAAgB,eAAe;IAC/B,IAAI;IACJ;IACA,oBAAoB,qBAAqB,IAAI,KAAK,mBAAmB,CAAC,aAAa,GAAG;IACtF,WAAW,aAAa,aAAa;IACtC;GACF,CAAC;AACF,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,aAAa;AAE1B,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,iCAAiC,EAAE,IAAI;IAC5D;AACF,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAM,QAAQ,EAAE,WAAW,MAAM;GAAE,CAAC;AACnE,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,mBAAmB;AAChC,iBAAe,CAAC,OAAO,MAAM;AAC3B,WAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;IAClE;AACF,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAM,QAAQ,EAAE,WAAW,MAAM;GAAE,CAAC;AACnE,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,2BAA2B;AACxC,eAAa,QAAQ;GACnB;GACA,IAAI;GACJ,QAAQ;IACN,QAAQ;IACR,gBAAgB,OAAO,QAAQ,mBAAmB,SAAS,CACxD,QAAQ,CAAC,IAAI,SAAS,OAAO,WAAY,IAA6B,QAAQ,CAC9E,KAAK,CAAC,QAAQ,GAAG;IACrB;GACF,CAAC;AACF,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,2BAA2B;AACxC,MAAI;GACF,MAAM,IAAI;AAKV,wBAAqB,wBAAwB;IAC3C,MAAM,EAAE;IACR,UAAU,EAAE;IACZ,SAAS,EAAE;IACZ,CAAC;AACF,gBAAa,QAAQ;IACnB;IACA,IAAI;IACJ,QAAQ,EAAE,QAAQ,oBAAoB;IACvC,CAAC;WACK,GAAG;AAEV,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAO,OAD1B,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;IACJ,CAAC;;AAErD,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,qBAAqB;EAClC,MAAM,IAAI;AAMV,MAAI,CAAC,EAAE,SAAS;AACd,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAO,OAAO;IAA0C,CAAC;AACxF,UAAO,KAAK;AACZ;;AAKF,oBACE;GACE,OAJW,EAAE,SAAyF;GAKtG,SAAS,EAAE;GACX,OAAO,EAAE;GACV,EACD,mBACD,CAAC,MAAM,WAAW;AACjB,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAM;IAAQ,CAAC;AAC9C,UAAO,KAAK;IACZ,CAAC,OAAO,MAAM;AAEd,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAO,OAD1B,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;IACJ,CAAC;AACnD,UAAO,KAAK;IACZ;AACF;;AAIF,KAAI;AAEF,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAM,QADtB,MAAM,aAAa,QAAQ,OAAO;GACJ,CAAC;UACvC,GAAG;AAEV,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAO,OAD1B,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;GACJ,CAAC;;AAErD,QAAO,KAAK;;;;;;AAOd,SAAS,aAAa,MAAgC;AACpD,QAAO,IAAI,SAAS,YAAY;EAC9B,MAAM,SAAS,QAAQ,KAAK;EAC5B,MAAM,QAAQ,iBAAiB;AAAE,UAAO,SAAS;AAAE,WAAQ,MAAM;KAAK,IAAI;AAC1E,SAAO,GAAG,iBAAiB;AAAE,gBAAa,MAAM;AAAE,UAAO,KAAK;AAAE,WAAQ,KAAK;IAAI;AACjF,SAAO,GAAG,eAAe;AAAE,gBAAa,MAAM;AAAE,WAAQ,MAAM;IAAI;GAClE;;;;;AAMJ,eAAe,eAAe,YAAqC;AAEjE,KAAI,WAAW,WAAW,EAAE;AAE1B,MADa,MAAM,aAAa,WAAW,CAEzC,OAAM,IAAI,MAAM,wEAAwE;AAE1F,MAAI;AACF,cAAW,WAAW;AACtB,WAAQ,OAAO,MAAM,4CAA4C;UAC3D;;CAKV,MAAM,SAAS,cAAc,WAAmB;EAC9C,IAAI,SAAS;AAEb,SAAO,GAAG,SAAS,UAAkB;AACnC,aAAU,MAAM,UAAU;GAC1B,IAAI;AAEJ,WAAQ,KAAK,OAAO,QAAQ,KAAK,MAAM,IAAI;IACzC,MAAM,OAAO,OAAO,MAAM,GAAG,GAAG;AAChC,aAAS,OAAO,MAAM,KAAK,EAAE;AAE7B,QAAI,KAAK,MAAM,KAAK,GAAI;IAExB,IAAI;AACJ,QAAI;AACF,eAAU,KAAK,MAAM,KAAK;YACpB;AACN,kBAAa,QAAQ;MAAE,IAAI;MAAK,IAAI;MAAO,OAAO;MAAgB,CAAC;AACnE,YAAO,SAAS;AAChB;;AAGF,kBAAc,SAAS,OAAO,CAAC,OAAO,MAAe;KACnD,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,kBAAa,QAAQ;MAAE,IAAI,QAAQ;MAAI,IAAI;MAAO,OAAO;MAAK,CAAC;AAC/D,YAAO,SAAS;MAChB;;IAEJ;AAEF,SAAO,GAAG,eAAe,GAEvB;GACF;AAEF,QAAO,GAAG,UAAU,MAAM;AACxB,UAAQ,OAAO,MAAM,kCAAkC,EAAE,IAAI;GAC7D;AAEF,QAAO,OAAO,kBAAkB;AAC9B,UAAQ,OAAO,MACb,wCAAwC,WAAW,IACpD;GACD;AAEF,QAAO;;AAOT,eAAsB,MAAM,QAAwC;AAClE,gBAAe;AACf,aAAY,KAAK,KAAK;AAGtB,sBAAqB,wBAAwB;AAE7C,SAAQ,OAAO,MAAM,oCAAoC;AACzD,SAAQ,OAAO,MAAM,wBAAwB,OAAO,WAAW,IAAI;AACnE,SAAQ,OAAO,MAAM,iCAAiC,OAAO,eAAe,IAAI;AAChF,SAAQ,OAAO,MACb,mCAAmC,mBAAmB,KAAK,IAC5D;AAM
D,KAAI;AAAE,cAAY,QAAQ,KAAK,GAAG;SAAU;AAG5C,yBAAwB,OAAO,eAAe;AAG9C,KAAI;AACF,eAAa,cAAc;AAC3B,UAAQ,OAAO,MAAM,2CAA2C;UACzD,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,gDAAgD,IAAI,IAAI;AAC7E,UAAQ,KAAK,EAAE;;AAIjB,KAAI;AACF,mBAAiB,MAAM,qBAAqB,OAAO;AACnD,UAAQ,OAAO,MACb,oCAAoC,eAAe,YAAY,IAChE;UACM,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,0DAA0D,IAAI,IAAI;AACvF,UAAQ,KAAK,EAAE;;AAIjB,sBAAqB;AAGrB,KAAI,eAAe,gBAAgB,WACjC,sBAAqB;KAErB,SAAQ,OAAO,MACb,4DACD;CAIH,MAAM,SAAS,MAAM,eAAe,OAAO,WAAW;CAEtD,MAAM,WAAW,OAAO,WAAkC;AACxD,UAAQ,OAAO,MAAM,kBAAkB,OAAO,wBAAwB;AAGtE,sBAAoB;AAGpB,MAAI,oBACF,eAAc,oBAAoB;AAGpC,MAAI,oBACF,eAAc,oBAAoB;AAIpC,SAAO,OAAO;EAKd,MAAM,sBAAsB;EAC5B,MAAM,mBAAmB;EACzB,MAAM,WAAW,KAAK,KAAK,GAAG;AAE9B,MAAI,mBAAmB,iBAAiB;AACtC,WAAQ,OAAO,MACb,oEACY,gBAAgB,UAAU,gBAAgB,QACvD;AAED,WAAQ,mBAAmB,oBAAoB,KAAK,KAAK,GAAG,SAC1D,OAAM,IAAI,SAAS,YAAY,WAAW,SAAS,iBAAiB,CAAC;AAGvE,OAAI,mBAAmB,gBACrB,SAAQ,OAAO,MACb,0DACD;OAED,SAAQ,OAAO,MAAM,kDAAkD;;AAI3E,MAAI;AACF,SAAM,eAAe,OAAO;UACtB;AAIR,MAAI;AACF,cAAW,OAAO,WAAW;UACvB;AAIR,UAAQ,KAAK,EAAE;;AAGjB,SAAQ,GAAG,gBAAgB;AAAE,WAAS,SAAS,CAAC,YAAY,QAAQ,KAAK,EAAE,CAAC;GAAI;AAChF,SAAQ,GAAG,iBAAiB;AAAE,WAAS,UAAU,CAAC,YAAY,QAAQ,KAAK,EAAE,CAAC;GAAI;AAGlF,OAAM,IAAI,cAAc,GAAG"}
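The daemon sourcemap above embeds the full documentation of the daemon's IPC protocol: newline-delimited JSON over a Unix Domain Socket, with request frames of the shape { "id", "method", "params" } and response frames of { "id", "ok", "result" } or { "id", "ok": false, "error" }, plus special methods such as "status", "index_now", and "notification_send". A minimal client sketch of that documented frame format follows; it is an illustration only, not part of the package — the socket path is a placeholder assumption (the real value comes from PaiDaemonConfig.socketPath), and the call() helper is hypothetical, not a package export.

// Sketch: one connect-per-call NDJSON request against the documented protocol.
import { connect } from "node:net";
import { randomUUID } from "node:crypto";

const SOCKET_PATH = "/tmp/pai-daemon.sock"; // assumption — actual path is config-dependent

function call(method: string, params: Record<string, unknown> = {}): Promise<unknown> {
  return new Promise((resolve, reject) => {
    let buffer = "";
    const socket = connect(SOCKET_PATH, () => {
      // Request frame: { id, method, params }, one JSON object per line
      socket.write(JSON.stringify({ id: randomUUID(), method, params }) + "\n");
    });
    socket.on("data", (chunk) => {
      buffer += chunk.toString();
      const nl = buffer.indexOf("\n");
      if (nl === -1) return; // wait for a complete newline-delimited frame
      socket.end();
      // Response frame: { id, ok, result } on success, { id, ok: false, error } on failure
      const resp = JSON.parse(buffer.slice(0, nl)) as
        | { ok: true; result: unknown }
        | { ok: false; error: string };
      if (resp.ok) resolve(resp.result);
      else reject(new Error(resp.error));
    });
    socket.on("error", reject);
  });
}

// e.g. query daemon status, or route a notification through the configured channels:
//   await call("status");
//   await call("notification_send", { event: "info", message: "hello" });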
package/dist/db-4lSqLFb8.mjs.map
DELETED
@@ -1 +0,0 @@
-
{"version":3,"file":"db-4lSqLFb8.mjs","names":[],"sources":["../src/registry/schema.ts","../src/registry/db.ts"],"sourcesContent":["/**\n * SQLite DDL for the PAI registry database.\n *\n * Tables:\n * - projects — tracked project directories with type and status\n * - sessions — per-project session notes\n * - tags — normalised tag vocabulary\n * - project_tags — M:N join between projects and tags\n * - session_tags — M:N join between sessions and tags\n * - aliases — alternative slugs that resolve to a project\n * - compaction_log — audit trail for context-compaction events\n * - schema_version — single-row migration version tracking\n */\n\nimport type { Database } from \"better-sqlite3\";\n\nexport const SCHEMA_VERSION = 3;\n\nexport const CREATE_TABLES_SQL = `\nPRAGMA journal_mode = WAL;\nPRAGMA foreign_keys = ON;\n\nCREATE TABLE IF NOT EXISTS projects (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n slug TEXT NOT NULL UNIQUE,\n display_name TEXT NOT NULL,\n root_path TEXT NOT NULL UNIQUE,\n encoded_dir TEXT NOT NULL UNIQUE,\n type TEXT NOT NULL DEFAULT 'local'\n CHECK(type IN ('local','central','obsidian-linked','external')),\n status TEXT NOT NULL DEFAULT 'active'\n CHECK(status IN ('active','archived','migrating')),\n parent_id INTEGER,\n obsidian_link TEXT,\n claude_notes_dir TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER NOT NULL,\n archived_at INTEGER,\n FOREIGN KEY (parent_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS sessions (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n project_id INTEGER NOT NULL,\n number INTEGER NOT NULL,\n date TEXT NOT NULL,\n slug TEXT NOT NULL,\n title TEXT NOT NULL,\n filename TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'open'\n CHECK(status IN ('open','completed','compacted')),\n claude_session_id TEXT,\n token_count INTEGER,\n created_at INTEGER NOT NULL,\n closed_at INTEGER,\n UNIQUE (project_id, number),\n FOREIGN KEY (project_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS tags (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE\n);\n\nCREATE TABLE IF NOT EXISTS project_tags (\n project_id INTEGER NOT NULL,\n tag_id INTEGER NOT NULL,\n PRIMARY KEY (project_id, tag_id),\n FOREIGN KEY (project_id) REFERENCES projects(id),\n FOREIGN KEY (tag_id) REFERENCES tags(id)\n);\n\nCREATE TABLE IF NOT EXISTS session_tags (\n session_id INTEGER NOT NULL,\n tag_id INTEGER NOT NULL,\n PRIMARY KEY (session_id, tag_id),\n FOREIGN KEY (session_id) REFERENCES sessions(id),\n FOREIGN KEY (tag_id) REFERENCES tags(id)\n);\n\nCREATE TABLE IF NOT EXISTS aliases (\n alias TEXT PRIMARY KEY,\n project_id INTEGER NOT NULL,\n FOREIGN KEY (project_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS compaction_log (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n project_id INTEGER NOT NULL,\n session_id INTEGER,\n trigger TEXT NOT NULL\n CHECK(trigger IN ('precompact','manual','end-session')),\n files_written TEXT NOT NULL,\n token_count INTEGER,\n created_at INTEGER NOT NULL,\n FOREIGN KEY (project_id) REFERENCES projects(id),\n FOREIGN KEY (session_id) REFERENCES sessions(id)\n);\n\nCREATE TABLE IF NOT EXISTS links (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id INTEGER NOT NULL,\n target_project_id INTEGER NOT NULL,\n link_type TEXT NOT NULL DEFAULT 'related'\n CHECK(link_type IN ('related','follow-up','reference')),\n created_at INTEGER NOT NULL,\n UNIQUE (session_id, target_project_id),\n FOREIGN KEY (session_id) REFERENCES sessions(id),\n FOREIGN KEY (target_project_id) REFERENCES projects(id)\n);\n\nCREATE 
TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at INTEGER NOT NULL\n);\n\n-- Indexes\nCREATE INDEX IF NOT EXISTS idx_projects_slug ON projects(slug);\nCREATE INDEX IF NOT EXISTS idx_projects_status ON projects(status);\nCREATE INDEX IF NOT EXISTS idx_projects_type ON projects(type);\nCREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project_id);\nCREATE INDEX IF NOT EXISTS idx_sessions_date ON sessions(date);\nCREATE INDEX IF NOT EXISTS idx_sessions_status ON sessions(status);\nCREATE INDEX IF NOT EXISTS idx_sessions_claude ON sessions(claude_session_id);\nCREATE INDEX IF NOT EXISTS idx_pc_project ON project_tags(project_id);\n`;\n\n/**\n * Run the full DDL against an open database connection.\n *\n * The function is idempotent — every statement uses IF NOT EXISTS so it is\n * safe to call on an already-initialised database. After creating the tables\n * it inserts the current SCHEMA_VERSION into schema_version if no row exists\n * yet.\n */\nexport function initializeSchema(db: Database): void {\n // better-sqlite3's exec() runs multiple semicolon-separated statements\n db.exec(CREATE_TABLES_SQL);\n\n const row = db\n .prepare(\"SELECT version FROM schema_version WHERE version = ?\")\n .get(SCHEMA_VERSION);\n\n if (!row) {\n db.prepare(\n \"INSERT INTO schema_version (version, applied_at) VALUES (?, ?)\"\n ).run(SCHEMA_VERSION, Date.now());\n }\n}\n\n/**\n * Apply incremental schema migrations to an already-initialised database.\n *\n * Each migration is guarded by a version check so it is safe to call on\n * databases at any schema version — already-applied migrations are skipped.\n */\nexport function runMigrations(db: Database): void {\n const currentRow = db\n .prepare(\"SELECT version FROM schema_version ORDER BY version DESC LIMIT 1\")\n .get() as { version: number } | undefined;\n\n const current = currentRow?.version ?? 0;\n\n // Migration v1 → v2: add claude_notes_dir column to projects\n if (current < 2) {\n db.transaction(() => {\n // Use a try/catch so re-running on a DB that already has the column is safe\n try {\n db.exec(\"ALTER TABLE projects ADD COLUMN claude_notes_dir TEXT\");\n } catch {\n // Column may already exist (e.g. 
fresh DB created with v2 DDL)\n }\n db.prepare(\n \"INSERT OR REPLACE INTO schema_version (version, applied_at) VALUES (?, ?)\"\n ).run(2, Date.now());\n })();\n }\n\n // Migration v2 → v3: add links table for cross-project session references\n if (current < 3) {\n db.transaction(() => {\n try {\n db.exec(`\n CREATE TABLE IF NOT EXISTS links (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id INTEGER NOT NULL,\n target_project_id INTEGER NOT NULL,\n link_type TEXT NOT NULL DEFAULT 'related'\n CHECK(link_type IN ('related','follow-up','reference')),\n created_at INTEGER NOT NULL,\n UNIQUE (session_id, target_project_id),\n FOREIGN KEY (session_id) REFERENCES sessions(id),\n FOREIGN KEY (target_project_id) REFERENCES projects(id)\n )\n `);\n } catch {\n // Table may already exist (fresh DB created with v3 DDL)\n }\n db.prepare(\n \"INSERT OR REPLACE INTO schema_version (version, applied_at) VALUES (?, ?)\"\n ).run(3, Date.now());\n })();\n }\n}\n","/**\n * Database connection helper for the PAI registry.\n *\n * Uses better-sqlite3 (synchronous API) to open or create registry.db.\n * On first open it runs the full DDL via initializeSchema().\n */\n\nimport { mkdirSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport BetterSqlite3 from \"better-sqlite3\";\nimport type { Database } from \"better-sqlite3\";\nimport { initializeSchema, runMigrations } from \"./schema.js\";\n\nexport type { Database };\n\n/** Default registry path inside the ~/.pai/ directory. */\nconst DEFAULT_REGISTRY_PATH = join(homedir(), \".pai\", \"registry.db\");\n\n/**\n * Open (or create) the PAI registry database.\n *\n * @param path Absolute path to registry.db. Defaults to ~/.pai/registry.db.\n * @returns An open better-sqlite3 Database instance.\n *\n * Side effects on first call:\n * - Creates the parent directory if it does not exist.\n * - Enables WAL journal mode.\n * - Runs initializeSchema() if schema_version is empty.\n */\nexport function openRegistry(path: string = DEFAULT_REGISTRY_PATH): Database {\n // Ensure the directory exists before SQLite tries to create the file\n mkdirSync(dirname(path), { recursive: true });\n\n const db = new BetterSqlite3(path);\n\n // WAL gives better concurrent read performance and crash safety\n db.pragma(\"journal_mode = WAL\");\n db.pragma(\"foreign_keys = ON\");\n\n // Check whether the schema has been applied before\n const tableExists = db\n .prepare(\n `SELECT name FROM sqlite_master\n WHERE type = 'table' AND name = 'schema_version'`\n )\n .get();\n\n if (!tableExists) {\n // Brand-new database — apply the full schema\n initializeSchema(db);\n } else {\n const row = db\n .prepare(\"SELECT version FROM schema_version LIMIT 1\")\n .get() as { version: number } | undefined;\n\n if (!row) {\n // Table exists but is empty — apply schema (handles partial init)\n initializeSchema(db);\n }\n }\n\n // Apply any pending incremental migrations\n runMigrations(db);\n\n return 
db;\n}\n"],"mappings":";;;;;;;AAgBA,MAAa,iBAAiB;AAE9B,MAAa,oBAAoB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqHjC,SAAgB,iBAAiB,IAAoB;AAEnD,IAAG,KAAK,kBAAkB;AAM1B,KAAI,CAJQ,GACT,QAAQ,uDAAuD,CAC/D,IAAI,eAAe,CAGpB,IAAG,QACD,iEACD,CAAC,IAAI,gBAAgB,KAAK,KAAK,CAAC;;;;;;;;AAUrC,SAAgB,cAAc,IAAoB;CAKhD,MAAM,UAJa,GAChB,QAAQ,mEAAmE,CAC3E,KAAK,EAEoB,WAAW;AAGvC,KAAI,UAAU,EACZ,IAAG,kBAAkB;AAEnB,MAAI;AACF,MAAG,KAAK,wDAAwD;UAC1D;AAGR,KAAG,QACD,4EACD,CAAC,IAAI,GAAG,KAAK,KAAK,CAAC;GACpB,EAAE;AAIN,KAAI,UAAU,EACZ,IAAG,kBAAkB;AACnB,MAAI;AACF,MAAG,KAAK;;;;;;;;;;;;UAYN;UACI;AAGR,KAAG,QACD,4EACD,CAAC,IAAI,GAAG,KAAK,KAAK,CAAC;GACpB,EAAE;;;;;;;;;;;;;ACxLR,MAAM,wBAAwB,KAAK,SAAS,EAAE,QAAQ,cAAc;;;;;;;;;;;;AAapE,SAAgB,aAAa,OAAe,uBAAiC;AAE3E,WAAU,QAAQ,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;CAE7C,MAAM,KAAK,IAAI,cAAc,KAAK;AAGlC,IAAG,OAAO,qBAAqB;AAC/B,IAAG,OAAO,oBAAoB;AAU9B,KAAI,CAPgB,GACjB,QACC;yDAED,CACA,KAAK,CAIN,kBAAiB,GAAG;UAMhB,CAJQ,GACT,QAAQ,6CAA6C,CACrD,KAAK,CAIN,kBAAiB,GAAG;AAKxB,eAAc,GAAG;AAEjB,QAAO"}