@tekmidian/pai 0.2.2 → 0.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71) hide show
  1. package/ARCHITECTURE.md +148 -6
  2. package/FEATURE.md +1 -1
  3. package/README.md +79 -0
  4. package/dist/{auto-route-D7W6RE06.mjs → auto-route-JjW3f7pV.mjs} +4 -4
  5. package/dist/{auto-route-D7W6RE06.mjs.map → auto-route-JjW3f7pV.mjs.map} +1 -1
  6. package/dist/chunker-CbnBe0s0.mjs +191 -0
  7. package/dist/chunker-CbnBe0s0.mjs.map +1 -0
  8. package/dist/cli/index.mjs +835 -40
  9. package/dist/cli/index.mjs.map +1 -1
  10. package/dist/{config-DBh1bYM2.mjs → config-DELNqq3Z.mjs} +4 -2
  11. package/dist/{config-DBh1bYM2.mjs.map → config-DELNqq3Z.mjs.map} +1 -1
  12. package/dist/daemon/index.mjs +9 -9
  13. package/dist/{daemon-v5O897D4.mjs → daemon-CeTX4NpF.mjs} +94 -13
  14. package/dist/daemon-CeTX4NpF.mjs.map +1 -0
  15. package/dist/daemon-mcp/index.mjs +3 -3
  16. package/dist/db-Dp8VXIMR.mjs +212 -0
  17. package/dist/db-Dp8VXIMR.mjs.map +1 -0
  18. package/dist/{detect-BHqYcjJ1.mjs → detect-D7gPV3fQ.mjs} +1 -1
  19. package/dist/{detect-BHqYcjJ1.mjs.map → detect-D7gPV3fQ.mjs.map} +1 -1
  20. package/dist/{detector-DKA83aTZ.mjs → detector-cYYhK2Mi.mjs} +2 -2
  21. package/dist/{detector-DKA83aTZ.mjs.map → detector-cYYhK2Mi.mjs.map} +1 -1
  22. package/dist/{embeddings-mfqv-jFu.mjs → embeddings-DGRAPAYb.mjs} +2 -2
  23. package/dist/{embeddings-mfqv-jFu.mjs.map → embeddings-DGRAPAYb.mjs.map} +1 -1
  24. package/dist/{factory-BDAiKtYR.mjs → factory-DZLvRf4m.mjs} +4 -4
  25. package/dist/{factory-BDAiKtYR.mjs.map → factory-DZLvRf4m.mjs.map} +1 -1
  26. package/dist/index.d.mts +1 -1
  27. package/dist/index.d.mts.map +1 -1
  28. package/dist/index.mjs +9 -7
  29. package/dist/{indexer-B20bPHL-.mjs → indexer-CKQcgKsz.mjs} +4 -190
  30. package/dist/indexer-CKQcgKsz.mjs.map +1 -0
  31. package/dist/{indexer-backend-BXaocO5r.mjs → indexer-backend-BHztlJJg.mjs} +4 -3
  32. package/dist/{indexer-backend-BXaocO5r.mjs.map → indexer-backend-BHztlJJg.mjs.map} +1 -1
  33. package/dist/{ipc-client-DPy7s3iu.mjs → ipc-client-CLt2fNlC.mjs} +1 -1
  34. package/dist/ipc-client-CLt2fNlC.mjs.map +1 -0
  35. package/dist/mcp/index.mjs +118 -5
  36. package/dist/mcp/index.mjs.map +1 -1
  37. package/dist/{migrate-Bwj7qPaE.mjs → migrate-jokLenje.mjs} +8 -1
  38. package/dist/migrate-jokLenje.mjs.map +1 -0
  39. package/dist/{pai-marker-DX_mFLum.mjs → pai-marker-CXQPX2P6.mjs} +1 -1
  40. package/dist/{pai-marker-DX_mFLum.mjs.map → pai-marker-CXQPX2P6.mjs.map} +1 -1
  41. package/dist/{postgres-Ccvpc6fC.mjs → postgres-CRBe30Ag.mjs} +1 -1
  42. package/dist/{postgres-Ccvpc6fC.mjs.map → postgres-CRBe30Ag.mjs.map} +1 -1
  43. package/dist/{schemas-DjdwzIQ8.mjs → schemas-BY3Pjvje.mjs} +1 -1
  44. package/dist/{schemas-DjdwzIQ8.mjs.map → schemas-BY3Pjvje.mjs.map} +1 -1
  45. package/dist/{search-PjftDxxs.mjs → search-GK0ibTJy.mjs} +2 -2
  46. package/dist/{search-PjftDxxs.mjs.map → search-GK0ibTJy.mjs.map} +1 -1
  47. package/dist/{sqlite-CHUrNtbI.mjs → sqlite-RyR8Up1v.mjs} +3 -3
  48. package/dist/{sqlite-CHUrNtbI.mjs.map → sqlite-RyR8Up1v.mjs.map} +1 -1
  49. package/dist/{tools-CLK4080-.mjs → tools-CUg0Lyg-.mjs} +175 -11
  50. package/dist/{tools-CLK4080-.mjs.map → tools-CUg0Lyg-.mjs.map} +1 -1
  51. package/dist/{utils-DEWdIFQ0.mjs → utils-QSfKagcj.mjs} +62 -2
  52. package/dist/utils-QSfKagcj.mjs.map +1 -0
  53. package/dist/vault-indexer-Bo2aPSzP.mjs +499 -0
  54. package/dist/vault-indexer-Bo2aPSzP.mjs.map +1 -0
  55. package/dist/zettelkasten-Co-w0XSZ.mjs +901 -0
  56. package/dist/zettelkasten-Co-w0XSZ.mjs.map +1 -0
  57. package/package.json +2 -1
  58. package/src/hooks/README.md +99 -0
  59. package/src/hooks/hooks.md +13 -0
  60. package/src/hooks/pre-compact.sh +95 -0
  61. package/src/hooks/session-stop.sh +93 -0
  62. package/statusline-command.sh +9 -4
  63. package/templates/pai-skill.template.md +428 -0
  64. package/templates/templates.md +20 -0
  65. package/dist/daemon-v5O897D4.mjs.map +0 -1
  66. package/dist/db-BcDxXVBu.mjs +0 -110
  67. package/dist/db-BcDxXVBu.mjs.map +0 -1
  68. package/dist/indexer-B20bPHL-.mjs.map +0 -1
  69. package/dist/ipc-client-DPy7s3iu.mjs.map +0 -1
  70. package/dist/migrate-Bwj7qPaE.mjs.map +0 -1
  71. package/dist/utils-DEWdIFQ0.mjs.map +0 -1
@@ -0,0 +1,428 @@
1
+ ---
2
+ name: PAI
3
+ description: PAI session lifecycle automation. USE WHEN user says "go", "continue", "pause session", "end session", "cpp", OR session needs context loading. Provides session commands, token monitoring, continuation protocol, git commit rules, fact-checking, and source citation.
4
+ ---
5
+
6
+ <!-- Generated by PAI Setup -->
7
+
8
+ ## RESPONSE MODE CLASSIFICATION (Always Active)
9
+
10
+ **Classify EVERY request into one of three modes BEFORE emitting any response token.**
11
+
12
+ | Mode | When | Format |
13
+ |------|------|--------|
14
+ | **MINIMAL** | Greetings, thanks, acks, simple yes/no, one-word answers | Natural conversational response. No structured format. 1-3 sentences max. |
15
+ | **STANDARD** | Single-step tasks, quick lookups, simple file reads, direct questions | Compact: just answer the question directly. |
16
+ | **FULL** | Multi-step work, research, implementation, analysis, 3+ tool calls | Full structured format with SUMMARY/ANALYSIS/ACTIONS/RESULTS/STATUS/NEXT. |
17
+
18
+ **Decision rule:** If you can answer in under 3 sentences without tools → MINIMAL. If it's one action or lookup → STANDARD. Everything else → FULL.
19
+
20
+ ---
21
+
22
+ ## TOKEN MONITORING (Always Active)
23
+
24
+ **Token Limit:** ~200k total context window
25
+ **Auto-Reset Threshold:** ~100k tokens (50%)
26
+
27
+ ### Proactive Context Management
28
+
29
+ **After every 5+ sequential tool calls, PAUSE and self-assess:**
30
+ 1. Estimate current context usage (each file read ≈ 1-3k, edit ≈ 0.5-2k, message+response ≈ 2-5k, search results ≈ 2-5k)
31
+ 2. If estimated usage > 60% of window (~120k tokens): **self-summarize before continuing**
32
+ - **Preserve:** key decisions, numbers, code references, file paths, next actions
33
+ - **Discard:** verbose tool output, intermediate reasoning, raw search results
34
+ - Write a 1-3 paragraph summary replacing prior phase content
35
+ 3. If > 80%: consider whether to checkpoint and suggest `/clear`
36
+
37
+ **This is proactive, not reactive.** Don't wait for auto-compact to surprise you. Manage context like a budget.
38
+
39
+ ### Auto-Reset Protocol
40
+
41
+ **When approaching ~100k tokens, initiate AUTO-RESET:**
42
+
43
+ 1. Update TODO.md with current state
44
+ 2. Create/update session note with checkpoint
45
+ 3. Git commit if there are changes
46
+ 4. Inform user: "Context is getting full. I've saved state to TODO.md. Please run /clear to start fresh."
47
+
48
+ ---
49
+
50
+ ## CONTINUE PREVIOUS WORK (Always Active)
51
+
52
+ **When user's first message implies continuing (e.g., "go", "continue", "weiter", "resume"):**
53
+
54
+ 1. **Check TODO.md for `## Continue` section FIRST** — this is the continuation prompt from the last pause session. It contains everything needed to resume: project context, what was done, what's in progress, exact next steps, background processes, and key file paths.
55
+ 2. **If `## Continue` exists:** Use it as primary context. Announce what you're resuming and proceed with the next step.
56
+ 3. **If no `## Continue`:** Fall back to reading the full TODO.md and the latest session note.
57
+ 4. **Resume** the most relevant work
58
+
59
+ **Quick lookup:**
60
+ ```bash
61
+ # Find TODO.md — check for ## Continue section at the top
62
+ cat Notes/TODO.md 2>/dev/null || cat TODO.md 2>/dev/null
63
+
64
+ # Find latest session note (4-digit format)
65
+ ls -t Notes/*.md 2>/dev/null | grep -E '^Notes/[0-9]{4}' | head -1
66
+ ```
67
+
68
+ ---
69
+
70
+ ## FACT-CHECKING PROTOCOL (Always Active)
71
+
72
+ **When using information from external AI sources (Gemini, ChatGPT, Perplexity, etc.):**
73
+
74
+ 1. **ALWAYS verify** claims against official sources before presenting
75
+ 2. **Mark unverified claims** with: `⚠️ Unverified`
76
+ 3. **Prefer official sources:** Official documentation, government sites, company sites
77
+ 4. **AI assessments may contain errors** - treat them as starting points, not facts
78
+
79
+ **Example:**
80
+ ```
81
+ According to Gemini, the limit is 500 requests/day. ⚠️ Unverified - checking official docs...
82
+ ```
83
+
84
+ ---
85
+
86
+ ## SOURCE CITATION (Always Active)
87
+
88
+ **For legal, regulatory, or technical claims:**
89
+
90
+ - **ALWAYS include links** to official sources
91
+ - **Format:** `[Source Name](URL)` or inline link
92
+ - **Prefer:** Official documentation > Blog posts > Forum answers
93
+ - **When unsure:** Say "I couldn't find an official source for this"
94
+
95
+ **Example:**
96
+ ```
97
+ The GDPR requires consent for processing personal data ([GDPR Art. 6](https://gdpr-info.eu/art-6-gdpr/)).
98
+ ```
99
+
100
+ ---
101
+
102
+ ## ANTI-CRITERIA IN PLANNING (Always Active)
103
+
104
+ **When planning non-trivial work, define what MUST NOT happen alongside what must happen.**
105
+
106
+ - Prefix negative requirements with `ISC-A` (Anti-Criteria): `ISC-A1: No personal data in exported files`
107
+ - Anti-criteria are first-class verifiable requirements — verify them in the same pass as positive criteria
108
+ - Common anti-criteria: no regressions, no secrets in commits, no breaking changes to public API, no data loss
109
+
110
+ ---
111
+
112
+ ## INVOCATION OBLIGATION (Always Active)
113
+
114
+ **If you mention a tool or capability during planning, you MUST actually invoke it.**
115
+
116
+ - Listing a capability but never calling it via tool is dishonest — it's "capability theater."
117
+ - If you say "let me search for that" → you MUST call a search tool. Don't generate from memory.
118
+ - If you plan to use a skill → you MUST call the Skill tool. Don't simulate the output.
119
+ - If you decide NOT to use a planned capability → explicitly state why: "Skipping X because Y."
120
+ - At the end of multi-step work, verify: every tool/skill you mentioned was either invoked or explicitly declined.
121
+
122
+ ---
123
+
124
+ ## GIT COMMIT RULES (Always Active)
125
+
126
+ **MANDATORY FOR ALL COMMITS:**
127
+
128
+ - **NO** "Generated with Claude Code" or similar AI signatures
129
+ - **NO** "Co-Authored-By: Claude" or any AI co-author lines
130
+ - **NO** emoji signatures like "🤖" in commit messages
131
+ - **NO** mentions of AI assistance in commit messages
132
+
133
+ **Commit Message Format:**
134
+ ```
135
+ <type>: <description>
136
+
137
+ [optional body with details]
138
+ ```
139
+
140
+ **Types:** feat, fix, refactor, docs, test, chore, style
141
+
142
+ **Example:**
143
+ ```bash
144
+ # CORRECT
145
+ git commit -m "feat: Add session notes system"
146
+
147
+ # WRONG
148
+ git commit -m "feat: Add session notes system
149
+
150
+ 🤖 Generated with Claude Code
151
+ Co-Authored-By: Claude <noreply@anthropic.com>"
152
+ ```
153
+
154
+ **Why:** Commit history should be clean and professional. AI assistance is an implementation detail, not part of the permanent record.
155
+
156
+ ---
157
+
158
+ ## PERMISSION TO FAIL (Always Active)
159
+
160
+ **Explicitly allow "I don't know" responses.**
161
+
162
+ You have EXPLICIT PERMISSION to say "I don't know" or "I'm not confident" when:
163
+ - Information isn't available in context
164
+ - The answer requires knowledge you don't have
165
+ - Multiple conflicting answers seem equally valid
166
+ - Verification isn't possible
167
+
168
+ **Acceptable Failure Responses:**
169
+ - "I don't have enough information to answer this accurately."
170
+ - "I found conflicting information and can't determine which is correct."
171
+ - "I could guess, but I'm not confident. Want me to try anyway?"
172
+
173
+ **The Permission:** You will NEVER be penalized for honestly saying you don't know. Fabricating an answer is far worse than admitting uncertainty.
174
+
175
+ ---
176
+
177
+ ## SESSION COMMANDS (Always Active)
178
+
179
+ **Session management is a core PAI function. Follow these procedures exactly.**
180
+
181
+ ### Session Start Confirmation
182
+
183
+ At the start of every session, confirm you have loaded the PAI context by including in your first response:
184
+ - The project name
185
+ - Whether a local CLAUDE.md was found
186
+ - The active session note number
187
+ - Any pending TODOs (first 3)
188
+
189
+ ### "go" / "continue" / "weiter" Command
190
+
191
+ When user's first message is just "go", "continue", "weiter", or similar:
192
+ 1. Read Notes/TODO.md — **look for the `## Continue` section at the TOP first**
193
+ - If a `## Continue` section exists, use it as **primary context** — it contains the continuation prompt from the last pause
194
+ - The continuation prompt tells you: what project/dir, what was done, what's in progress, exact next steps, background processes, key file paths
195
+ 2. Read the latest session note for additional context if needed
196
+ 3. Summarize what was in progress based on the continuation prompt
197
+ 4. Proceed with the next step from the continuation prompt, or ask if multiple options are available
198
+
199
+ ### "cpp" Command (Commit, Push, Publish)
200
+
201
+ When user says "cpp":
202
+ ```bash
203
+ # 1. Stage all changes
204
+ git add .
205
+
206
+ # 2. Commit with clean message (no AI signatures!)
207
+ git commit -m "feat: [Description of changes]"
208
+
209
+ # 3. Push to remote
210
+ git push
211
+
212
+ # 4. If publish script exists, run it
213
+ [ -f scripts/publish.py ] && python3 scripts/publish.py --clean
214
+ [ -f publish.sh ] && ./publish.sh
215
+ ```
216
+
217
+ ### "pause session" Command
218
+
219
+ When user says "pause session", execute this procedure:
220
+
221
+ 1. **Summarize Current State**
222
+ - List what was accomplished
223
+ - List what's in progress
224
+ - List any blockers or open questions
225
+
226
+ 2. **Save Checkpoint to Session Note**
227
+ - Append checkpoint with current work state to the active session note
228
+
229
+ 3. **Update TODO.md**
230
+ - Mark completed tasks with `[x]`
231
+ - Keep in-progress tasks with `[ ]`
232
+ - Add any new discovered tasks
233
+
234
+ 4. **Provide Handoff Summary**
235
+ ```
236
+ ## Pause Checkpoint
237
+
238
+ **Completed:**
239
+ - [list of done items]
240
+
241
+ **In Progress:**
242
+ - [list of active items]
243
+
244
+ **Next Steps:**
245
+ - [what to do when resuming]
246
+ ```
247
+
248
+ 5. **Generate Continuation Prompt and Write to TODO.md**
249
+
250
+ Write a self-contained continuation prompt to the TODO.md file. This prompt gives the NEXT session everything needed to pick up immediately.
251
+
252
+ The continuation prompt MUST include:
253
+ - What project and working directory we're in
254
+ - What was accomplished in this session
255
+ - What is currently in progress (and how far along)
256
+ - The exact next steps to take
257
+ - Any running background processes (daemons, watchers, embedding jobs, etc.)
258
+ - Key file paths that were created or modified
259
+
260
+ Write it as a `## Continue` section at the **TOP** of TODO.md, replacing any existing `## Continue` section. The format must be:
261
+
262
+ ```markdown
263
+ ## Continue
264
+
265
+ > **Last session:** NNNN - YYYY-MM-DD - Session Description
266
+ > **Paused at:** YYYY-MM-DDTHH:MM:SSZ
267
+ >
268
+ > [Continuation prompt text — 3-8 sentences covering: project/dir, what was done,
269
+ > what's in progress, exact next steps, background processes, key file paths]
270
+
271
+ ---
272
+
273
+ [rest of TODO.md content]
274
+ ```
275
+
276
+ 6. **Exit** - The session ends cleanly (stop-hook will finalize the note)
277
+
278
+ ### "end session" Command
279
+
280
+ When user says "end session", execute this procedure:
281
+
282
+ 1. **Complete Pause Procedure** (steps 1-4 above)
283
+
284
+ 2. **RENAME SESSION NOTE (MANDATORY - NEVER SKIP)**
285
+ ```bash
286
+ # Find current session note
287
+ ls -t Notes/*.md | head -1
288
+ # Rename with meaningful description based on work done
289
+ mv "Notes/0027 - 2026-01-04 - New Session.md" "Notes/0027 - 2026-01-04 - Descriptive Name Here.md"
290
+ ```
291
+ - The filename MUST describe what was accomplished
292
+ - WRONG: "Appstore", "New Session", "Session Started"
293
+ - RIGHT: "Markdown Heading Fix", "Notification System", "Dark Mode Implementation"
294
+
295
+ 3. **Check for Uncommitted Changes**
296
+ ```bash
297
+ git status
298
+ ```
299
+ - If changes exist, ask: "There are uncommitted changes. Commit them?"
300
+
301
+ 4. **Final Summary**
302
+ - Provide a brief narrative of what was accomplished
303
+ - The session note will be marked as "Completed"
304
+
305
+ ### Session Note Naming
306
+
307
+ Session notes are stored in: `~/.claude/projects/{encoded-cwd}/Notes/` or local `Notes/`
308
+
309
+ **Format:** `NNNN - YYYY-MM-DD - Meaningful Description.md`
310
+
311
+ | Element | Requirement | Example |
312
+ |---------|-------------|---------|
313
+ | Number | **4 digits**, zero-padded | `0001`, `0027`, `0100` |
314
+ | Separator | **Space-dash-space** (` - `) | NOT `_`, NOT `-` alone |
315
+ | Date | ISO format | `2026-01-04` |
316
+ | Description | **Describes the WORK DONE** | NOT project name! |
317
+
318
+ **CORRECT Examples:**
319
+ ```
320
+ 0027 - 2026-01-04 - Markdown Heading Fix.md
321
+ 0028 - 2026-01-05 - Notification System Refactor.md
322
+ 0029 - 2026-01-06 - Dark Mode Implementation.md
323
+ ```
324
+
325
+ **WRONG - NEVER DO THIS:**
326
+ ```
327
+ 0027 - 2026-01-04 - Appstore.md ❌ Project name, not descriptive
328
+ 0027 - 2026-01-04 - New Session.md ❌ Placeholder, not descriptive
329
+ 0027_2026-01-04_appstore.md ❌ Wrong format AND not descriptive
330
+ ```
331
+
332
+ **At session end, you MUST:**
333
+ 1. Check if the session note has a placeholder name
334
+ 2. Rename it based on the actual work done
335
+ 3. Update the H1 title inside the file to match
336
+
337
+ ---
338
+
339
+ ## DELEGATION & PARALLELIZATION (Always Active)
340
+
341
+ **Whenever a task can be parallelized, use multiple agents.**
342
+
343
+ ### Model Selection for Agents
344
+
345
+ | Task Type | Model | Why |
346
+ |-----------|-------|-----|
347
+ | Deep reasoning, complex architecture | `opus` | Maximum intelligence needed |
348
+ | Standard implementation, most coding | `sonnet` | Good balance of speed + capability |
349
+ | Simple lookups, quick checks, grunt work | `haiku` | 10-20x faster, sufficient intelligence |
350
+
351
+ **Rule of Thumb:**
352
+ - Grunt work or verification → `haiku`
353
+ - Implementation or research → `sonnet`
354
+ - Deep strategic thinking → `opus`
355
+
356
+ ### How to Parallelize
357
+
358
+ - Use a SINGLE message with MULTIPLE Agent/Task tool calls = parallel execution
359
+ - Each agent gets FULL CONTEXT and DETAILED INSTRUCTIONS
360
+ - **ALWAYS launch a spotcheck agent after parallel work completes**
361
+
362
+ ### Context Conservation
363
+
364
+ Bulk/repetitive work consumes context. Delegate it to conserve your main conversation space for planning and decisions.
365
+
366
+ **When to delegate:** Updating many files, batch refactoring, repetitive transformations, large-scale testing, batch file operations.
367
+
368
+ **Pattern:**
369
+ 1. Plan the work in main conversation
370
+ 2. Delegate to agent(s) with detailed instructions
371
+ 3. Agent executes bulk changes efficiently
372
+ 4. Review results and iterate if needed
373
+ 5. Main conversation remains lean and focused
374
+
375
+ ---
376
+
377
+ ## STACK PREFERENCES (Always Active)
378
+
379
+ - **TypeScript > Python** — Use TypeScript unless explicitly told otherwise
380
+ - **Package managers:** bun for JS/TS (NOT npm/yarn/pnpm), uv for Python (NOT pip)
381
+ - **Markdown > HTML:** Never use HTML tags for basic content
382
+ - **Analysis vs Action:** If asked to analyze, do analysis only — don't change things unless asked
383
+
384
+ ---
385
+
386
+ ## FILE ORGANIZATION (Always Active)
387
+
388
+ - **Scratchpad** (`${PAI_DIR}/scratchpad/`) — Temporary files only. Delete when done.
389
+ - **History** (`${PAI_DIR}/History/`) — Permanent valuable outputs.
390
+ - **Backups** (`${PAI_DIR}/History/backups/`) — All backups go here, NEVER inside skill directories.
391
+
392
+ **Rules:**
393
+ - Save valuable work to history, not scratchpad
394
+ - Never create `backups/` directories inside skills
395
+ - Never use `.bak` suffixes
396
+
397
+ ---
398
+
399
+ ## HISTORY SYSTEM — Past Work Lookup (Always Active)
400
+
401
+ **When the user asks about anything done in the past, check the history system first.**
402
+
403
+ The history system at `${PAI_DIR}/History/` contains all past work — sessions, learnings, research, decisions.
404
+
405
+ ### How to Search History
406
+
407
+ ```bash
408
+ # Quick keyword search across all history
409
+ rg -i "keyword" ${PAI_DIR}/History/
410
+
411
+ # Search sessions specifically
412
+ rg -i "keyword" ${PAI_DIR}/History/sessions/
413
+
414
+ # List recent files
415
+ ls -lt ${PAI_DIR}/History/sessions/ | head -20
416
+ ```
417
+
418
+ ### Directory Quick Reference
419
+
420
+ | What you're looking for | Where to search |
421
+ |------------------------|-----------------|
422
+ | Session summaries | `History/sessions/YYYY-MM/` |
423
+ | Problem-solving narratives | `History/learnings/YYYY-MM/` |
424
+ | Research & investigations | `History/research/YYYY-MM/` |
425
+
426
+ ---
427
+
428
+ **This skill is installed by `pai setup`. For personal customization (identity, personality, notification preferences), create your own skill in `~/.claude/skills/`.**
@@ -0,0 +1,20 @@
1
+ ---
2
+ related:
3
+ - '[[Ideaverse/AI/PAI/templates/README|README]]'
4
+ - '[[Ideaverse/AI/PAI/templates/agent-prefs.example|agent-prefs.example]]'
5
+ - '[[Ideaverse/AI/PAI/templates/claude-md.template|claude-md.template]]'
6
+ - '[[Ideaverse/AI/PAI/templates/pai-project.template|pai-project.template]]'
7
+ links:
8
+ - '[[Ideaverse/AI/PAI/PAI|PAI]]'
9
+ ---
10
+ # Templates
11
+
12
+ - [README](./README.md)
13
+ - [agent-prefs.example](./agent-prefs.example.md)
14
+ - [claude-md.template](./claude-md.template.md)
15
+ - [pai-project.template](./pai-project.template.md)
16
+
17
+ [← PAI](../)
18
+
19
+ ---
20
+ *Links:* [[Ideaverse/AI/PAI/templates/README|README]] · [[Ideaverse/AI/PAI/templates/agent-prefs.example|agent-prefs.example]] · [[Ideaverse/AI/PAI/templates/claude-md.template|claude-md.template]] · [[Ideaverse/AI/PAI/templates/pai-project.template|pai-project.template]] · [[Ideaverse/AI/PAI/PAI|PAI]]
@@ -1 +0,0 @@
1
- {"version":3,"file":"daemon-v5O897D4.mjs","names":[],"sources":["../src/notifications/config.ts","../src/notifications/providers/ntfy.ts","../src/notifications/providers/whatsapp.ts","../src/notifications/providers/macos.ts","../src/notifications/providers/cli.ts","../src/notifications/router.ts","../src/daemon/daemon.ts"],"sourcesContent":["/**\n * config.ts — Notification config persistence helpers\n *\n * Reads and writes the `notifications` section of ~/.config/pai/config.json.\n * Deep-merges with defaults so partial configs work fine.\n *\n * This module is intentionally separate from the daemon's config loader\n * so it can be used standalone (e.g. from CLI commands).\n */\n\nimport {\n existsSync,\n readFileSync,\n writeFileSync,\n mkdirSync,\n} from \"node:fs\";\nimport {\n CONFIG_FILE,\n CONFIG_DIR,\n expandHome,\n} from \"../daemon/config.js\";\nimport type {\n NotificationConfig,\n ChannelConfigs,\n RoutingTable,\n NotificationMode,\n} from \"./types.js\";\nimport {\n DEFAULT_NOTIFICATION_CONFIG,\n DEFAULT_CHANNELS,\n DEFAULT_ROUTING,\n} from \"./types.js\";\n\n// ---------------------------------------------------------------------------\n// Deep merge helper (same approach as daemon/config.ts)\n// ---------------------------------------------------------------------------\n\nfunction deepMerge<T extends object>(\n target: T,\n source: Record<string, unknown>\n): T {\n const result = { ...target };\n for (const key of Object.keys(source)) {\n const srcVal = source[key];\n if (srcVal === undefined || srcVal === null) continue;\n const tgtVal = (target as Record<string, unknown>)[key];\n if (\n typeof srcVal === \"object\" &&\n !Array.isArray(srcVal) &&\n typeof tgtVal === \"object\" &&\n tgtVal !== null &&\n !Array.isArray(tgtVal)\n ) {\n (result as Record<string, unknown>)[key] = deepMerge(\n tgtVal as object,\n srcVal as Record<string, unknown>\n );\n } else {\n (result as Record<string, unknown>)[key] = srcVal;\n }\n }\n return result;\n}\n\n// 
---------------------------------------------------------------------------\n// Load\n// ---------------------------------------------------------------------------\n\n/**\n * Load the notification config from the PAI config file.\n * Returns defaults merged with any stored values.\n */\nexport function loadNotificationConfig(): NotificationConfig {\n if (!existsSync(CONFIG_FILE)) {\n return { ...DEFAULT_NOTIFICATION_CONFIG };\n }\n\n let raw: string;\n try {\n raw = readFileSync(CONFIG_FILE, \"utf-8\");\n } catch {\n return { ...DEFAULT_NOTIFICATION_CONFIG };\n }\n\n let parsed: Record<string, unknown>;\n try {\n parsed = JSON.parse(raw) as Record<string, unknown>;\n } catch {\n return { ...DEFAULT_NOTIFICATION_CONFIG };\n }\n\n const stored = parsed[\"notifications\"];\n if (!stored || typeof stored !== \"object\") {\n return { ...DEFAULT_NOTIFICATION_CONFIG };\n }\n\n return deepMerge(\n DEFAULT_NOTIFICATION_CONFIG,\n stored as Record<string, unknown>\n );\n}\n\n// ---------------------------------------------------------------------------\n// Save\n// ---------------------------------------------------------------------------\n\n/**\n * Persist the notification config by merging it into the existing\n * ~/.config/pai/config.json. 
Creates the file if it does not exist.\n */\nexport function saveNotificationConfig(config: NotificationConfig): void {\n // Ensure the config dir exists\n if (!existsSync(CONFIG_DIR)) {\n mkdirSync(CONFIG_DIR, { recursive: true });\n }\n\n // Read current full config\n let full: Record<string, unknown> = {};\n if (existsSync(CONFIG_FILE)) {\n try {\n full = JSON.parse(readFileSync(CONFIG_FILE, \"utf-8\")) as Record<\n string,\n unknown\n >;\n } catch {\n // Start fresh if the file is unreadable\n }\n }\n\n // Replace the notifications section\n full[\"notifications\"] = config;\n\n writeFileSync(CONFIG_FILE, JSON.stringify(full, null, 2) + \"\\n\", \"utf-8\");\n}\n\n// ---------------------------------------------------------------------------\n// Patch helpers (used by the set command)\n// ---------------------------------------------------------------------------\n\n/**\n * Apply a partial update to the current notification config and persist it.\n * Returns the new merged config.\n */\nexport function patchNotificationConfig(patch: {\n mode?: NotificationMode;\n channels?: Partial<Partial<ChannelConfigs>>;\n routing?: Partial<RoutingTable>;\n}): NotificationConfig {\n const current = loadNotificationConfig();\n\n if (patch.mode !== undefined) {\n current.mode = patch.mode;\n }\n\n if (patch.channels) {\n current.channels = deepMerge(\n current.channels,\n patch.channels as Record<string, unknown>\n );\n }\n\n if (patch.routing) {\n current.routing = deepMerge(\n current.routing,\n patch.routing as Record<string, unknown>\n );\n }\n\n saveNotificationConfig(current);\n return current;\n}\n\n// Re-export defaults for convenience\nexport { DEFAULT_NOTIFICATION_CONFIG, DEFAULT_CHANNELS, DEFAULT_ROUTING };\nexport { expandHome };\n","/**\n * ntfy.ts — ntfy.sh notification provider\n *\n * Sends notifications to a configured ntfy.sh topic via HTTP.\n */\n\nimport type {\n NotificationProvider,\n NotificationPayload,\n NotificationConfig,\n} from 
\"../types.js\";\n\nexport class NtfyProvider implements NotificationProvider {\n readonly channelId = \"ntfy\" as const;\n\n async send(\n payload: NotificationPayload,\n config: NotificationConfig\n ): Promise<boolean> {\n const cfg = config.channels.ntfy;\n if (!cfg.enabled || !cfg.url) return false;\n\n try {\n const headers: Record<string, string> = {\n \"Content-Type\": \"text/plain; charset=utf-8\",\n };\n\n if (payload.title) {\n headers[\"Title\"] = payload.title;\n }\n\n if (cfg.priority && cfg.priority !== \"default\") {\n headers[\"Priority\"] = cfg.priority;\n }\n\n const response = await fetch(cfg.url, {\n method: \"POST\",\n headers,\n body: payload.message,\n });\n\n return response.ok;\n } catch {\n return false;\n }\n }\n}\n","/**\n * whatsapp.ts — WhatsApp notification provider (via Whazaa MCP)\n *\n * Sends notifications via the Whazaa Unix Domain Socket IPC protocol.\n * Falls back gracefully if Whazaa is not running.\n *\n * Whazaa IPC socket: /tmp/whazaa.sock (standard Whazaa path)\n *\n * We use the same connect-per-call pattern as PaiClient to avoid\n * requiring any persistent connection state.\n */\n\nimport { connect } from \"node:net\";\nimport { randomUUID } from \"node:crypto\";\nimport type {\n NotificationProvider,\n NotificationPayload,\n NotificationConfig,\n} from \"../types.js\";\n\nconst WHAZAA_SOCKET = \"/tmp/whazaa.sock\";\nconst WHAZAA_TIMEOUT_MS = 10_000;\n\n/**\n * Send a single IPC call to the Whazaa socket.\n * Returns true on success, false if Whazaa is not available or errors.\n */\nfunction callWhazaa(\n method: string,\n params: Record<string, unknown>\n): Promise<boolean> {\n return new Promise((resolve) => {\n let done = false;\n let buffer = \"\";\n let timer: ReturnType<typeof setTimeout> | null = null;\n\n function finish(ok: boolean): void {\n if (done) return;\n done = true;\n if (timer) { clearTimeout(timer); timer = null; }\n try { socket?.destroy(); } catch { /* ignore */ }\n resolve(ok);\n }\n\n const 
socket = connect(WHAZAA_SOCKET, () => {\n const request = {\n jsonrpc: \"2.0\",\n id: randomUUID(),\n method,\n params,\n };\n socket.write(JSON.stringify(request) + \"\\n\");\n });\n\n socket.on(\"data\", (chunk: Buffer) => {\n buffer += chunk.toString();\n const nl = buffer.indexOf(\"\\n\");\n if (nl === -1) return;\n try {\n const resp = JSON.parse(buffer.slice(0, nl)) as { error?: unknown };\n finish(!resp.error);\n } catch {\n finish(false);\n }\n });\n\n socket.on(\"error\", () => finish(false));\n socket.on(\"end\", () => finish(false));\n\n timer = setTimeout(() => finish(false), WHAZAA_TIMEOUT_MS);\n });\n}\n\nexport class WhatsAppProvider implements NotificationProvider {\n readonly channelId = \"whatsapp\" as const;\n\n async send(\n payload: NotificationPayload,\n config: NotificationConfig\n ): Promise<boolean> {\n const cfg = config.channels.whatsapp;\n if (!cfg.enabled) return false;\n\n const isVoiceMode = config.mode === \"voice\" || config.channels.voice.enabled;\n\n const params: Record<string, unknown> = {\n message: payload.message,\n };\n\n if (cfg.recipient) {\n params.recipient = cfg.recipient;\n }\n\n if (isVoiceMode && config.mode === \"voice\") {\n const voiceName = config.channels.voice.voiceName ?? 
\"bm_george\";\n params.voice = voiceName;\n }\n\n return callWhazaa(\"whatsapp_send\", params);\n }\n}\n","/**\n * macos.ts — macOS notification provider\n *\n * Uses the `osascript` command to display a macOS system notification.\n * Non-blocking: spawns the process and returns success without waiting.\n */\n\nimport { spawn } from \"node:child_process\";\nimport type {\n NotificationProvider,\n NotificationPayload,\n NotificationConfig,\n} from \"../types.js\";\n\nexport class MacOsProvider implements NotificationProvider {\n readonly channelId = \"macos\" as const;\n\n async send(\n payload: NotificationPayload,\n config: NotificationConfig\n ): Promise<boolean> {\n const cfg = config.channels.macos;\n if (!cfg.enabled) return false;\n\n try {\n const title = payload.title ?? \"PAI\";\n // Escape single quotes in title and message for AppleScript\n const safeTitle = title.replace(/'/g, \"\\\\'\");\n const safeMessage = payload.message.replace(/'/g, \"\\\\'\");\n\n const script = `display notification \"${safeMessage}\" with title \"${safeTitle}\"`;\n\n return new Promise((resolve) => {\n const child = spawn(\"osascript\", [\"-e\", script], {\n detached: true,\n stdio: \"ignore\",\n });\n child.unref();\n\n // Give the process a moment to start, then assume success.\n // osascript is always present on macOS.\n child.on(\"error\", () => resolve(false));\n\n // Resolve after a short timeout — osascript exits quickly\n setTimeout(() => resolve(true), 200);\n });\n } catch {\n return false;\n }\n }\n}\n","/**\n * cli.ts — CLI notification provider\n *\n * Writes notifications to the PAI daemon log (stderr).\n * Always succeeds — it's the fallback channel.\n */\n\nimport type {\n NotificationProvider,\n NotificationPayload,\n NotificationConfig,\n} from \"../types.js\";\n\nexport class CliProvider implements NotificationProvider {\n readonly channelId = \"cli\" as const;\n\n async send(\n payload: NotificationPayload,\n _config: NotificationConfig\n ): 
Promise<boolean> {\n const prefix = `[pai-notify:${payload.event}]`;\n const title = payload.title ? ` ${payload.title}:` : \"\";\n process.stderr.write(`${prefix}${title} ${payload.message}\\n`);\n return true;\n }\n}\n","/**\n * router.ts — Notification router\n *\n * Routes notification events to the appropriate channels based on the\n * current mode and per-event routing config.\n *\n * Channel providers are instantiated lazily and cached.\n */\n\nimport type {\n NotificationPayload,\n NotificationConfig,\n NotificationProvider,\n ChannelId,\n SendResult,\n NotificationMode,\n} from \"./types.js\";\nimport { NtfyProvider } from \"./providers/ntfy.js\";\nimport { WhatsAppProvider } from \"./providers/whatsapp.js\";\nimport { MacOsProvider } from \"./providers/macos.js\";\nimport { CliProvider } from \"./providers/cli.js\";\n\n// ---------------------------------------------------------------------------\n// Provider registry (singletons — stateless, safe to reuse)\n// ---------------------------------------------------------------------------\n\nconst PROVIDERS: Record<ChannelId, NotificationProvider> = {\n ntfy: new NtfyProvider(),\n whatsapp: new WhatsAppProvider(),\n macos: new MacOsProvider(),\n voice: new WhatsAppProvider(), // Voice uses WhatsApp TTS; handled in WhatsAppProvider\n cli: new CliProvider(),\n};\n\n// ---------------------------------------------------------------------------\n// Channel resolution\n// ---------------------------------------------------------------------------\n\n/**\n * Given the current config, resolve which channels should receive a\n * notification for the given event type.\n *\n * Mode overrides:\n * \"off\" → no channels\n * \"auto\" → use routing table, filtered by enabled channels\n * \"voice\" → whatsapp (TTS enabled in provider)\n * \"whatsapp\" → whatsapp\n * \"ntfy\" → ntfy\n * \"macos\" → macos\n * \"cli\" → cli\n */\nfunction resolveChannels(\n config: NotificationConfig,\n event: 
NotificationPayload[\"event\"]\n): ChannelId[] {\n const { mode, channels, routing } = config;\n\n if (mode === \"off\") return [];\n\n // Non-auto modes: force a single channel\n const modeToChannel: Partial<Record<NotificationMode, ChannelId>> = {\n voice: \"whatsapp\", // WhatsAppProvider checks mode === \"voice\" for TTS\n whatsapp: \"whatsapp\",\n ntfy: \"ntfy\",\n macos: \"macos\",\n cli: \"cli\",\n };\n\n if (mode !== \"auto\") {\n const ch = modeToChannel[mode];\n if (!ch) return [];\n // Check the channel is enabled\n const cfg = channels[ch];\n if (cfg && !cfg.enabled) return [ch]; // Still send — mode override bypasses enabled check\n return [ch];\n }\n\n // Auto mode: use routing table, filter to enabled channels\n const candidates = routing[event] ?? [];\n return candidates.filter((ch) => {\n const cfg = channels[ch];\n // \"voice\" channel is virtual — it overlaps with whatsapp.\n // Skip \"voice\" as an independent channel; voice is handled by checking config.mode.\n if (ch === \"voice\") return false;\n return cfg?.enabled === true;\n });\n}\n\n// ---------------------------------------------------------------------------\n// Router\n// ---------------------------------------------------------------------------\n\n/**\n * Route a notification to the appropriate channels.\n *\n * Sends to all resolved channels in parallel.\n * Individual channel failures are non-fatal and logged to stderr.\n *\n * @param payload The notification to send\n * @param config The current notification config (from daemon state)\n */\nexport async function routeNotification(\n payload: NotificationPayload,\n config: NotificationConfig\n): Promise<SendResult> {\n const channels = resolveChannels(config, payload.event);\n\n if (channels.length === 0) {\n return {\n channelsAttempted: [],\n channelsSucceeded: [],\n channelsFailed: [],\n mode: config.mode,\n };\n }\n\n const results = await Promise.allSettled(\n channels.map(async (ch) => {\n const provider = PROVIDERS[ch];\n 
const ok = await provider.send(payload, config);\n if (!ok) {\n process.stderr.write(\n `[pai-notify] Channel ${ch} failed for event ${payload.event}\\n`\n );\n }\n return { ch, ok };\n })\n );\n\n const succeeded: ChannelId[] = [];\n const failed: ChannelId[] = [];\n\n for (const r of results) {\n if (r.status === \"fulfilled\") {\n if (r.value.ok) {\n succeeded.push(r.value.ch);\n } else {\n failed.push(r.value.ch);\n }\n } else {\n // Provider threw — treat as failure\n failed.push(channels[results.indexOf(r)]);\n }\n }\n\n return {\n channelsAttempted: channels,\n channelsSucceeded: succeeded,\n channelsFailed: failed,\n mode: config.mode,\n };\n}\n","/**\n * daemon.ts — The persistent PAI Daemon\n *\n * Provides shared database access, tool dispatch, and periodic index scheduling\n * for multiple concurrent Claude Code sessions via a Unix Domain Socket.\n *\n * Architecture:\n * MCP shims (Claude sessions) → Unix socket → PAI Daemon\n * ├── registry.db (shared, WAL, always SQLite)\n * ├── federation (SQLite or Postgres/pgvector)\n * ├── Embedding model (singleton)\n * └── Index scheduler (periodic)\n *\n * IPC protocol: NDJSON over Unix Domain Socket\n *\n * Request (shim → daemon):\n * { \"id\": \"uuid\", \"method\": \"tool_name_or_special\", \"params\": {} }\n *\n * Response (daemon → shim):\n * { \"id\": \"uuid\", \"ok\": true, \"result\": <any> }\n * { \"id\": \"uuid\", \"ok\": false, \"error\": \"message\" }\n *\n * Special methods:\n * status — Return daemon status (uptime, index state, db stats)\n * index_now — Trigger immediate index run (non-blocking)\n *\n * All other methods are dispatched to the corresponding PAI tool function.\n *\n * Design notes:\n * - Registry stays in SQLite (small, simple metadata).\n * - Federation backend is configurable: SQLite (default) or Postgres/pgvector.\n * - Auto-fallback: if Postgres is configured but unavailable, falls back to SQLite.\n * - Index writes guarded by indexInProgress flag (not a mutex — index is 
idempotent).\n * - Embedding model loaded lazily on first semantic/hybrid request, then kept alive.\n * - Scheduler runs indexAll() every indexIntervalSecs (default 5 minutes).\n */\n\nimport { existsSync, unlinkSync } from \"node:fs\";\nimport { createServer, connect, Socket, Server } from \"node:net\";\nimport { setPriority } from \"node:os\";\nimport { openRegistry } from \"../registry/db.js\";\nimport type { Database } from \"better-sqlite3\";\nimport { indexAll } from \"../memory/indexer.js\";\nimport {\n toolMemorySearch,\n toolMemoryGet,\n toolProjectInfo,\n toolProjectList,\n toolSessionList,\n toolRegistrySearch,\n toolProjectDetect,\n toolProjectHealth,\n toolProjectTodo,\n toolSessionRoute,\n} from \"../mcp/tools.js\";\nimport { detectTopicShift } from \"../topics/detector.js\";\nimport type { PaiDaemonConfig } from \"./config.js\";\nimport { createStorageBackend } from \"../storage/factory.js\";\nimport type { StorageBackend } from \"../storage/interface.js\";\nimport { configureEmbeddingModel } from \"../memory/embeddings.js\";\nimport type { NotificationConfig, NotificationMode } from \"../notifications/types.js\";\nimport {\n loadNotificationConfig,\n patchNotificationConfig,\n} from \"../notifications/config.js\";\nimport { routeNotification } from \"../notifications/router.js\";\n\n// ---------------------------------------------------------------------------\n// Protocol types\n// ---------------------------------------------------------------------------\n\ninterface IpcRequest {\n id: string;\n method: string;\n params: Record<string, unknown>;\n}\n\ninterface IpcResponse {\n id: string;\n ok: boolean;\n result?: unknown;\n error?: string;\n}\n\n// ---------------------------------------------------------------------------\n// Daemon state\n// ---------------------------------------------------------------------------\n\nlet registryDb: ReturnType<typeof openRegistry>;\nlet storageBackend: StorageBackend;\nlet daemonConfig: PaiDaemonConfig;\nlet 
startTime = Date.now();\n\n// Index scheduler state\nlet indexInProgress = false;\nlet lastIndexTime = 0;\nlet indexSchedulerTimer: ReturnType<typeof setInterval> | null = null;\n\n// Embed scheduler state\nlet embedInProgress = false;\nlet lastEmbedTime = 0;\nlet embedSchedulerTimer: ReturnType<typeof setInterval> | null = null;\n\n// ---------------------------------------------------------------------------\n// Notification state\n// ---------------------------------------------------------------------------\n\n/** Mutable notification config — loaded from disk at startup, patchable at runtime */\nlet notificationConfig: NotificationConfig;\n\n// ---------------------------------------------------------------------------\n// Graceful shutdown flag\n// ---------------------------------------------------------------------------\n\n/**\n * Set to true when a SIGTERM/SIGINT is received so that long-running loops\n * (embed, index) can detect the signal and exit their inner loops before the\n * pool/backend is closed. Checked by embedChunksWithBackend() via the\n * `shouldStop` callback passed from runEmbed().\n */\nlet shutdownRequested = false;\n\n// ---------------------------------------------------------------------------\n// Index scheduler\n// ---------------------------------------------------------------------------\n\n/**\n * Run a full index pass. Guards against overlapping runs with indexInProgress.\n * Called both by the scheduler and by the index_now IPC method.\n *\n * NOTE: We pass the raw SQLite federation DB to indexAll() for SQLite backend,\n * or skip and use the backend interface for Postgres. 
The indexer currently\n * uses better-sqlite3 directly; it will be refactored in a future phase.\n * For now, we keep the SQLite indexer path and add a Postgres-aware path.\n */\nasync function runIndex(): Promise<void> {\n if (indexInProgress) {\n process.stderr.write(\"[pai-daemon] Index already in progress, skipping.\\n\");\n return;\n }\n\n if (embedInProgress) {\n process.stderr.write(\"[pai-daemon] Embed in progress, deferring index run.\\n\");\n return;\n }\n\n indexInProgress = true;\n const t0 = Date.now();\n\n try {\n process.stderr.write(\"[pai-daemon] Starting scheduled index run...\\n\");\n\n if (storageBackend.backendType === \"sqlite\") {\n // SQLite: use existing indexAll() which operates on the raw DB handle\n // We need the raw DB — extract it from the SQLite backend\n const { SQLiteBackend } = await import(\"../storage/sqlite.js\");\n if (storageBackend instanceof SQLiteBackend) {\n const db = (storageBackend as SQLiteBackendWithDb).getRawDb();\n const { projects, result } = await indexAll(db, registryDb);\n const elapsed = Date.now() - t0;\n lastIndexTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Index complete: ${projects} projects, ` +\n `${result.filesProcessed} files, ${result.chunksCreated} chunks ` +\n `(${elapsed}ms)\\n`\n );\n }\n } else {\n // Postgres: use the backend-aware indexer\n const { indexAllWithBackend } = await import(\"../memory/indexer-backend.js\");\n const { projects, result } = await indexAllWithBackend(storageBackend, registryDb);\n const elapsed = Date.now() - t0;\n lastIndexTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Index complete (postgres): ${projects} projects, ` +\n `${result.filesProcessed} files, ${result.chunksCreated} chunks ` +\n `(${elapsed}ms)\\n`\n );\n }\n } catch (e) {\n const msg = e instanceof Error ? 
e.message : String(e);\n process.stderr.write(`[pai-daemon] Index error: ${msg}\\n`);\n } finally {\n indexInProgress = false;\n }\n}\n\n/**\n * Internal interface for accessing the raw DB from SQLiteBackend.\n * This avoids a circular dep while keeping type safety.\n */\ninterface SQLiteBackendWithDb {\n getRawDb(): Database;\n}\n\n/**\n * Start the periodic index scheduler.\n */\nfunction startIndexScheduler(): void {\n const intervalMs = daemonConfig.indexIntervalSecs * 1_000;\n\n process.stderr.write(\n `[pai-daemon] Index scheduler: every ${daemonConfig.indexIntervalSecs}s\\n`\n );\n\n // Run an initial index at startup (non-blocking — let the socket come up first)\n setTimeout(() => {\n runIndex().catch((e) => {\n process.stderr.write(`[pai-daemon] Startup index error: ${e}\\n`);\n });\n }, 2_000);\n\n indexSchedulerTimer = setInterval(() => {\n runIndex().catch((e) => {\n process.stderr.write(`[pai-daemon] Scheduled index error: ${e}\\n`);\n });\n }, intervalMs);\n\n // Don't let the interval keep the process alive if all else exits\n if (indexSchedulerTimer.unref) {\n indexSchedulerTimer.unref();\n }\n}\n\n// ---------------------------------------------------------------------------\n// Embed scheduler\n// ---------------------------------------------------------------------------\n\n/**\n * Run an embedding pass for all unembedded chunks (Postgres backend only).\n * Guards against overlapping runs with embedInProgress.\n * Skips if an index run is currently in progress to avoid contention.\n */\nasync function runEmbed(): Promise<void> {\n if (embedInProgress) {\n process.stderr.write(\"[pai-daemon] Embed already in progress, skipping.\\n\");\n return;\n }\n\n // Don't compete with the indexer — it writes new chunks that will need embedding\n if (indexInProgress) {\n process.stderr.write(\"[pai-daemon] Index in progress, deferring embed pass.\\n\");\n return;\n }\n\n // Embedding is only supported on the Postgres backend.\n // The SQLite path uses 
embedChunks() in indexer.ts directly (manual CLI only).\n if (storageBackend.backendType !== \"postgres\") {\n return;\n }\n\n embedInProgress = true;\n const t0 = Date.now();\n\n try {\n process.stderr.write(\"[pai-daemon] Starting scheduled embed pass...\\n\");\n\n const { embedChunksWithBackend } = await import(\"../memory/indexer-backend.js\");\n const count = await embedChunksWithBackend(storageBackend, () => shutdownRequested);\n\n const elapsed = Date.now() - t0;\n lastEmbedTime = Date.now();\n process.stderr.write(\n `[pai-daemon] Embed pass complete: ${count} chunks embedded (${elapsed}ms)\\n`\n );\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n process.stderr.write(`[pai-daemon] Embed error: ${msg}\\n`);\n } finally {\n embedInProgress = false;\n }\n}\n\n/**\n * Start the periodic embed scheduler.\n * Initial run is 30 seconds after startup (after the 2-second index startup run).\n */\nfunction startEmbedScheduler(): void {\n const intervalMs = daemonConfig.embedIntervalSecs * 1_000;\n\n process.stderr.write(\n `[pai-daemon] Embed scheduler: every ${daemonConfig.embedIntervalSecs}s\\n`\n );\n\n // Initial embed run 30 seconds after startup (lets the first index run finish)\n setTimeout(() => {\n runEmbed().catch((e) => {\n process.stderr.write(`[pai-daemon] Startup embed error: ${e}\\n`);\n });\n }, 30_000);\n\n embedSchedulerTimer = setInterval(() => {\n runEmbed().catch((e) => {\n process.stderr.write(`[pai-daemon] Scheduled embed error: ${e}\\n`);\n });\n }, intervalMs);\n\n // Don't let the interval keep the process alive if all else exits\n if (embedSchedulerTimer.unref) {\n embedSchedulerTimer.unref();\n }\n}\n\n// ---------------------------------------------------------------------------\n// Tool dispatcher\n// ---------------------------------------------------------------------------\n\n/**\n * Dispatch an IPC tool call to the appropriate tool function.\n * Returns the tool result or throws.\n */\nasync function 
dispatchTool(\n method: string,\n params: Record<string, unknown>\n): Promise<unknown> {\n // Cast through unknown to satisfy TypeScript's strict overlap check on\n // Record<string, unknown> → specific param types. Runtime validation is\n // the responsibility of each tool function (they surface errors gracefully).\n const p = params as unknown;\n\n switch (method) {\n case \"memory_search\":\n return toolMemorySearch(registryDb, storageBackend, p as Parameters<typeof toolMemorySearch>[2]);\n\n case \"memory_get\":\n return toolMemoryGet(registryDb, p as Parameters<typeof toolMemoryGet>[1]);\n\n case \"project_info\":\n return toolProjectInfo(registryDb, p as Parameters<typeof toolProjectInfo>[1]);\n\n case \"project_list\":\n return toolProjectList(registryDb, p as Parameters<typeof toolProjectList>[1]);\n\n case \"session_list\":\n return toolSessionList(registryDb, p as Parameters<typeof toolSessionList>[1]);\n\n case \"registry_search\":\n return toolRegistrySearch(registryDb, p as Parameters<typeof toolRegistrySearch>[1]);\n\n case \"project_detect\":\n return toolProjectDetect(registryDb, p as Parameters<typeof toolProjectDetect>[1]);\n\n case \"project_health\":\n return toolProjectHealth(registryDb, p as Parameters<typeof toolProjectHealth>[1]);\n\n case \"project_todo\":\n return toolProjectTodo(registryDb, p as Parameters<typeof toolProjectTodo>[1]);\n\n case \"topic_check\":\n return detectTopicShift(\n registryDb,\n storageBackend,\n p as Parameters<typeof detectTopicShift>[2]\n );\n\n case \"session_auto_route\":\n return toolSessionRoute(\n registryDb,\n storageBackend,\n p as Parameters<typeof toolSessionRoute>[2]\n );\n\n default:\n throw new Error(`Unknown method: ${method}`);\n }\n}\n\n// ---------------------------------------------------------------------------\n// IPC server\n// ---------------------------------------------------------------------------\n\nfunction sendResponse(socket: Socket, response: IpcResponse): void {\n try {\n 
socket.write(JSON.stringify(response) + \"\\n\");\n } catch {\n // Socket may already be closed\n }\n}\n\n/**\n * Handle a single IPC request.\n */\nasync function handleRequest(\n request: IpcRequest,\n socket: Socket\n): Promise<void> {\n const { id, method, params } = request;\n\n // Special: status\n if (method === \"status\") {\n const dbStats = await (async () => {\n try {\n const fedStats = await storageBackend.getStats();\n const projects = (\n registryDb\n .prepare(\"SELECT COUNT(*) AS n FROM projects\")\n .get() as { n: number }\n ).n;\n return { files: fedStats.files, chunks: fedStats.chunks, projects };\n } catch {\n return null;\n }\n })();\n\n sendResponse(socket, {\n id,\n ok: true,\n result: {\n uptime: Math.floor((Date.now() - startTime) / 1000),\n indexInProgress,\n lastIndexTime: lastIndexTime ? new Date(lastIndexTime).toISOString() : null,\n indexIntervalSecs: daemonConfig.indexIntervalSecs,\n embedInProgress,\n lastEmbedTime: lastEmbedTime ? new Date(lastEmbedTime).toISOString() : null,\n embedIntervalSecs: daemonConfig.embedIntervalSecs,\n socketPath: daemonConfig.socketPath,\n storageBackend: storageBackend.backendType,\n db: dbStats,\n },\n });\n socket.end();\n return;\n }\n\n // Special: index_now — trigger immediate index (non-blocking response)\n if (method === \"index_now\") {\n // Fire and forget — don't await\n runIndex().catch((e) => {\n process.stderr.write(`[pai-daemon] index_now error: ${e}\\n`);\n });\n sendResponse(socket, { id, ok: true, result: { triggered: true } });\n socket.end();\n return;\n }\n\n // Special: notification_get_config — return current notification config\n if (method === \"notification_get_config\") {\n sendResponse(socket, {\n id,\n ok: true,\n result: {\n config: notificationConfig,\n activeChannels: Object.entries(notificationConfig.channels)\n .filter(([ch, cfg]) => ch !== \"voice\" && (cfg as { enabled: boolean }).enabled)\n .map(([ch]) => ch),\n },\n });\n socket.end();\n return;\n }\n\n // Special: 
notification_set_config — patch the notification config\n if (method === \"notification_set_config\") {\n try {\n const p = params as {\n mode?: NotificationMode;\n channels?: Record<string, unknown>;\n routing?: Record<string, unknown>;\n };\n notificationConfig = patchNotificationConfig({\n mode: p.mode,\n channels: p.channels as Parameters<typeof patchNotificationConfig>[0][\"channels\"],\n routing: p.routing as Parameters<typeof patchNotificationConfig>[0][\"routing\"],\n });\n sendResponse(socket, {\n id,\n ok: true,\n result: { config: notificationConfig },\n });\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n sendResponse(socket, { id, ok: false, error: msg });\n }\n socket.end();\n return;\n }\n\n // Special: notification_send — route a notification to configured channels\n if (method === \"notification_send\") {\n const p = params as {\n event?: string;\n message?: string;\n title?: string;\n };\n\n if (!p.message) {\n sendResponse(socket, { id, ok: false, error: \"notification_send: message is required\" });\n socket.end();\n return;\n }\n\n const event = (p.event as NotificationConfig[\"routing\"] extends Record<infer K, unknown> ? K : string) ?? \"info\";\n\n routeNotification(\n {\n event: event as Parameters<typeof routeNotification>[0][\"event\"],\n message: p.message,\n title: p.title,\n },\n notificationConfig\n ).then((result) => {\n sendResponse(socket, { id, ok: true, result });\n socket.end();\n }).catch((e) => {\n const msg = e instanceof Error ? e.message : String(e);\n sendResponse(socket, { id, ok: false, error: msg });\n socket.end();\n });\n return;\n }\n\n // All other methods: PAI tool dispatch\n try {\n const result = await dispatchTool(method, params);\n sendResponse(socket, { id, ok: true, result });\n } catch (e) {\n const msg = e instanceof Error ? 
e.message : String(e);\n sendResponse(socket, { id, ok: false, error: msg });\n }\n socket.end();\n}\n\n/**\n * Check whether an existing socket file is actually being served by a live process.\n * Returns true if a daemon is already accepting connections, false otherwise.\n */\nfunction isSocketLive(path: string): Promise<boolean> {\n return new Promise((resolve) => {\n const client = connect(path);\n const timer = setTimeout(() => { client.destroy(); resolve(false); }, 500);\n client.on(\"connect\", () => { clearTimeout(timer); client.end(); resolve(true); });\n client.on(\"error\", () => { clearTimeout(timer); resolve(false); });\n });\n}\n\n/**\n * Start the Unix Domain Socket IPC server.\n */\nasync function startIpcServer(socketPath: string): Promise<Server> {\n // Before removing the socket file, check whether another daemon is already live\n if (existsSync(socketPath)) {\n const live = await isSocketLive(socketPath);\n if (live) {\n throw new Error(\"Another daemon is already running — socket is live. Aborting startup.\");\n }\n try {\n unlinkSync(socketPath);\n process.stderr.write(\"[pai-daemon] Removed stale socket file.\\n\");\n } catch {\n // If we can't remove it, bind will fail with a clear error\n }\n }\n\n const server = createServer((socket: Socket) => {\n let buffer = \"\";\n\n socket.on(\"data\", (chunk: Buffer) => {\n buffer += chunk.toString();\n let nl: number;\n // Process every complete newline-delimited frame in this chunk\n while ((nl = buffer.indexOf(\"\\n\")) !== -1) {\n const line = buffer.slice(0, nl);\n buffer = buffer.slice(nl + 1);\n\n if (line.trim() === \"\") continue; // skip blank lines between frames\n\n let request: IpcRequest;\n try {\n request = JSON.parse(line) as IpcRequest;\n } catch {\n sendResponse(socket, { id: \"?\", ok: false, error: \"Invalid JSON\" });\n socket.destroy();\n return;\n }\n\n handleRequest(request, socket).catch((e: unknown) => {\n const msg = e instanceof Error ? 
e.message : String(e);\n sendResponse(socket, { id: request.id, ok: false, error: msg });\n socket.destroy();\n });\n }\n });\n\n socket.on(\"error\", () => {\n // Client disconnected — nothing to do\n });\n });\n\n server.on(\"error\", (e) => {\n process.stderr.write(`[pai-daemon] IPC server error: ${e}\\n`);\n });\n\n server.listen(socketPath, () => {\n process.stderr.write(\n `[pai-daemon] IPC server listening on ${socketPath}\\n`\n );\n });\n\n return server;\n}\n\n// ---------------------------------------------------------------------------\n// Main daemon entry point\n// ---------------------------------------------------------------------------\n\nexport async function serve(config: PaiDaemonConfig): Promise<void> {\n daemonConfig = config;\n startTime = Date.now();\n\n // Load notification config from disk (merged with defaults)\n notificationConfig = loadNotificationConfig();\n\n process.stderr.write(\"[pai-daemon] Starting daemon...\\n\");\n process.stderr.write(`[pai-daemon] Socket: ${config.socketPath}\\n`);\n process.stderr.write(`[pai-daemon] Storage backend: ${config.storageBackend}\\n`);\n process.stderr.write(\n `[pai-daemon] Notification mode: ${notificationConfig.mode}\\n`\n );\n\n // Lower the daemon's scheduling priority so it yields CPU to interactive\n // Claude Code sessions and editor processes during indexing and embedding.\n // niceness 10 = noticeably lower priority without making it unresponsive.\n // Non-fatal: some environments (containers, restricted sandboxes) may deny it.\n try { setPriority(process.pid, 10); } catch { /* non-fatal */ }\n\n // Configure embedding model from config (before any embed work starts)\n configureEmbeddingModel(config.embeddingModel);\n\n // Open registry (always SQLite)\n try {\n registryDb = openRegistry();\n process.stderr.write(\"[pai-daemon] Registry database opened.\\n\");\n } catch (e) {\n const msg = e instanceof Error ? 
e.message : String(e);\n process.stderr.write(`[pai-daemon] Fatal: Could not open registry: ${msg}\\n`);\n process.exit(1);\n }\n\n // Open federation storage (SQLite or Postgres with auto-fallback)\n try {\n storageBackend = await createStorageBackend(config);\n process.stderr.write(\n `[pai-daemon] Federation backend: ${storageBackend.backendType}\\n`\n );\n } catch (e) {\n const msg = e instanceof Error ? e.message : String(e);\n process.stderr.write(`[pai-daemon] Fatal: Could not open federation storage: ${msg}\\n`);\n process.exit(1);\n }\n\n // Start index scheduler\n startIndexScheduler();\n\n // Start embed scheduler (Postgres backend only)\n if (storageBackend.backendType === \"postgres\") {\n startEmbedScheduler();\n } else {\n process.stderr.write(\n \"[pai-daemon] Embed scheduler: disabled (SQLite backend)\\n\"\n );\n }\n\n // Start IPC server (async: checks for a live daemon before unlinking socket)\n const server = await startIpcServer(config.socketPath);\n\n const shutdown = async (signal: string): Promise<void> => {\n process.stderr.write(`\\n[pai-daemon] ${signal} received. 
Stopping.\\n`);\n\n // Signal all long-running loops to stop between batches\n shutdownRequested = true;\n\n // Stop schedulers so no new runs are launched\n if (indexSchedulerTimer) {\n clearInterval(indexSchedulerTimer);\n }\n\n if (embedSchedulerTimer) {\n clearInterval(embedSchedulerTimer);\n }\n\n // Stop accepting new IPC connections\n server.close();\n\n // Wait for any in-progress index or embed pass to finish, up to 10 s.\n // Without this wait, closing the pool while an async query is running\n // causes \"Cannot use a pool after calling end on the pool\" and a dirty crash.\n const SHUTDOWN_TIMEOUT_MS = 10_000;\n const POLL_INTERVAL_MS = 100;\n const deadline = Date.now() + SHUTDOWN_TIMEOUT_MS;\n\n if (indexInProgress || embedInProgress) {\n process.stderr.write(\n `[pai-daemon] Waiting for in-progress operations to finish ` +\n `(index=${indexInProgress}, embed=${embedInProgress})...\\n`\n );\n\n while ((indexInProgress || embedInProgress) && Date.now() < deadline) {\n await new Promise((resolve) => setTimeout(resolve, POLL_INTERVAL_MS));\n }\n\n if (indexInProgress || embedInProgress) {\n process.stderr.write(\n \"[pai-daemon] Shutdown timeout reached — forcing exit.\\n\"\n );\n } else {\n process.stderr.write(\"[pai-daemon] In-progress operations finished.\\n\");\n }\n }\n\n try {\n await storageBackend.close();\n } catch {\n // ignore\n }\n\n try {\n unlinkSync(config.socketPath);\n } catch {\n // ignore\n }\n\n process.exit(0);\n };\n\n process.on(\"SIGINT\", () => { shutdown(\"SIGINT\").catch(() => process.exit(0)); });\n process.on(\"SIGTERM\", () => { shutdown(\"SIGTERM\").catch(() => process.exit(0)); });\n\n // Keep process alive\n await new Promise(() => 
{});\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;AAqCA,SAAS,UACP,QACA,QACG;CACH,MAAM,SAAS,EAAE,GAAG,QAAQ;AAC5B,MAAK,MAAM,OAAO,OAAO,KAAK,OAAO,EAAE;EACrC,MAAM,SAAS,OAAO;AACtB,MAAI,WAAW,UAAa,WAAW,KAAM;EAC7C,MAAM,SAAU,OAAmC;AACnD,MACE,OAAO,WAAW,YAClB,CAAC,MAAM,QAAQ,OAAO,IACtB,OAAO,WAAW,YAClB,WAAW,QACX,CAAC,MAAM,QAAQ,OAAO,CAEtB,CAAC,OAAmC,OAAO,UACzC,QACA,OACD;MAED,CAAC,OAAmC,OAAO;;AAG/C,QAAO;;;;;;AAWT,SAAgB,yBAA6C;AAC3D,KAAI,CAAC,WAAW,YAAY,CAC1B,QAAO,EAAE,GAAG,6BAA6B;CAG3C,IAAI;AACJ,KAAI;AACF,QAAM,aAAa,aAAa,QAAQ;SAClC;AACN,SAAO,EAAE,GAAG,6BAA6B;;CAG3C,IAAI;AACJ,KAAI;AACF,WAAS,KAAK,MAAM,IAAI;SAClB;AACN,SAAO,EAAE,GAAG,6BAA6B;;CAG3C,MAAM,SAAS,OAAO;AACtB,KAAI,CAAC,UAAU,OAAO,WAAW,SAC/B,QAAO,EAAE,GAAG,6BAA6B;AAG3C,QAAO,UACL,6BACA,OACD;;;;;;AAWH,SAAgB,uBAAuB,QAAkC;AAEvE,KAAI,CAAC,WAAW,WAAW,CACzB,WAAU,YAAY,EAAE,WAAW,MAAM,CAAC;CAI5C,IAAI,OAAgC,EAAE;AACtC,KAAI,WAAW,YAAY,CACzB,KAAI;AACF,SAAO,KAAK,MAAM,aAAa,aAAa,QAAQ,CAAC;SAI/C;AAMV,MAAK,mBAAmB;AAExB,eAAc,aAAa,KAAK,UAAU,MAAM,MAAM,EAAE,GAAG,MAAM,QAAQ;;;;;;AAW3E,SAAgB,wBAAwB,OAIjB;CACrB,MAAM,UAAU,wBAAwB;AAExC,KAAI,MAAM,SAAS,OACjB,SAAQ,OAAO,MAAM;AAGvB,KAAI,MAAM,SACR,SAAQ,WAAW,UACjB,QAAQ,UACR,MAAM,SACP;AAGH,KAAI,MAAM,QACR,SAAQ,UAAU,UAChB,QAAQ,SACR,MAAM,QACP;AAGH,wBAAuB,QAAQ;AAC/B,QAAO;;;;;AC7JT,IAAa,eAAb,MAA0D;CACxD,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,QACkB;EAClB,MAAM,MAAM,OAAO,SAAS;AAC5B,MAAI,CAAC,IAAI,WAAW,CAAC,IAAI,IAAK,QAAO;AAErC,MAAI;GACF,MAAM,UAAkC,EACtC,gBAAgB,6BACjB;AAED,OAAI,QAAQ,MACV,SAAQ,WAAW,QAAQ;AAG7B,OAAI,IAAI,YAAY,IAAI,aAAa,UACnC,SAAQ,cAAc,IAAI;AAS5B,WANiB,MAAM,MAAM,IAAI,KAAK;IACpC,QAAQ;IACR;IACA,MAAM,QAAQ;IACf,CAAC,EAEc;UACV;AACN,UAAO;;;;;;;;;;;;;;;;;;ACvBb,MAAM,gBAAgB;AACtB,MAAM,oBAAoB;;;;;AAM1B,SAAS,WACP,QACA,QACkB;AAClB,QAAO,IAAI,SAAS,YAAY;EAC9B,IAAI,OAAO;EACX,IAAI,SAAS;EACb,IAAI,QAA8C;EAElD,SAAS,OAAO,IAAmB;AACjC,OAAI,KAAM;AACV,UAAO;AACP,OAAI,OAAO;AAAE,iBAAa,MAAM;AAAE,YAAQ;;AAC1C,OAAI;AAAE,YAAQ,SAAS;WAAU;AACjC,WAAQ,GAAG;;EAGb,MAAM,SAAS,QAAQ,qBAAqB;GAC1C,MAAM,UAAU;IACd,SAAS;IACT,IAAI,YAAY;IAChB;IACA;IACD;AACD,UAAO,MAAM,KAAK,
UAAU,QAAQ,GAAG,KAAK;IAC5C;AAEF,SAAO,GAAG,SAAS,UAAkB;AACnC,aAAU,MAAM,UAAU;GAC1B,MAAM,KAAK,OAAO,QAAQ,KAAK;AAC/B,OAAI,OAAO,GAAI;AACf,OAAI;AAEF,WAAO,CADM,KAAK,MAAM,OAAO,MAAM,GAAG,GAAG,CAAC,CAC/B,MAAM;WACb;AACN,WAAO,MAAM;;IAEf;AAEF,SAAO,GAAG,eAAe,OAAO,MAAM,CAAC;AACvC,SAAO,GAAG,aAAa,OAAO,MAAM,CAAC;AAErC,UAAQ,iBAAiB,OAAO,MAAM,EAAE,kBAAkB;GAC1D;;AAGJ,IAAa,mBAAb,MAA8D;CAC5D,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,QACkB;EAClB,MAAM,MAAM,OAAO,SAAS;AAC5B,MAAI,CAAC,IAAI,QAAS,QAAO;EAEzB,MAAM,cAAc,OAAO,SAAS,WAAW,OAAO,SAAS,MAAM;EAErE,MAAM,SAAkC,EACtC,SAAS,QAAQ,SAClB;AAED,MAAI,IAAI,UACN,QAAO,YAAY,IAAI;AAGzB,MAAI,eAAe,OAAO,SAAS,QAEjC,QAAO,QADW,OAAO,SAAS,MAAM,aAAa;AAIvD,SAAO,WAAW,iBAAiB,OAAO;;;;;;;;;;;;ACpF9C,IAAa,gBAAb,MAA2D;CACzD,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,QACkB;AAElB,MAAI,CADQ,OAAO,SAAS,MACnB,QAAS,QAAO;AAEzB,MAAI;GAGF,MAAM,aAFQ,QAAQ,SAAS,OAEP,QAAQ,MAAM,MAAM;GAG5C,MAAM,SAAS,yBAFK,QAAQ,QAAQ,QAAQ,MAAM,MAAM,CAEJ,gBAAgB,UAAU;AAE9E,UAAO,IAAI,SAAS,YAAY;IAC9B,MAAM,QAAQ,MAAM,aAAa,CAAC,MAAM,OAAO,EAAE;KAC/C,UAAU;KACV,OAAO;KACR,CAAC;AACF,UAAM,OAAO;AAIb,UAAM,GAAG,eAAe,QAAQ,MAAM,CAAC;AAGvC,qBAAiB,QAAQ,KAAK,EAAE,IAAI;KACpC;UACI;AACN,UAAO;;;;;;;AClCb,IAAa,cAAb,MAAyD;CACvD,AAAS,YAAY;CAErB,MAAM,KACJ,SACA,SACkB;EAClB,MAAM,SAAS,eAAe,QAAQ,MAAM;EAC5C,MAAM,QAAQ,QAAQ,QAAQ,IAAI,QAAQ,MAAM,KAAK;AACrD,UAAQ,OAAO,MAAM,GAAG,SAAS,MAAM,GAAG,QAAQ,QAAQ,IAAI;AAC9D,SAAO;;;;;;ACGX,MAAM,YAAqD;CACzD,MAAW,IAAI,cAAc;CAC7B,UAAW,IAAI,kBAAkB;CACjC,OAAW,IAAI,eAAe;CAC9B,OAAW,IAAI,kBAAkB;CACjC,KAAW,IAAI,aAAa;CAC7B;;;;;;;;;;;;;;AAmBD,SAAS,gBACP,QACA,OACa;CACb,MAAM,EAAE,MAAM,UAAU,YAAY;AAEpC,KAAI,SAAS,MAAO,QAAO,EAAE;CAG7B,MAAM,gBAA8D;EAClE,OAAW;EACX,UAAW;EACX,MAAW;EACX,OAAW;EACX,KAAW;EACZ;AAED,KAAI,SAAS,QAAQ;EACnB,MAAM,KAAK,cAAc;AACzB,MAAI,CAAC,GAAI,QAAO,EAAE;EAElB,MAAM,MAAM,SAAS;AACrB,MAAI,OAAO,CAAC,IAAI,QAAS,QAAO,CAAC,GAAG;AACpC,SAAO,CAAC,GAAG;;AAKb,SADmB,QAAQ,UAAU,EAAE,EACrB,QAAQ,OAAO;EAC/B,MAAM,MAAM,SAAS;AAGrB,MAAI,OAAO,QAAS,QAAO;AAC3B,SAAO,KAAK,YAAY;GACxB;;;;;;;;;;;AAgBJ,eAAsB,kBACpB,SACA,QACqB;CACrB,MAAM,WAAW,gBAAgB,QAAQ,QAAQ,MAAM;
AAEvD,KAAI,SAAS,WAAW,EACtB,QAAO;EACL,mBAAmB,EAAE;EACrB,mBAAmB,EAAE;EACrB,gBAAgB,EAAE;EAClB,MAAM,OAAO;EACd;CAGH,MAAM,UAAU,MAAM,QAAQ,WAC5B,SAAS,IAAI,OAAO,OAAO;EAEzB,MAAM,KAAK,MADM,UAAU,IACD,KAAK,SAAS,OAAO;AAC/C,MAAI,CAAC,GACH,SAAQ,OAAO,MACb,wBAAwB,GAAG,oBAAoB,QAAQ,MAAM,IAC9D;AAEH,SAAO;GAAE;GAAI;GAAI;GACjB,CACH;CAED,MAAM,YAAyB,EAAE;CACjC,MAAM,SAAsB,EAAE;AAE9B,MAAK,MAAM,KAAK,QACd,KAAI,EAAE,WAAW,YACf,KAAI,EAAE,MAAM,GACV,WAAU,KAAK,EAAE,MAAM,GAAG;KAE1B,QAAO,KAAK,EAAE,MAAM,GAAG;KAIzB,QAAO,KAAK,SAAS,QAAQ,QAAQ,EAAE,EAAE;AAI7C,QAAO;EACL,mBAAmB;EACnB,mBAAmB;EACnB,gBAAgB;EAChB,MAAM,OAAO;EACd;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AC9DH,IAAI;AACJ,IAAI;AACJ,IAAI;AACJ,IAAI,YAAY,KAAK,KAAK;AAG1B,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,sBAA6D;AAGjE,IAAI,kBAAkB;AACtB,IAAI,gBAAgB;AACpB,IAAI,sBAA6D;;AAOjE,IAAI;;;;;;;AAYJ,IAAI,oBAAoB;;;;;;;;;;AAexB,eAAe,WAA0B;AACvC,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,sDAAsD;AAC3E;;AAGF,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,yDAAyD;AAC9E;;AAGF,mBAAkB;CAClB,MAAM,KAAK,KAAK,KAAK;AAErB,KAAI;AACF,UAAQ,OAAO,MAAM,iDAAiD;AAEtE,MAAI,eAAe,gBAAgB,UAAU;GAG3C,MAAM,EAAE,kBAAkB,MAAM,OAAO;AACvC,OAAI,0BAA0B,eAAe;IAE3C,MAAM,EAAE,UAAU,WAAW,MAAM,SADvB,eAAuC,UAAU,EACb,WAAW;IAC3D,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,oBAAgB,KAAK,KAAK;AAC1B,YAAQ,OAAO,MACb,gCAAgC,SAAS,aACpC,OAAO,eAAe,UAAU,OAAO,cAAc,WACpD,QAAQ,OACf;;SAEE;GAEL,MAAM,EAAE,wBAAwB,MAAM,OAAO;GAC7C,MAAM,EAAE,UAAU,WAAW,MAAM,oBAAoB,gBAAgB,WAAW;GAClF,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,mBAAgB,KAAK,KAAK;AAC1B,WAAQ,OAAO,MACb,2CAA2C,SAAS,aAC/C,OAAO,eAAe,UAAU,OAAO,cAAc,WACpD,QAAQ,OACf;;UAEI,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,6BAA6B,IAAI,IAAI;WAClD;AACR,oBAAkB;;;;;;AAetB,SAAS,sBAA4B;CACnC,MAAM,aAAa,aAAa,oBAAoB;AAEpD,SAAQ,OAAO,MACb,uCAAuC,aAAa,kBAAkB,KACvE;AAGD,kBAAiB;AACf,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,qCAAqC,EAAE,IAAI;IAChE;IACD,IAAM;AAET,uBAAsB,kBAAkB;AACtC,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;IAClE;IACD,WAAW;AAGd,KAAI,oBAAoB,MACtB,qBAAoB,OAAO;;;;;;;AAa/B,eAAe,WAA0B;AACvC,KAAI,i
BAAiB;AACnB,UAAQ,OAAO,MAAM,sDAAsD;AAC3E;;AAIF,KAAI,iBAAiB;AACnB,UAAQ,OAAO,MAAM,0DAA0D;AAC/E;;AAKF,KAAI,eAAe,gBAAgB,WACjC;AAGF,mBAAkB;CAClB,MAAM,KAAK,KAAK,KAAK;AAErB,KAAI;AACF,UAAQ,OAAO,MAAM,kDAAkD;EAEvE,MAAM,EAAE,2BAA2B,MAAM,OAAO;EAChD,MAAM,QAAQ,MAAM,uBAAuB,sBAAsB,kBAAkB;EAEnF,MAAM,UAAU,KAAK,KAAK,GAAG;AAC7B,kBAAgB,KAAK,KAAK;AAC1B,UAAQ,OAAO,MACb,qCAAqC,MAAM,oBAAoB,QAAQ,OACxE;UACM,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,6BAA6B,IAAI,IAAI;WAClD;AACR,oBAAkB;;;;;;;AAQtB,SAAS,sBAA4B;CACnC,MAAM,aAAa,aAAa,oBAAoB;AAEpD,SAAQ,OAAO,MACb,uCAAuC,aAAa,kBAAkB,KACvE;AAGD,kBAAiB;AACf,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,qCAAqC,EAAE,IAAI;IAChE;IACD,IAAO;AAEV,uBAAsB,kBAAkB;AACtC,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,uCAAuC,EAAE,IAAI;IAClE;IACD,WAAW;AAGd,KAAI,oBAAoB,MACtB,qBAAoB,OAAO;;;;;;AAY/B,eAAe,aACb,QACA,QACkB;CAIlB,MAAM,IAAI;AAEV,SAAQ,QAAR;EACE,KAAK,gBACH,QAAO,iBAAiB,YAAY,gBAAgB,EAA4C;EAElG,KAAK,aACH,QAAO,cAAc,YAAY,EAAyC;EAE5E,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,kBACH,QAAO,mBAAmB,YAAY,EAA8C;EAEtF,KAAK,iBACH,QAAO,kBAAkB,YAAY,EAA6C;EAEpF,KAAK,iBACH,QAAO,kBAAkB,YAAY,EAA6C;EAEpF,KAAK,eACH,QAAO,gBAAgB,YAAY,EAA2C;EAEhF,KAAK,cACH,QAAO,iBACL,YACA,gBACA,EACD;EAEH,KAAK,qBACH,QAAO,iBACL,YACA,gBACA,EACD;EAEH,QACE,OAAM,IAAI,MAAM,mBAAmB,SAAS;;;AAQlD,SAAS,aAAa,QAAgB,UAA6B;AACjE,KAAI;AACF,SAAO,MAAM,KAAK,UAAU,SAAS,GAAG,KAAK;SACvC;;;;;AAQV,eAAe,cACb,SACA,QACe;CACf,MAAM,EAAE,IAAI,QAAQ,WAAW;AAG/B,KAAI,WAAW,UAAU;EACvB,MAAM,UAAU,OAAO,YAAY;AACjC,OAAI;IACF,MAAM,WAAW,MAAM,eAAe,UAAU;IAChD,MAAM,WACJ,WACG,QAAQ,qCAAqC,CAC7C,KAAK,CACR;AACF,WAAO;KAAE,OAAO,SAAS;KAAO,QAAQ,SAAS;KAAQ;KAAU;WAC7D;AACN,WAAO;;MAEP;AAEJ,eAAa,QAAQ;GACnB;GACA,IAAI;GACJ,QAAQ;IACN,QAAQ,KAAK,OAAO,KAAK,KAAK,GAAG,aAAa,IAAK;IACnD;IACA,eAAe,gBAAgB,IAAI,KAAK,cAAc,CAAC,aAAa,GAAG;IACvE,mBAAmB,aAAa;IAChC;IACA,eAAe,gBAAgB,IAAI,KAAK,cAAc,CAAC,aAAa,GAAG;IACvE,mBAAmB,aAAa;IAChC,YAAY,aAAa;IACzB,gBAAgB,eAAe;IAC/B,IAAI;IACL;GACF,CAAC;A
ACF,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,aAAa;AAE1B,YAAU,CAAC,OAAO,MAAM;AACtB,WAAQ,OAAO,MAAM,iCAAiC,EAAE,IAAI;IAC5D;AACF,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAM,QAAQ,EAAE,WAAW,MAAM;GAAE,CAAC;AACnE,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,2BAA2B;AACxC,eAAa,QAAQ;GACnB;GACA,IAAI;GACJ,QAAQ;IACN,QAAQ;IACR,gBAAgB,OAAO,QAAQ,mBAAmB,SAAS,CACxD,QAAQ,CAAC,IAAI,SAAS,OAAO,WAAY,IAA6B,QAAQ,CAC9E,KAAK,CAAC,QAAQ,GAAG;IACrB;GACF,CAAC;AACF,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,2BAA2B;AACxC,MAAI;GACF,MAAM,IAAI;AAKV,wBAAqB,wBAAwB;IAC3C,MAAM,EAAE;IACR,UAAU,EAAE;IACZ,SAAS,EAAE;IACZ,CAAC;AACF,gBAAa,QAAQ;IACnB;IACA,IAAI;IACJ,QAAQ,EAAE,QAAQ,oBAAoB;IACvC,CAAC;WACK,GAAG;AAEV,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAO,OAD1B,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;IACJ,CAAC;;AAErD,SAAO,KAAK;AACZ;;AAIF,KAAI,WAAW,qBAAqB;EAClC,MAAM,IAAI;AAMV,MAAI,CAAC,EAAE,SAAS;AACd,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAO,OAAO;IAA0C,CAAC;AACxF,UAAO,KAAK;AACZ;;AAKF,oBACE;GACE,OAJW,EAAE,SAAyF;GAKtG,SAAS,EAAE;GACX,OAAO,EAAE;GACV,EACD,mBACD,CAAC,MAAM,WAAW;AACjB,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAM;IAAQ,CAAC;AAC9C,UAAO,KAAK;IACZ,CAAC,OAAO,MAAM;AAEd,gBAAa,QAAQ;IAAE;IAAI,IAAI;IAAO,OAD1B,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;IACJ,CAAC;AACnD,UAAO,KAAK;IACZ;AACF;;AAIF,KAAI;AAEF,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAM,QADtB,MAAM,aAAa,QAAQ,OAAO;GACJ,CAAC;UACvC,GAAG;AAEV,eAAa,QAAQ;GAAE;GAAI,IAAI;GAAO,OAD1B,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;GACJ,CAAC;;AAErD,QAAO,KAAK;;;;;;AAOd,SAAS,aAAa,MAAgC;AACpD,QAAO,IAAI,SAAS,YAAY;EAC9B,MAAM,SAAS,QAAQ,KAAK;EAC5B,MAAM,QAAQ,iBAAiB;AAAE,UAAO,SAAS;AAAE,WAAQ,MAAM;KAAK,IAAI;AAC1E,SAAO,GAAG,iBAAiB;AAAE,gBAAa,MAAM;AAAE,UAAO,KAAK;AAAE,WAAQ,KAAK;IAAI;AACjF,SAAO,GAAG,eAAe;AAAE,gBAAa,MAAM;AAAE,WAAQ,MAAM;IAAI;GAClE;;;;;AAMJ,eAAe,eAAe,YAAqC;AAEjE,KAAI,WAAW,WAAW,EAAE;AAE1B,MADa,MAAM,aAAa,WAAW,CAEzC,OAAM,IAAI,MAAM,wEAAwE;AAE1F,MAAI;AACF,cAAW,WAAW;AACtB,WAAQ,OAAO,MAAM,4CAA4C;UAC3D;;CAKV,MAAM,SAAS,cAAc,WAAmB;EAC9C,IAAI,SAAS;AAEb,SAAO,GAAG,SAAS,UAAkB;AACnC,aAAU,MAAM,UAAU;GAC1B,IAAI;AAEJ,WAAQ,KAAK,OAAO,QAAQ,KAAK,MAAM,IAAI;IACzC,MAAM,OAAO,OAAO,MAAM,GAAG,GAAG;AAChC,aAAS,OAAO,MAAM,KAAK,EAAE;AAE7B,QAAI,KA
AK,MAAM,KAAK,GAAI;IAExB,IAAI;AACJ,QAAI;AACF,eAAU,KAAK,MAAM,KAAK;YACpB;AACN,kBAAa,QAAQ;MAAE,IAAI;MAAK,IAAI;MAAO,OAAO;MAAgB,CAAC;AACnE,YAAO,SAAS;AAChB;;AAGF,kBAAc,SAAS,OAAO,CAAC,OAAO,MAAe;KACnD,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,kBAAa,QAAQ;MAAE,IAAI,QAAQ;MAAI,IAAI;MAAO,OAAO;MAAK,CAAC;AAC/D,YAAO,SAAS;MAChB;;IAEJ;AAEF,SAAO,GAAG,eAAe,GAEvB;GACF;AAEF,QAAO,GAAG,UAAU,MAAM;AACxB,UAAQ,OAAO,MAAM,kCAAkC,EAAE,IAAI;GAC7D;AAEF,QAAO,OAAO,kBAAkB;AAC9B,UAAQ,OAAO,MACb,wCAAwC,WAAW,IACpD;GACD;AAEF,QAAO;;AAOT,eAAsB,MAAM,QAAwC;AAClE,gBAAe;AACf,aAAY,KAAK,KAAK;AAGtB,sBAAqB,wBAAwB;AAE7C,SAAQ,OAAO,MAAM,oCAAoC;AACzD,SAAQ,OAAO,MAAM,wBAAwB,OAAO,WAAW,IAAI;AACnE,SAAQ,OAAO,MAAM,iCAAiC,OAAO,eAAe,IAAI;AAChF,SAAQ,OAAO,MACb,mCAAmC,mBAAmB,KAAK,IAC5D;AAMD,KAAI;AAAE,cAAY,QAAQ,KAAK,GAAG;SAAU;AAG5C,yBAAwB,OAAO,eAAe;AAG9C,KAAI;AACF,eAAa,cAAc;AAC3B,UAAQ,OAAO,MAAM,2CAA2C;UACzD,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,gDAAgD,IAAI,IAAI;AAC7E,UAAQ,KAAK,EAAE;;AAIjB,KAAI;AACF,mBAAiB,MAAM,qBAAqB,OAAO;AACnD,UAAQ,OAAO,MACb,oCAAoC,eAAe,YAAY,IAChE;UACM,GAAG;EACV,MAAM,MAAM,aAAa,QAAQ,EAAE,UAAU,OAAO,EAAE;AACtD,UAAQ,OAAO,MAAM,0DAA0D,IAAI,IAAI;AACvF,UAAQ,KAAK,EAAE;;AAIjB,sBAAqB;AAGrB,KAAI,eAAe,gBAAgB,WACjC,sBAAqB;KAErB,SAAQ,OAAO,MACb,4DACD;CAIH,MAAM,SAAS,MAAM,eAAe,OAAO,WAAW;CAEtD,MAAM,WAAW,OAAO,WAAkC;AACxD,UAAQ,OAAO,MAAM,kBAAkB,OAAO,wBAAwB;AAGtE,sBAAoB;AAGpB,MAAI,oBACF,eAAc,oBAAoB;AAGpC,MAAI,oBACF,eAAc,oBAAoB;AAIpC,SAAO,OAAO;EAKd,MAAM,sBAAsB;EAC5B,MAAM,mBAAmB;EACzB,MAAM,WAAW,KAAK,KAAK,GAAG;AAE9B,MAAI,mBAAmB,iBAAiB;AACtC,WAAQ,OAAO,MACb,oEACY,gBAAgB,UAAU,gBAAgB,QACvD;AAED,WAAQ,mBAAmB,oBAAoB,KAAK,KAAK,GAAG,SAC1D,OAAM,IAAI,SAAS,YAAY,WAAW,SAAS,iBAAiB,CAAC;AAGvE,OAAI,mBAAmB,gBACrB,SAAQ,OAAO,MACb,0DACD;OAED,SAAQ,OAAO,MAAM,kDAAkD;;AAI3E,MAAI;AACF,SAAM,eAAe,OAAO;UACtB;AAIR,MAAI;AACF,cAAW,OAAO,WAAW;UACvB;AAIR,UAAQ,KAAK,EAAE;;AAGjB,SAAQ,GAAG,gBAAgB;AAAE,WAAS,SAAS,CAAC,YAAY,QAAQ,KAAK,EAAE,CAAC;GAAI;AAChF,SAAQ,GAAG,iBAAiB;AAAE,WAAS,UAAU,CAAC,YAAY,QAAQ,KAAK,EAAE,CAAC;GAAI;AAGlF,OAAM,IAAI,cAAc,GAAG"
}