@tekmidian/pai 0.5.6 → 0.5.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. package/README.md +20 -2
  2. package/dist/cli/index.mjs +479 -5
  3. package/dist/cli/index.mjs.map +1 -1
  4. package/dist/daemon/index.mjs +2 -2
  5. package/dist/{daemon-D9evGlgR.mjs → daemon-2ND5WO2j.mjs} +3 -3
  6. package/dist/{daemon-D9evGlgR.mjs.map → daemon-2ND5WO2j.mjs.map} +1 -1
  7. package/dist/{db-4lSqLFb8.mjs → db-BtuN768f.mjs} +9 -2
  8. package/dist/db-BtuN768f.mjs.map +1 -0
  9. package/dist/hooks/capture-all-events.mjs +19 -4
  10. package/dist/hooks/capture-all-events.mjs.map +4 -4
  11. package/dist/hooks/cleanup-session-files.mjs.map +2 -2
  12. package/dist/hooks/context-compression-hook.mjs +14 -9
  13. package/dist/hooks/context-compression-hook.mjs.map +3 -3
  14. package/dist/hooks/initialize-session.mjs +14 -8
  15. package/dist/hooks/initialize-session.mjs.map +3 -3
  16. package/dist/hooks/load-core-context.mjs +18 -2
  17. package/dist/hooks/load-core-context.mjs.map +4 -4
  18. package/dist/hooks/load-project-context.mjs +14 -8
  19. package/dist/hooks/load-project-context.mjs.map +3 -3
  20. package/dist/hooks/stop-hook.mjs +105 -8
  21. package/dist/hooks/stop-hook.mjs.map +3 -3
  22. package/dist/hooks/sync-todo-to-md.mjs.map +2 -2
  23. package/dist/index.d.mts +2 -2
  24. package/dist/index.d.mts.map +1 -1
  25. package/dist/index.mjs +1 -1
  26. package/dist/mcp/index.mjs +1 -1
  27. package/dist/{vault-indexer-DXWs9pDn.mjs → vault-indexer-k-kUlaZ-.mjs} +41 -7
  28. package/dist/vault-indexer-k-kUlaZ-.mjs.map +1 -0
  29. package/package.json +1 -1
  30. package/src/hooks/ts/capture-all-events.ts +6 -0
  31. package/src/hooks/ts/lib/project-utils.ts +24 -5
  32. package/src/hooks/ts/pre-compact/context-compression-hook.ts +6 -0
  33. package/src/hooks/ts/session-start/initialize-session.ts +7 -1
  34. package/src/hooks/ts/session-start/load-core-context.ts +7 -0
  35. package/src/hooks/ts/session-start/load-project-context.ts +8 -1
  36. package/src/hooks/ts/stop/stop-hook.ts +28 -0
  37. package/templates/claude-md.template.md +7 -74
  38. package/templates/skills/CORE/Aesthetic.md +333 -0
  39. package/templates/skills/CORE/CONSTITUTION.md +1502 -0
  40. package/templates/skills/CORE/HistorySystem.md +427 -0
  41. package/templates/skills/CORE/HookSystem.md +1082 -0
  42. package/templates/skills/CORE/Prompting.md +509 -0
  43. package/templates/skills/CORE/ProsodyAgentTemplate.md +53 -0
  44. package/templates/skills/CORE/ProsodyGuide.md +416 -0
  45. package/templates/skills/CORE/SKILL.md +741 -0
  46. package/templates/skills/CORE/SkillSystem.md +213 -0
  47. package/templates/skills/CORE/TerminalTabs.md +119 -0
  48. package/templates/skills/CORE/VOICE.md +106 -0
  49. package/templates/skills/user/.gitkeep +0 -0
  50. package/dist/db-4lSqLFb8.mjs.map +0 -1
  51. package/dist/vault-indexer-DXWs9pDn.mjs.map +0 -1
@@ -0,0 +1,213 @@
1
+ # Custom Skill System
2
+
3
+ **The MANDATORY configuration system for ALL PAI skills.**
4
+
5
+ ---
6
+
7
+ ## THIS IS THE AUTHORITATIVE SOURCE
8
+
9
+ This document defines the **required structure** for every skill in the PAI system.
10
+
11
+ **ALL skill creation MUST follow this structure.**
12
+
13
+ **"Canonicalize a skill"** = Restructure it to match this exact format, including TitleCase naming.
14
+
15
+ ---
16
+
17
+ ## TitleCase Naming Convention (MANDATORY)
18
+
19
+ **All naming in the skill system MUST use TitleCase (PascalCase).**
20
+
21
+ | Component | Wrong | Correct |
22
+ |-----------|-------|---------|
23
+ | Skill directory | `createskill`, `create-skill` | `Createskill` |
24
+ | Workflow files | `create.md`, `update-info.md` | `Create.md`, `UpdateInfo.md` |
25
+ | Reference docs | `prosody-guide.md` | `ProsodyGuide.md` |
26
+ | Tool files | `manage-server.ts` | `ManageServer.ts` |
27
+ | YAML name | `name: create-skill` | `name: Createskill` |
28
+
29
+ **Exception:** `SKILL.md` is always uppercase (convention for the main skill file).
30
+
31
+ ---
32
+
33
+ ## The Required Structure
34
+
35
+ Every SKILL.md has two parts:
36
+
37
+ ### 1. YAML Frontmatter (Single-Line Description)
38
+
39
+ ```yaml
40
+ ---
41
+ name: SkillName
42
+ description: [What it does]. USE WHEN [intent triggers using OR]. [Additional capabilities].
43
+ ---
44
+ ```
45
+
46
+ **Rules:**
47
+ - `name` uses **TitleCase**
48
+ - `description` is a **single line** (not multi-line with `|`)
49
+ - `USE WHEN` keyword is **MANDATORY** (Claude Code parses this for skill activation)
50
+ - Use intent-based triggers with `OR` for multiple conditions
51
+ - Max 1024 characters (Anthropic hard limit)
52
+
53
+ ### 2. Markdown Body
54
+
55
+ ```markdown
56
+ # SkillName
57
+
58
+ [Brief description]
59
+
60
+ ## Workflow Routing
61
+
62
+ **When executing a workflow, do BOTH of these:**
63
+
64
+ 1. **Call the notification script** (for observability tracking):
65
+ ```bash
66
+ ~/.claude/Tools/SkillWorkflowNotification WORKFLOWNAME SKILLNAME
67
+ ```
68
+
69
+ 2. **Output the text notification** (for user visibility):
70
+ ```
71
+ Running the **WorkflowName** workflow from the **SKILLNAME** skill...
72
+ ```
73
+
74
+ This ensures workflows appear in the observability dashboard AND the user sees the announcement.
75
+
76
+ | Workflow | Trigger | File |
77
+ |----------|---------|------|
78
+ | **WorkflowOne** | "trigger phrase" | `workflows/WorkflowOne.md` |
79
+ | **WorkflowTwo** | "another trigger" | `workflows/WorkflowTwo.md` |
80
+
81
+ ## Examples
82
+
83
+ **Example 1: [Common use case]**
84
+ ```
85
+ User: "[Typical user request]"
86
+ → Invokes WorkflowOne workflow
87
+ → [What skill does]
88
+ → [What user gets back]
89
+ ```
90
+
91
+ ## [Additional Sections]
92
+ ```
93
+
94
+ ---
95
+
96
+ ## Examples Section (REQUIRED)
97
+
98
+ **Every skill MUST have an `## Examples` section** showing 2-3 concrete usage patterns.
99
+
100
+ **Why Examples Matter:**
101
+ - Anthropic research shows examples improve tool selection accuracy from 72% to 90%
102
+ - Descriptions tell Claude WHEN to activate; examples show HOW the skill works
103
+
104
+ **Example Format:**
105
+ ```markdown
106
+ ## Examples
107
+
108
+ **Example 1: [Use case name]**
109
+ ```
110
+ User: "[Actual user request]"
111
+ → Invokes WorkflowName workflow
112
+ → [What the skill does]
113
+ → [What user receives back]
114
+ ```
115
+ ```
116
+
117
+ ---
118
+
119
+ ## Intent Matching, Not String Matching
120
+
121
+ We use **intent matching**, not exact phrase matching.
122
+
123
+ **Example description:**
124
+ ```yaml
125
+ description: Complete blog workflow. USE WHEN user mentions doing anything with their blog, website, site, including things like update, proofread, write, edit, publish, preview, blog posts, or website pages.
126
+ ```
127
+
128
+ **Key Principles:**
129
+ - Use intent language: "user mentions", "user wants to", "including things like"
130
+ - Don't list exact phrases in quotes
131
+ - Cover the domain conceptually
132
+ - Use `OR` to combine multiple trigger conditions
133
+
134
+ ---
135
+
136
+ ## Directory Structure
137
+
138
+ Every skill follows this structure:
139
+
140
+ ```
141
+ SkillName/ # TitleCase directory name
142
+ ├── SKILL.md # Main skill file (always uppercase)
143
+ ├── ReferenceDoc.md # Optional: Reference docs (TitleCase)
144
+ ├── tools/ # CLI tools (ALWAYS present, even if empty)
145
+ │ ├── ToolName.ts # TypeScript CLI tool (TitleCase)
146
+ │ └── ToolName.help.md # Tool documentation (TitleCase)
147
+ └── workflows/
148
+ ├── Create.md # Work execution workflow (TitleCase)
149
+ └── Update.md # Work execution workflow (TitleCase)
150
+ ```
151
+
152
+ ---
153
+
154
+ ## Workflows vs Reference Documentation
155
+
156
+ **CRITICAL DISTINCTION:**
157
+
158
+ ### Workflows (`workflows/` directory)
159
+ - Operational procedures (create, update, delete, deploy)
160
+ - Step-by-step execution instructions
161
+ - Actions that change state or produce output
162
+ - Things you "run" or "execute"
163
+
164
+ ### Reference Documentation (skill root)
165
+ - Guides and how-to documentation
166
+ - Specifications and schemas
167
+ - Information you "read" or "reference"
168
+
169
+ ---
170
+
171
+ ## Complete Checklist
172
+
173
+ Before a skill is complete:
174
+
175
+ ### Naming (TitleCase)
176
+ - [ ] Skill directory uses TitleCase
177
+ - [ ] All workflow files use TitleCase
178
+ - [ ] All reference docs use TitleCase
179
+ - [ ] YAML `name:` uses TitleCase
180
+
181
+ ### YAML Frontmatter
182
+ - [ ] Single-line description with embedded `USE WHEN` clause
183
+ - [ ] No separate `triggers:` or `workflows:` arrays
184
+ - [ ] Description under 1024 characters
185
+
186
+ ### Markdown Body
187
+ - [ ] `## Workflow Routing` section with table format
188
+ - [ ] `## Examples` section with 2-3 concrete patterns
189
+ - [ ] All workflows have routing entries
190
+
191
+ ### Structure
192
+ - [ ] `tools/` directory exists (even if empty)
193
+ - [ ] No `backups/` directory inside skill
194
+ - [ ] Workflows contain ONLY execution procedures
195
+ - [ ] Reference docs live at skill root
196
+
197
+ ---
198
+
199
+ ## Summary
200
+
201
+ | Component | Purpose | Naming |
202
+ |-----------|---------|--------|
203
+ | **Skill directory** | Contains all skill files | TitleCase (e.g., `Blogging`) |
204
+ | **SKILL.md** | Main skill file | Always uppercase |
205
+ | **Workflow files** | Execution procedures | TitleCase (e.g., `Create.md`) |
206
+ | **Reference docs** | Information to read | TitleCase (e.g., `ApiReference.md`) |
207
+ | **Tool files** | CLI automation | TitleCase (e.g., `ManageServer.ts`) |
208
+
209
+ This system ensures:
210
+ 1. Skills invoke properly based on intent (USE WHEN in description)
211
+ 2. Specific functionality executes accurately (Workflow Routing in body)
212
+ 3. All skills have consistent, predictable structure
213
+ 4. **All naming follows TitleCase convention**
@@ -0,0 +1,119 @@
1
+ # Terminal Tab Title System
2
+
3
+ ## Overview
4
+
5
+ The PAI system automatically updates your terminal tab title with a 4-word summary of what was done after each task completion. This provides instant visual feedback in your terminal tabs, making it easy to see what each Claude session accomplished.
6
+
7
+ ## How It Works
8
+
9
+ The `stop-hook.ts` hook runs after every task completion and:
10
+
11
+ 1. **Extracts the task summary** from the COMPLETED line in responses
12
+ 2. **Generates a 4-word title** that summarizes what was accomplished
13
+ 3. **Updates your terminal tab** using ANSI escape sequences
14
+
15
+ ## Features
16
+
17
+ ### 4-Word Summary Format
18
+
19
+ The system creates meaningful 4-word summaries by:
20
+ - Using past-tense action verbs (Created, Updated, Fixed, etc.)
21
+ - Extracting key nouns from the task
22
+ - Prioritizing words from the COMPLETED line when available
23
+ - Falling back to the user's original query if needed
24
+
25
+ ### Examples
26
+
27
+ | User Query | Tab Title |
28
+ |------------|-----------|
29
+ | "Update the README documentation" | Updated Readme Documentation Done |
30
+ | "Fix the stop-hook" | Fixed Stop Hook Successfully |
31
+ | "Send email to Angela" | Sent Email Angela Done |
32
+ | "Research AI trends" | Researched AI Trends Complete |
33
+
34
+ ## Terminal Compatibility
35
+
36
+ The tab title system works with terminals that support OSC (Operating System Command) sequences:
37
+
38
+ - **Kitty** - Full support
39
+ - **iTerm2** - Full support
40
+ - **Terminal.app** - Full support
41
+ - **Alacritty** - Full support
42
+ - **VS Code Terminal** - Full support
43
+
44
+ ## Implementation Details
45
+
46
+ ### Escape Sequences Used
47
+
48
+ ```bash
49
+ # OSC 0 - Sets icon and window title
50
+ printf '\033]0;Title Here\007'
51
+
52
+ # OSC 2 - Sets window title
53
+ printf '\033]2;Title Here\007'
54
+
55
+ # OSC 30 - Kitty-specific tab title
56
+ printf '\033]30;Title Here\007'
57
+ ```
58
+
59
+ ### Hook Location
60
+
61
+ The terminal tab functionality is implemented in:
62
+ ```
63
+ ${PAI_DIR}/Hooks/stop-hook.ts
64
+ ```
65
+
66
+ ### Key Functions
67
+
68
+ 1. **generateTabTitle(prompt, completedLine)** - Creates the 4-word summary
69
+ 2. **setKittyTabTitle(title)** - Sends escape sequences to update the tab
70
+ 3. **Hook execution** - Runs automatically after every task
71
+
72
+ ## Debugging
73
+
74
+ If tab titles aren't updating:
75
+
76
+ 1. **Check hook is executable:**
77
+ ```bash
78
+ ls -la ${PAI_DIR}/Hooks/stop-hook.ts
79
+ # Should show: -rwxr-xr-x
80
+ ```
81
+
82
+ 2. **Verify Claude Code settings:**
83
+ - Ensure stop-hook is configured in your Claude Code settings
84
+ - Path should be: `${PAI_DIR}/Hooks/stop-hook.ts`
85
+
86
+ 3. **Test manually:**
87
+ ```bash
88
+ printf '\033]0;Test Title\007' >&2
89
+ ```
90
+
91
+ 4. **Check stderr output:**
92
+ The hook logs to stderr with:
93
+ - 🏷️ Tab title changes
94
+ - 📝 User queries processed
95
+ - ✅ Completed text extracted
96
+
97
+ ## Customization
98
+
99
+ To modify the tab title behavior, edit `${PAI_DIR}/Hooks/stop-hook.ts`:
100
+
101
+ - Change word count (currently 4 words)
102
+ - Modify verb tense (currently past tense)
103
+ - Add custom prefixes or suffixes
104
+ - Filter different stop words
105
+
106
+ ## Benefits
107
+
108
+ - **Visual Task Tracking** - See what each tab accomplished at a glance
109
+ - **Multi-Session Management** - Easily identify different Claude sessions
110
+ - **Task History** - Tab titles persist as a record of completed work
111
+ - **No Manual Updates** - Fully automatic, runs on every task completion
112
+
113
+ ## Integration with Voice System
114
+
115
+ The terminal tab system works alongside the voice notification system:
116
+ - Both extract information from the COMPLETED line
117
+ - Tab gets a 4-word visual summary
118
+ - Voice speaks the completion message
119
+ - Both provide immediate feedback through different channels
@@ -0,0 +1,106 @@
1
+ # Voice System Reference
2
+
3
+ **This file is a reference pointer to the canonical voice system documentation.**
4
+
5
+ ---
6
+
7
+ ## 📍 Canonical Documentation Location
8
+
9
+ **All voice system documentation is maintained in the voice-server directory:**
10
+
11
+ `${PAI_DIR}/voice-server/`
12
+
13
+ ---
14
+
15
+ ## 📚 Voice Server Documentation
16
+
17
+ ### Overview and Setup
18
+ **Location:** `${PAI_DIR}/voice-server/README.md`
19
+
20
+ **Contains:**
21
+ - Voice server overview and features
22
+ - Installation and setup instructions
23
+ - Service management (start/stop/restart)
24
+ - API usage and endpoints
25
+ - Voice IDs for all agents
26
+ - Menu bar indicator setup
27
+ - Configuration options
28
+ - Troubleshooting guide
29
+
30
+ ### Quick Start
31
+ **Location:** `${PAI_DIR}/voice-server/QUICKSTART.md`
32
+
33
+ **Contains:**
34
+ - 5-minute setup guide
35
+ - Minimal configuration steps
36
+ - Basic testing commands
37
+
38
+ ---
39
+
40
+ ## 🎯 Quick Reference
41
+
42
+ **Start voice server:**
43
+ ```bash
44
+ ${PAI_DIR}/voice-server/start.sh
45
+ ```
46
+
47
+ **Check status:**
48
+ ```bash
49
+ ${PAI_DIR}/voice-server/status.sh
50
+ ```
51
+
52
+ **Restart server:**
53
+ ```bash
54
+ ${PAI_DIR}/voice-server/restart.sh
55
+ ```
56
+
57
+ **Stop server:**
58
+ ```bash
59
+ ${PAI_DIR}/voice-server/stop.sh
60
+ ```
61
+
62
+ **Test voice:**
63
+ ```bash
64
+ curl -X POST http://localhost:8888/notify \
65
+ -H "Content-Type: application/json" \
66
+ -d '{"message":"Test message","voice_enabled":true}'
67
+ ```
68
+
69
+ ---
70
+
71
+ ## 🎤 Available Voice IDs
72
+
73
+ Voice IDs are ElevenLabs identifiers configured in hook files:
74
+
75
+ | Agent | Voice ID | Description |
76
+ |-------|----------|-------------|
77
+ | PAI (Main) | s3TPKV1kjDlVtZbl4Ksh | UK Male - Professional |
78
+ | Researcher | AXdMgz6evoL7OPd7eU12 | US Female - Analytical |
79
+ | Engineer | fATgBRI8wg5KkDFg8vBd | US Female - Steady |
80
+ | Architect | muZKMsIDGYtIkjjiUS82 | UK Female - Strategic |
81
+ | Designer | ZF6FPAbjXT4488VcRRnw | Indian Female - Creative |
82
+ | Pentester | xvHLFjaUEpx4BOf7EiDd | UK Male - Technical |
83
+
84
+ See `${PAI_DIR}/voice-server/README.md` for complete voice list.
85
+
86
+ ---
87
+
88
+ ## 🔗 Related Documentation
89
+
90
+ - **Prosody Guide:** `${PAI_DIR}/Skills/CORE/ProsodyGuide.md` (voice parameter tuning)
91
+ - **Agent Template:** `${PAI_DIR}/Skills/CORE/ProsodyAgentTemplate.md` (creating agent voices)
92
+
93
+ ---
94
+
95
+ ## ⚠️ Important
96
+
97
+ **DO NOT duplicate voice documentation in CORE.**
98
+
99
+ - The voice-server directory is the **canonical source** for all voice system documentation
100
+ - Duplicating documentation causes version conflicts and maintenance issues
101
+ - Always refer to and update voice-server documentation directly
102
+ - This reference file should only contain pointers, not duplicated content
103
+
104
+ ---
105
+
106
+ **Last Updated:** 2025-12-01
File without changes
@@ -1 +0,0 @@
1
- {"version":3,"file":"db-4lSqLFb8.mjs","names":[],"sources":["../src/registry/schema.ts","../src/registry/db.ts"],"sourcesContent":["/**\n * SQLite DDL for the PAI registry database.\n *\n * Tables:\n * - projects — tracked project directories with type and status\n * - sessions — per-project session notes\n * - tags — normalised tag vocabulary\n * - project_tags — M:N join between projects and tags\n * - session_tags — M:N join between sessions and tags\n * - aliases — alternative slugs that resolve to a project\n * - compaction_log — audit trail for context-compaction events\n * - schema_version — single-row migration version tracking\n */\n\nimport type { Database } from \"better-sqlite3\";\n\nexport const SCHEMA_VERSION = 3;\n\nexport const CREATE_TABLES_SQL = `\nPRAGMA journal_mode = WAL;\nPRAGMA foreign_keys = ON;\n\nCREATE TABLE IF NOT EXISTS projects (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n slug TEXT NOT NULL UNIQUE,\n display_name TEXT NOT NULL,\n root_path TEXT NOT NULL UNIQUE,\n encoded_dir TEXT NOT NULL UNIQUE,\n type TEXT NOT NULL DEFAULT 'local'\n CHECK(type IN ('local','central','obsidian-linked','external')),\n status TEXT NOT NULL DEFAULT 'active'\n CHECK(status IN ('active','archived','migrating')),\n parent_id INTEGER,\n obsidian_link TEXT,\n claude_notes_dir TEXT,\n created_at INTEGER NOT NULL,\n updated_at INTEGER NOT NULL,\n archived_at INTEGER,\n FOREIGN KEY (parent_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS sessions (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n project_id INTEGER NOT NULL,\n number INTEGER NOT NULL,\n date TEXT NOT NULL,\n slug TEXT NOT NULL,\n title TEXT NOT NULL,\n filename TEXT NOT NULL,\n status TEXT NOT NULL DEFAULT 'open'\n CHECK(status IN ('open','completed','compacted')),\n claude_session_id TEXT,\n token_count INTEGER,\n created_at INTEGER NOT NULL,\n closed_at INTEGER,\n UNIQUE (project_id, number),\n FOREIGN KEY (project_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS tags (\n id 
INTEGER PRIMARY KEY AUTOINCREMENT,\n name TEXT NOT NULL UNIQUE\n);\n\nCREATE TABLE IF NOT EXISTS project_tags (\n project_id INTEGER NOT NULL,\n tag_id INTEGER NOT NULL,\n PRIMARY KEY (project_id, tag_id),\n FOREIGN KEY (project_id) REFERENCES projects(id),\n FOREIGN KEY (tag_id) REFERENCES tags(id)\n);\n\nCREATE TABLE IF NOT EXISTS session_tags (\n session_id INTEGER NOT NULL,\n tag_id INTEGER NOT NULL,\n PRIMARY KEY (session_id, tag_id),\n FOREIGN KEY (session_id) REFERENCES sessions(id),\n FOREIGN KEY (tag_id) REFERENCES tags(id)\n);\n\nCREATE TABLE IF NOT EXISTS aliases (\n alias TEXT PRIMARY KEY,\n project_id INTEGER NOT NULL,\n FOREIGN KEY (project_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS compaction_log (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n project_id INTEGER NOT NULL,\n session_id INTEGER,\n trigger TEXT NOT NULL\n CHECK(trigger IN ('precompact','manual','end-session')),\n files_written TEXT NOT NULL,\n token_count INTEGER,\n created_at INTEGER NOT NULL,\n FOREIGN KEY (project_id) REFERENCES projects(id),\n FOREIGN KEY (session_id) REFERENCES sessions(id)\n);\n\nCREATE TABLE IF NOT EXISTS links (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id INTEGER NOT NULL,\n target_project_id INTEGER NOT NULL,\n link_type TEXT NOT NULL DEFAULT 'related'\n CHECK(link_type IN ('related','follow-up','reference')),\n created_at INTEGER NOT NULL,\n UNIQUE (session_id, target_project_id),\n FOREIGN KEY (session_id) REFERENCES sessions(id),\n FOREIGN KEY (target_project_id) REFERENCES projects(id)\n);\n\nCREATE TABLE IF NOT EXISTS schema_version (\n version INTEGER PRIMARY KEY,\n applied_at INTEGER NOT NULL\n);\n\n-- Indexes\nCREATE INDEX IF NOT EXISTS idx_projects_slug ON projects(slug);\nCREATE INDEX IF NOT EXISTS idx_projects_status ON projects(status);\nCREATE INDEX IF NOT EXISTS idx_projects_type ON projects(type);\nCREATE INDEX IF NOT EXISTS idx_sessions_project ON sessions(project_id);\nCREATE INDEX IF NOT EXISTS idx_sessions_date ON 
sessions(date);\nCREATE INDEX IF NOT EXISTS idx_sessions_status ON sessions(status);\nCREATE INDEX IF NOT EXISTS idx_sessions_claude ON sessions(claude_session_id);\nCREATE INDEX IF NOT EXISTS idx_pc_project ON project_tags(project_id);\n`;\n\n/**\n * Run the full DDL against an open database connection.\n *\n * The function is idempotent — every statement uses IF NOT EXISTS so it is\n * safe to call on an already-initialised database. After creating the tables\n * it inserts the current SCHEMA_VERSION into schema_version if no row exists\n * yet.\n */\nexport function initializeSchema(db: Database): void {\n // better-sqlite3's exec() runs multiple semicolon-separated statements\n db.exec(CREATE_TABLES_SQL);\n\n const row = db\n .prepare(\"SELECT version FROM schema_version WHERE version = ?\")\n .get(SCHEMA_VERSION);\n\n if (!row) {\n db.prepare(\n \"INSERT INTO schema_version (version, applied_at) VALUES (?, ?)\"\n ).run(SCHEMA_VERSION, Date.now());\n }\n}\n\n/**\n * Apply incremental schema migrations to an already-initialised database.\n *\n * Each migration is guarded by a version check so it is safe to call on\n * databases at any schema version — already-applied migrations are skipped.\n */\nexport function runMigrations(db: Database): void {\n const currentRow = db\n .prepare(\"SELECT version FROM schema_version ORDER BY version DESC LIMIT 1\")\n .get() as { version: number } | undefined;\n\n const current = currentRow?.version ?? 0;\n\n // Migration v1 → v2: add claude_notes_dir column to projects\n if (current < 2) {\n db.transaction(() => {\n // Use a try/catch so re-running on a DB that already has the column is safe\n try {\n db.exec(\"ALTER TABLE projects ADD COLUMN claude_notes_dir TEXT\");\n } catch {\n // Column may already exist (e.g. 
fresh DB created with v2 DDL)\n }\n db.prepare(\n \"INSERT OR REPLACE INTO schema_version (version, applied_at) VALUES (?, ?)\"\n ).run(2, Date.now());\n })();\n }\n\n // Migration v2 → v3: add links table for cross-project session references\n if (current < 3) {\n db.transaction(() => {\n try {\n db.exec(`\n CREATE TABLE IF NOT EXISTS links (\n id INTEGER PRIMARY KEY AUTOINCREMENT,\n session_id INTEGER NOT NULL,\n target_project_id INTEGER NOT NULL,\n link_type TEXT NOT NULL DEFAULT 'related'\n CHECK(link_type IN ('related','follow-up','reference')),\n created_at INTEGER NOT NULL,\n UNIQUE (session_id, target_project_id),\n FOREIGN KEY (session_id) REFERENCES sessions(id),\n FOREIGN KEY (target_project_id) REFERENCES projects(id)\n )\n `);\n } catch {\n // Table may already exist (fresh DB created with v3 DDL)\n }\n db.prepare(\n \"INSERT OR REPLACE INTO schema_version (version, applied_at) VALUES (?, ?)\"\n ).run(3, Date.now());\n })();\n }\n}\n","/**\n * Database connection helper for the PAI registry.\n *\n * Uses better-sqlite3 (synchronous API) to open or create registry.db.\n * On first open it runs the full DDL via initializeSchema().\n */\n\nimport { mkdirSync } from \"node:fs\";\nimport { homedir } from \"node:os\";\nimport { dirname, join } from \"node:path\";\nimport BetterSqlite3 from \"better-sqlite3\";\nimport type { Database } from \"better-sqlite3\";\nimport { initializeSchema, runMigrations } from \"./schema.js\";\n\nexport type { Database };\n\n/** Default registry path inside the ~/.pai/ directory. */\nconst DEFAULT_REGISTRY_PATH = join(homedir(), \".pai\", \"registry.db\");\n\n/**\n * Open (or create) the PAI registry database.\n *\n * @param path Absolute path to registry.db. 
Defaults to ~/.pai/registry.db.\n * @returns An open better-sqlite3 Database instance.\n *\n * Side effects on first call:\n * - Creates the parent directory if it does not exist.\n * - Enables WAL journal mode.\n * - Runs initializeSchema() if schema_version is empty.\n */\nexport function openRegistry(path: string = DEFAULT_REGISTRY_PATH): Database {\n // Ensure the directory exists before SQLite tries to create the file\n mkdirSync(dirname(path), { recursive: true });\n\n const db = new BetterSqlite3(path);\n\n // WAL gives better concurrent read performance and crash safety\n db.pragma(\"journal_mode = WAL\");\n db.pragma(\"foreign_keys = ON\");\n\n // Check whether the schema has been applied before\n const tableExists = db\n .prepare(\n `SELECT name FROM sqlite_master\n WHERE type = 'table' AND name = 'schema_version'`\n )\n .get();\n\n if (!tableExists) {\n // Brand-new database — apply the full schema\n initializeSchema(db);\n } else {\n const row = db\n .prepare(\"SELECT version FROM schema_version LIMIT 1\")\n .get() as { version: number } | undefined;\n\n if (!row) {\n // Table exists but is empty — apply schema (handles partial init)\n initializeSchema(db);\n }\n }\n\n // Apply any pending incremental migrations\n runMigrations(db);\n\n return 
db;\n}\n"],"mappings":";;;;;;;AAgBA,MAAa,iBAAiB;AAE9B,MAAa,oBAAoB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAqHjC,SAAgB,iBAAiB,IAAoB;AAEnD,IAAG,KAAK,kBAAkB;AAM1B,KAAI,CAJQ,GACT,QAAQ,uDAAuD,CAC/D,IAAI,eAAe,CAGpB,IAAG,QACD,iEACD,CAAC,IAAI,gBAAgB,KAAK,KAAK,CAAC;;;;;;;;AAUrC,SAAgB,cAAc,IAAoB;CAKhD,MAAM,UAJa,GAChB,QAAQ,mEAAmE,CAC3E,KAAK,EAEoB,WAAW;AAGvC,KAAI,UAAU,EACZ,IAAG,kBAAkB;AAEnB,MAAI;AACF,MAAG,KAAK,wDAAwD;UAC1D;AAGR,KAAG,QACD,4EACD,CAAC,IAAI,GAAG,KAAK,KAAK,CAAC;GACpB,EAAE;AAIN,KAAI,UAAU,EACZ,IAAG,kBAAkB;AACnB,MAAI;AACF,MAAG,KAAK;;;;;;;;;;;;UAYN;UACI;AAGR,KAAG,QACD,4EACD,CAAC,IAAI,GAAG,KAAK,KAAK,CAAC;GACpB,EAAE;;;;;;;;;;;;;ACxLR,MAAM,wBAAwB,KAAK,SAAS,EAAE,QAAQ,cAAc;;;;;;;;;;;;AAapE,SAAgB,aAAa,OAAe,uBAAiC;AAE3E,WAAU,QAAQ,KAAK,EAAE,EAAE,WAAW,MAAM,CAAC;CAE7C,MAAM,KAAK,IAAI,cAAc,KAAK;AAGlC,IAAG,OAAO,qBAAqB;AAC/B,IAAG,OAAO,oBAAoB;AAU9B,KAAI,CAPgB,GACjB,QACC;yDAED,CACA,KAAK,CAIN,kBAAiB,GAAG;UAMhB,CAJQ,GACT,QAAQ,6CAA6C,CACrD,KAAK,CAIN,kBAAiB,GAAG;AAKxB,eAAc,GAAG;AAEjB,QAAO"}
@@ -1 +0,0 @@
1
- {"version":3,"file":"vault-indexer-DXWs9pDn.mjs","names":[],"sources":["../src/memory/vault-indexer.ts"],"sourcesContent":["/**\n * Vault indexer for the PAI federation memory engine.\n *\n * Indexes an entire Obsidian vault (or any markdown knowledge base), following\n * symlinks, deduplicating files by inode, parsing wikilinks, and computing\n * per-file health metrics (orphan detection, dead links).\n *\n * Key differences from the project indexer (indexer.ts):\n * - Follows symbolic links (project indexer skips them)\n * - Deduplicates files with the same inode (same content reachable via multiple paths)\n * - Parses [[wikilinks]] and builds a directed link graph\n * - Resolves wikilinks using Obsidian's shortest-match algorithm\n * - Computes health metrics per file: inbound/outbound link counts, dead links, orphans\n */\n\nimport { createHash } from \"node:crypto\";\nimport { readFileSync, statSync, readdirSync, existsSync } from \"node:fs\";\nimport { join, relative, basename, dirname, normalize } from \"node:path\";\nimport type { Database } from \"better-sqlite3\";\nimport { chunkMarkdown } from \"./chunker.js\";\n\n// ---------------------------------------------------------------------------\n// Types\n// ---------------------------------------------------------------------------\n\nexport interface VaultFile {\n absPath: string;\n vaultRelPath: string;\n inode: number;\n device: number;\n}\n\nexport interface InodeGroup {\n canonical: VaultFile;\n aliases: VaultFile[];\n}\n\nexport interface ParsedLink {\n raw: string;\n alias: string | null;\n lineNumber: number;\n isEmbed: boolean;\n}\n\nexport interface VaultIndexResult {\n filesIndexed: number;\n chunksCreated: number;\n filesSkipped: number;\n aliasesRecorded: number;\n linksExtracted: number;\n deadLinksFound: number;\n orphansFound: number;\n elapsed: number;\n}\n\n// ---------------------------------------------------------------------------\n// Constants\n// 
---------------------------------------------------------------------------\n\n/** Maximum number of .md files to collect from a vault. */\nconst VAULT_MAX_FILES = 10_000;\n\n/** Maximum recursion depth for vault directory walks. */\nconst VAULT_MAX_DEPTH = 10;\n\n/** Number of files to process before yielding to the event loop. */\nconst VAULT_YIELD_EVERY = 10;\n\n/**\n * Directories to always skip, at any depth, during vault walks.\n * Includes standard build/VCS noise plus Obsidian-specific directories.\n */\nconst VAULT_SKIP_DIRS = new Set([\n // Version control\n \".git\",\n // Dependency directories (any language)\n \"node_modules\",\n \"vendor\",\n \"Pods\",\n // Build / compile output\n \"dist\",\n \"build\",\n \"out\",\n \"DerivedData\",\n \".next\",\n // Python virtual environments and caches\n \".venv\",\n \"venv\",\n \"__pycache__\",\n // General caches\n \".cache\",\n \".bun\",\n // Obsidian internals\n \".obsidian\",\n \".trash\",\n]);\n\n// ---------------------------------------------------------------------------\n// Utility\n// ---------------------------------------------------------------------------\n\nfunction sha256File(content: string): string {\n return createHash(\"sha256\").update(content).digest(\"hex\");\n}\n\nfunction chunkId(\n projectId: number,\n path: string,\n chunkIndex: number,\n startLine: number,\n endLine: number,\n): string {\n return createHash(\"sha256\")\n .update(`${projectId}:${path}:${chunkIndex}:${startLine}:${endLine}`)\n .digest(\"hex\");\n}\n\nfunction yieldToEventLoop(): Promise<void> {\n return new Promise((resolve) => setImmediate(resolve));\n}\n\n// ---------------------------------------------------------------------------\n// Vault directory walker (follows symlinks)\n// ---------------------------------------------------------------------------\n\n/**\n * Recursively collect all .md files under a vault root, following symlinks.\n *\n * Symlink-following behaviour:\n * - Symbolic links to files: followed if 
the target is a .md file\n * - Symbolic links to directories: followed with cycle detection via inode\n *\n * Cycle detection is based on the real inode of each visited directory.\n * Using the real stat (not lstat) ensures that symlinked dirs resolve to\n * their actual inode, preventing infinite loops.\n *\n * @param dir Directory to scan.\n * @param vaultRoot Absolute root of the vault (for computing vaultRelPath).\n * @param acc Shared accumulator (mutated in place for early exit).\n * @param visited Set of \"device:inode\" strings for visited directories.\n * @param depth Current recursion depth.\n */\nexport function walkVaultMdFiles(\n vaultRoot: string,\n opts?: { maxFiles?: number; maxDepth?: number },\n): VaultFile[] {\n const maxFiles = opts?.maxFiles ?? VAULT_MAX_FILES;\n const maxDepth = opts?.maxDepth ?? VAULT_MAX_DEPTH;\n\n const results: VaultFile[] = [];\n const visitedDirs = new Set<string>();\n\n function walk(dir: string, depth: number): void {\n if (results.length >= maxFiles) return;\n if (depth > maxDepth) return;\n\n // Get the real inode of this directory (follows symlinks on the dir itself)\n let dirStat: ReturnType<typeof statSync>;\n try {\n dirStat = statSync(dir);\n } catch {\n return; // Unreadable or broken symlink — skip\n }\n\n const dirKey = `${dirStat.dev}:${dirStat.ino}`;\n if (visitedDirs.has(dirKey)) return; // Cycle detected\n visitedDirs.add(dirKey);\n\n let entries: import(\"node:fs\").Dirent<string>[];\n try {\n entries = readdirSync(dir, { withFileTypes: true, encoding: \"utf8\" });\n } catch {\n return; // Unreadable directory — skip\n }\n\n for (const entry of entries) {\n if (results.length >= maxFiles) break;\n if (VAULT_SKIP_DIRS.has(entry.name)) continue;\n\n const full = join(dir, entry.name);\n\n if (entry.isSymbolicLink()) {\n // Follow the symlink — resolve to real target\n let targetStat: ReturnType<typeof statSync>;\n try {\n targetStat = statSync(full); // statSync follows symlinks\n } catch {\n continue; // 
Broken symlink — skip\n }\n\n if (targetStat.isDirectory()) {\n if (!VAULT_SKIP_DIRS.has(entry.name)) {\n walk(full, depth + 1);\n }\n } else if (targetStat.isFile() && entry.name.endsWith(\".md\")) {\n results.push({\n absPath: full,\n vaultRelPath: relative(vaultRoot, full),\n inode: targetStat.ino,\n device: targetStat.dev,\n });\n }\n } else if (entry.isDirectory()) {\n walk(full, depth + 1);\n } else if (entry.isFile() && entry.name.endsWith(\".md\")) {\n let fileStat: ReturnType<typeof statSync>;\n try {\n fileStat = statSync(full);\n } catch {\n continue;\n }\n results.push({\n absPath: full,\n vaultRelPath: relative(vaultRoot, full),\n inode: fileStat.ino,\n device: fileStat.dev,\n });\n }\n }\n }\n\n if (existsSync(vaultRoot)) {\n walk(vaultRoot, 0);\n }\n\n return results;\n}\n\n// ---------------------------------------------------------------------------\n// Inode deduplication\n// ---------------------------------------------------------------------------\n\n/**\n * Group vault files by inode identity (device + inode).\n *\n * Within each group, the canonical file is chosen as the one with the\n * fewest path separators (shallowest), breaking ties by shortest string.\n * All other group members become aliases.\n */\nexport function deduplicateByInode(files: VaultFile[]): InodeGroup[] {\n const groups = new Map<string, VaultFile[]>();\n\n for (const file of files) {\n const key = `${file.device}:${file.inode}`;\n const existing = groups.get(key);\n if (existing) {\n existing.push(file);\n } else {\n groups.set(key, [file]);\n }\n }\n\n const result: InodeGroup[] = [];\n\n for (const group of groups.values()) {\n if (group.length === 0) continue;\n\n // Sort: fewest path separators first, then shortest string\n const sorted = [...group].sort((a, b) => {\n const aDepth = (a.vaultRelPath.match(/\\//g) ?? []).length;\n const bDepth = (b.vaultRelPath.match(/\\//g) ?? 
[]).length;\n if (aDepth !== bDepth) return aDepth - bDepth;\n return a.vaultRelPath.length - b.vaultRelPath.length;\n });\n\n const [canonical, ...aliases] = sorted as [VaultFile, ...VaultFile[]];\n result.push({ canonical, aliases });\n }\n\n return result;\n}\n\n// ---------------------------------------------------------------------------\n// Wikilink parser\n// ---------------------------------------------------------------------------\n\n/**\n * Parse all [[wikilinks]] and ![[embeds]] from markdown content.\n *\n * Handles:\n * - Standard wikilinks: [[Target Note]]\n * - Aliased wikilinks: [[Target Note|Display Text]]\n * - Heading anchors: [[Target Note#Heading]] (stripped for resolution)\n * - Embeds: ![[Target Note]]\n * - Frontmatter wikilinks (YAML between --- delimiters)\n *\n * @param content Raw markdown file content.\n * @returns Array of parsed links in document order.\n */\nexport function parseWikilinks(content: string): ParsedLink[] {\n const links: ParsedLink[] = [];\n const lines = content.split(\"\\n\");\n\n // Determine frontmatter range (YAML between opening and closing ---)\n let frontmatterEnd = 0;\n if (content.startsWith(\"---\")) {\n const closingIdx = content.indexOf(\"\\n---\", 3);\n if (closingIdx !== -1) {\n frontmatterEnd = content.slice(0, closingIdx + 4).split(\"\\n\").length - 1;\n }\n }\n\n // Regex for [[wikilinks]] and ![[embeds]]\n const wikilinkRe = /(!?)\\[\\[([^\\]]+?)\\]\\]/g;\n\n for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {\n const line = lines[lineIdx]!;\n const lineNumber = lineIdx + 1; // 1-indexed\n\n wikilinkRe.lastIndex = 0;\n let match: RegExpExecArray | null;\n while ((match = wikilinkRe.exec(line)) !== null) {\n const isEmbed = match[1] === \"!\";\n const inner = match[2]!;\n\n // Split on first | for alias\n const pipeIdx = inner.indexOf(\"|\");\n const beforePipe = pipeIdx === -1 ? inner : inner.slice(0, pipeIdx);\n const alias = pipeIdx === -1 ? 
null : inner.slice(pipeIdx + 1);\n\n // Strip heading anchor (everything after #)\n const hashIdx = beforePipe.indexOf(\"#\");\n const raw = hashIdx === -1 ? beforePipe.trim() : beforePipe.slice(0, hashIdx).trim();\n\n if (!raw) continue; // Skip links with empty targets (e.g. [[#Heading]])\n\n // For frontmatter lines, mark as non-embed regardless of !\n const isFrontmatter = lineIdx < frontmatterEnd;\n links.push({\n raw,\n alias: alias?.trim() ?? null,\n lineNumber,\n isEmbed: isEmbed && !isFrontmatter,\n });\n }\n }\n\n return links;\n}\n\n// ---------------------------------------------------------------------------\n// Name index builder\n// ---------------------------------------------------------------------------\n\n/**\n * Build a name lookup index for Obsidian wikilink resolution.\n *\n * Maps lowercase filename (without .md extension) to all vault-relative paths\n * that share that name. Includes both canonical paths and alias paths so that\n * wikilinks resolve regardless of which path the file is accessed through.\n */\nexport function buildNameIndex(files: VaultFile[]): Map<string, string[]> {\n const index = new Map<string, string[]>();\n\n for (const file of files) {\n const name = basename(file.vaultRelPath, \".md\").toLowerCase();\n const existing = index.get(name);\n if (existing) {\n existing.push(file.vaultRelPath);\n } else {\n index.set(name, [file.vaultRelPath]);\n }\n }\n\n return index;\n}\n\n// ---------------------------------------------------------------------------\n// Wikilink resolver\n// ---------------------------------------------------------------------------\n\n/**\n * Resolve a wikilink target to a vault-relative path using Obsidian's rules.\n *\n * Resolution algorithm:\n * 1. If raw contains \"/\", attempt exact path match (with and without .md).\n * 2. Normalize: lowercase the raw target, strip .md extension.\n * 3. Look up in the name index (all files with that basename).\n * 4. If exactly one match, return it.\n * 5. 
If multiple matches, pick the one closest to the source file\n * (longest common directory prefix, then shortest overall path).\n * 6. If no matches, return null (dead link).\n *\n * @param raw The raw link target (heading-stripped, pipe-stripped).\n * @param nameIndex Map from lowercase basename-without-ext to vault paths.\n * @param sourcePath Vault-relative path of the file containing the link.\n * @returns Vault-relative path of the resolved target, or null.\n */\nexport function resolveWikilink(\n raw: string,\n nameIndex: Map<string, string[]>,\n sourcePath: string,\n): string | null {\n if (!raw) return null;\n\n // Case 1: path contains \"/\" — try exact match with and without .md\n if (raw.includes(\"/\")) {\n const normalized = normalize(raw);\n const normalizedMd = normalized.endsWith(\".md\") ? normalized : normalized + \".md\";\n\n // Check if any indexed path matches (case-insensitive for macOS compatibility)\n for (const [, paths] of nameIndex) {\n for (const p of paths) {\n if (p === normalizedMd || p === normalized) return p;\n if (p.toLowerCase() === normalizedMd.toLowerCase()) return p;\n }\n }\n // Fall through to name lookup in case the path prefix was wrong\n }\n\n // Normalize the raw target for name lookup.\n // Use the basename only — Obsidian resolves by filename, not full path.\n // E.g. 
\"PAI/20-webseiten/_20-webseiten-master\" → \"_20-webseiten-master\"\n const rawBase = basename(raw)\n .replace(/\\.md$/i, \"\")\n .toLowerCase()\n .trim();\n\n if (!rawBase) return null;\n\n const candidates = nameIndex.get(rawBase);\n\n if (!candidates || candidates.length === 0) {\n return null; // Dead link\n }\n\n if (candidates.length === 1) {\n return candidates[0]!;\n }\n\n // Multiple matches — pick the one closest to the source file\n const sourceDir = dirname(sourcePath);\n\n let bestPath: string | null = null;\n let bestPrefixLen = -1;\n let bestPathLen = Infinity;\n\n for (const candidate of candidates) {\n const candidateDir = dirname(candidate);\n const prefixLen = commonPrefixLength(sourceDir, candidateDir);\n const pathLen = candidate.length;\n\n if (\n prefixLen > bestPrefixLen ||\n (prefixLen === bestPrefixLen && pathLen < bestPathLen)\n ) {\n bestPrefixLen = prefixLen;\n bestPathLen = pathLen;\n bestPath = candidate;\n }\n }\n\n return bestPath;\n}\n\n/**\n * Compute the length of the common prefix between two directory paths,\n * measured in path segments (not raw characters).\n *\n * Example: \"a/b/c\" and \"a/b/d\" → 2 (common: \"a\", \"b\")\n */\nfunction commonPrefixLength(a: string, b: string): number {\n if (a === \".\" && b === \".\") return 0;\n const aParts = a === \".\" ? [] : a.split(\"/\");\n const bParts = b === \".\" ? [] : b.split(\"/\");\n let count = 0;\n const len = Math.min(aParts.length, bParts.length);\n for (let i = 0; i < len; i++) {\n if (aParts[i] === bParts[i]) {\n count++;\n } else {\n break;\n }\n }\n return count;\n}\n\n// ---------------------------------------------------------------------------\n// Main vault indexing orchestrator\n// ---------------------------------------------------------------------------\n\n/**\n * Index an entire Obsidian vault (or markdown knowledge base) into the\n * federation database.\n *\n * Steps:\n * 1. Walk vault root, following symlinks.\n * 2. 
Deduplicate by inode — each unique file is indexed once.\n * 3. Build a name index for wikilink resolution.\n * 4. For each canonical file:\n * a. SHA-256 hash for change detection — skip unchanged files.\n * b. Read content, chunk with chunkMarkdown().\n * c. Insert chunks into memory_chunks and memory_fts.\n * d. Upsert vault_files row.\n * 5. Record aliases in vault_aliases.\n * 6. Rebuild vault_name_index table.\n * 7. Rebuild vault_links:\n * a. Parse [[wikilinks]] from each canonical file.\n * b. Resolve each link with resolveWikilink().\n * c. Insert into vault_links.\n * 8. Compute and upsert health metrics (vault_health).\n * 9. Return statistics.\n *\n * @param db Open federation database.\n * @param vaultProjectId Registry project ID for the vault \"project\".\n * @param vaultRoot Absolute path to the vault root directory.\n */\nexport async function indexVault(\n db: Database,\n vaultProjectId: number,\n vaultRoot: string,\n): Promise<VaultIndexResult> {\n const startTime = Date.now();\n\n const result: VaultIndexResult = {\n filesIndexed: 0,\n chunksCreated: 0,\n filesSkipped: 0,\n aliasesRecorded: 0,\n linksExtracted: 0,\n deadLinksFound: 0,\n orphansFound: 0,\n elapsed: 0,\n };\n\n // ---------------------------------------------------------------------------\n // Step 1: Walk vault, collecting all .md files (follows symlinks)\n // ---------------------------------------------------------------------------\n\n const allFiles = walkVaultMdFiles(vaultRoot);\n\n // ---------------------------------------------------------------------------\n // Step 2: Deduplicate by inode\n // ---------------------------------------------------------------------------\n\n const inodeGroups = deduplicateByInode(allFiles);\n\n // ---------------------------------------------------------------------------\n // Step 3: Build name index (from all files including aliases, for resolution)\n // ---------------------------------------------------------------------------\n\n 
const nameIndex = buildNameIndex(allFiles);\n\n // ---------------------------------------------------------------------------\n // Step 4: Prepare SQL statements\n // ---------------------------------------------------------------------------\n\n const selectFileHash = db.prepare(\n \"SELECT hash FROM vault_files WHERE vault_path = ?\",\n );\n\n const deleteOldChunkIds = db.prepare(\n \"SELECT id FROM memory_chunks WHERE project_id = ? AND path = ?\",\n );\n\n const deleteFts = db.prepare(\"DELETE FROM memory_fts WHERE id = ?\");\n\n const deleteChunks = db.prepare(\n \"DELETE FROM memory_chunks WHERE project_id = ? AND path = ?\",\n );\n\n const insertChunk = db.prepare(`\n INSERT INTO memory_chunks (id, project_id, source, tier, path, start_line, end_line, hash, text, updated_at)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const insertFts = db.prepare(`\n INSERT INTO memory_fts (text, id, project_id, path, source, tier, start_line, end_line)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?)\n `);\n\n const upsertVaultFile = db.prepare(`\n INSERT INTO vault_files (vault_path, inode, device, hash, title, indexed_at)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(vault_path) DO UPDATE SET\n inode = excluded.inode,\n device = excluded.device,\n hash = excluded.hash,\n title = excluded.title,\n indexed_at = excluded.indexed_at\n `);\n\n // ---------------------------------------------------------------------------\n // Step 4 (cont.): Index each canonical file\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n let filesSinceYield = 0;\n\n for (const group of inodeGroups) {\n // Yield periodically to keep the IPC server responsive\n if (filesSinceYield >= VAULT_YIELD_EVERY) {\n await yieldToEventLoop();\n filesSinceYield = 0;\n }\n filesSinceYield++;\n\n const { canonical } = group;\n\n // Read file content\n let content: string;\n try {\n content = readFileSync(canonical.absPath, \"utf8\");\n } catch {\n 
result.filesSkipped++;\n continue;\n }\n\n const hash = sha256File(content);\n\n // Change detection: skip if hash is unchanged\n const existing = selectFileHash.get(canonical.vaultRelPath) as\n | { hash: string }\n | undefined;\n\n if (existing?.hash === hash) {\n result.filesSkipped++;\n continue;\n }\n\n // Delete old chunks for this vault path\n const oldChunkIds = deleteOldChunkIds.all(\n vaultProjectId,\n canonical.vaultRelPath,\n ) as Array<{ id: string }>;\n\n db.transaction(() => {\n for (const row of oldChunkIds) {\n deleteFts.run(row.id);\n }\n deleteChunks.run(vaultProjectId, canonical.vaultRelPath);\n })();\n\n // Chunk the content\n const chunks = chunkMarkdown(content);\n const updatedAt = Date.now();\n\n // Extract title from first H1 heading or filename\n const titleMatch = /^#\\s+(.+)$/m.exec(content);\n const title = titleMatch\n ? titleMatch[1]!.trim()\n : basename(canonical.vaultRelPath, \".md\");\n\n db.transaction(() => {\n for (let i = 0; i < chunks.length; i++) {\n const chunk = chunks[i]!;\n const id = chunkId(\n vaultProjectId,\n canonical.vaultRelPath,\n i,\n chunk.startLine,\n chunk.endLine,\n );\n insertChunk.run(\n id,\n vaultProjectId,\n \"vault\",\n \"topic\",\n canonical.vaultRelPath,\n chunk.startLine,\n chunk.endLine,\n chunk.hash,\n chunk.text,\n updatedAt,\n );\n insertFts.run(\n chunk.text,\n id,\n vaultProjectId,\n canonical.vaultRelPath,\n \"vault\",\n \"topic\",\n chunk.startLine,\n chunk.endLine,\n );\n }\n upsertVaultFile.run(\n canonical.vaultRelPath,\n canonical.inode,\n canonical.device,\n hash,\n title,\n updatedAt,\n );\n })();\n\n result.filesIndexed++;\n result.chunksCreated += chunks.length;\n }\n\n // ---------------------------------------------------------------------------\n // Step 5: Record aliases in vault_aliases\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n // Clear old aliases for this vault before rebuilding\n // (We identify vault 
aliases by checking which canonical paths belong to\n // the canonical files we just indexed — simpler to clear + rebuild all.)\n db.exec(\"DELETE FROM vault_aliases\");\n\n const insertAlias = db.prepare(`\n INSERT OR REPLACE INTO vault_aliases (vault_path, canonical_path, inode, device)\n VALUES (?, ?, ?, ?)\n `);\n\n const insertAliasesTx = db.transaction((groups: InodeGroup[]) => {\n for (const group of groups) {\n for (const alias of group.aliases) {\n insertAlias.run(\n alias.vaultRelPath,\n group.canonical.vaultRelPath,\n alias.inode,\n alias.device,\n );\n result.aliasesRecorded++;\n }\n }\n });\n insertAliasesTx(inodeGroups);\n\n // ---------------------------------------------------------------------------\n // Step 6: Rebuild vault_name_index\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n db.exec(\"DELETE FROM vault_name_index\");\n\n const insertNameIndex = db.prepare(`\n INSERT OR REPLACE INTO vault_name_index (name, vault_path) VALUES (?, ?)\n `);\n\n const insertNameIndexTx = db.transaction(\n (entries: Array<[string, string]>) => {\n for (const [name, path] of entries) {\n insertNameIndex.run(name, path);\n }\n },\n );\n\n const nameEntries: Array<[string, string]> = [];\n for (const [name, paths] of nameIndex) {\n for (const path of paths) {\n nameEntries.push([name, path]);\n }\n }\n insertNameIndexTx(nameEntries);\n\n // ---------------------------------------------------------------------------\n // Step 7: Rebuild vault_links\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n db.exec(\"DELETE FROM vault_links\");\n\n const insertLink = db.prepare(`\n INSERT OR IGNORE INTO vault_links\n (source_path, target_raw, target_path, link_type, line_number)\n VALUES (?, ?, ?, ?, ?)\n `);\n\n // Parse and resolve wikilinks in bulk transaction\n const linkRows: Array<{\n source: string;\n raw: string;\n target: string | 
null;\n linkType: string;\n lineNumber: number;\n }> = [];\n\n for (const group of inodeGroups) {\n const { canonical } = group;\n\n let content: string;\n try {\n content = readFileSync(canonical.absPath, \"utf8\");\n } catch {\n continue;\n }\n\n const parsedLinks = parseWikilinks(content);\n for (const link of parsedLinks) {\n const target = resolveWikilink(link.raw, nameIndex, canonical.vaultRelPath);\n linkRows.push({\n source: canonical.vaultRelPath,\n raw: link.raw,\n target,\n linkType: link.isEmbed ? \"embed\" : \"wikilink\",\n lineNumber: link.lineNumber,\n });\n }\n }\n\n const insertLinksTx = db.transaction(\n (\n rows: Array<{\n source: string;\n raw: string;\n target: string | null;\n linkType: string;\n lineNumber: number;\n }>,\n ) => {\n for (const row of rows) {\n insertLink.run(row.source, row.raw, row.target, row.linkType, row.lineNumber);\n }\n },\n );\n insertLinksTx(linkRows);\n\n result.linksExtracted = linkRows.length;\n result.deadLinksFound = linkRows.filter((r) => r.target === null).length;\n\n // ---------------------------------------------------------------------------\n // Step 8: Compute and upsert vault_health metrics\n // ---------------------------------------------------------------------------\n\n await yieldToEventLoop();\n\n // Count outbound links per source\n const outboundCounts = db\n .prepare(\n `SELECT source_path, COUNT(*) AS cnt FROM vault_links GROUP BY source_path`,\n )\n .all() as Array<{ source_path: string; cnt: number }>;\n\n // Count dead links per source\n const deadLinkCounts = db\n .prepare(\n `SELECT source_path, COUNT(*) AS cnt FROM vault_links\n WHERE target_path IS NULL GROUP BY source_path`,\n )\n .all() as Array<{ source_path: string; cnt: number }>;\n\n // Count inbound links per target\n const inboundCounts = db\n .prepare(\n `SELECT target_path, COUNT(*) AS cnt FROM vault_links\n WHERE target_path IS NOT NULL GROUP BY target_path`,\n )\n .all() as Array<{ target_path: string; cnt: number }>;\n\n // 
Build maps for O(1) lookup\n const outboundMap = new Map<string, number>(\n outboundCounts.map((r) => [r.source_path, r.cnt]),\n );\n const deadMap = new Map<string, number>(\n deadLinkCounts.map((r) => [r.source_path, r.cnt]),\n );\n const inboundMap = new Map<string, number>(\n inboundCounts.map((r) => [r.target_path, r.cnt]),\n );\n\n const upsertHealth = db.prepare(`\n INSERT INTO vault_health\n (vault_path, inbound_count, outbound_count, dead_link_count, is_orphan, computed_at)\n VALUES (?, ?, ?, ?, ?, ?)\n ON CONFLICT(vault_path) DO UPDATE SET\n inbound_count = excluded.inbound_count,\n outbound_count = excluded.outbound_count,\n dead_link_count = excluded.dead_link_count,\n is_orphan = excluded.is_orphan,\n computed_at = excluded.computed_at\n `);\n\n const computedAt = Date.now();\n let orphanCount = 0;\n\n const upsertHealthTx = db.transaction((groups: InodeGroup[]) => {\n for (const group of groups) {\n const path = group.canonical.vaultRelPath;\n const inbound = inboundMap.get(path) ?? 0;\n const outbound = outboundMap.get(path) ?? 0;\n const dead = deadMap.get(path) ?? 0;\n const isOrphan = inbound === 0 ? 
1 : 0;\n if (isOrphan) orphanCount++;\n upsertHealth.run(path, inbound, outbound, dead, isOrphan, computedAt);\n }\n });\n upsertHealthTx(inodeGroups);\n\n result.orphansFound = orphanCount;\n result.elapsed = Date.now() - startTime;\n\n return result;\n}\n"],"mappings":";;;;;;;;;;;;;;;;;;;;;AA4DA,MAAM,kBAAkB;;AAGxB,MAAM,kBAAkB;;AAGxB,MAAM,oBAAoB;;;;;AAM1B,MAAM,kBAAkB,IAAI,IAAI;CAE9B;CAEA;CACA;CACA;CAEA;CACA;CACA;CACA;CACA;CAEA;CACA;CACA;CAEA;CACA;CAEA;CACA;CACD,CAAC;AAMF,SAAS,WAAW,SAAyB;AAC3C,QAAO,WAAW,SAAS,CAAC,OAAO,QAAQ,CAAC,OAAO,MAAM;;AAG3D,SAAS,QACP,WACA,MACA,YACA,WACA,SACQ;AACR,QAAO,WAAW,SAAS,CACxB,OAAO,GAAG,UAAU,GAAG,KAAK,GAAG,WAAW,GAAG,UAAU,GAAG,UAAU,CACpE,OAAO,MAAM;;AAGlB,SAAS,mBAAkC;AACzC,QAAO,IAAI,SAAS,YAAY,aAAa,QAAQ,CAAC;;;;;;;;;;;;;;;;;;;AAwBxD,SAAgB,iBACd,WACA,MACa;CACb,MAAM,WAAW,MAAM,YAAY;CACnC,MAAM,WAAW,MAAM,YAAY;CAEnC,MAAM,UAAuB,EAAE;CAC/B,MAAM,8BAAc,IAAI,KAAa;CAErC,SAAS,KAAK,KAAa,OAAqB;AAC9C,MAAI,QAAQ,UAAU,SAAU;AAChC,MAAI,QAAQ,SAAU;EAGtB,IAAI;AACJ,MAAI;AACF,aAAU,SAAS,IAAI;UACjB;AACN;;EAGF,MAAM,SAAS,GAAG,QAAQ,IAAI,GAAG,QAAQ;AACzC,MAAI,YAAY,IAAI,OAAO,CAAE;AAC7B,cAAY,IAAI,OAAO;EAEvB,IAAI;AACJ,MAAI;AACF,aAAU,YAAY,KAAK;IAAE,eAAe;IAAM,UAAU;IAAQ,CAAC;UAC/D;AACN;;AAGF,OAAK,MAAM,SAAS,SAAS;AAC3B,OAAI,QAAQ,UAAU,SAAU;AAChC,OAAI,gBAAgB,IAAI,MAAM,KAAK,CAAE;GAErC,MAAM,OAAO,KAAK,KAAK,MAAM,KAAK;AAElC,OAAI,MAAM,gBAAgB,EAAE;IAE1B,IAAI;AACJ,QAAI;AACF,kBAAa,SAAS,KAAK;YACrB;AACN;;AAGF,QAAI,WAAW,aAAa,EAC1B;SAAI,CAAC,gBAAgB,IAAI,MAAM,KAAK,CAClC,MAAK,MAAM,QAAQ,EAAE;eAEd,WAAW,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,CAC1D,SAAQ,KAAK;KACX,SAAS;KACT,cAAc,SAAS,WAAW,KAAK;KACvC,OAAO,WAAW;KAClB,QAAQ,WAAW;KACpB,CAAC;cAEK,MAAM,aAAa,CAC5B,MAAK,MAAM,QAAQ,EAAE;YACZ,MAAM,QAAQ,IAAI,MAAM,KAAK,SAAS,MAAM,EAAE;IACvD,IAAI;AACJ,QAAI;AACF,gBAAW,SAAS,KAAK;YACnB;AACN;;AAEF,YAAQ,KAAK;KACX,SAAS;KACT,cAAc,SAAS,WAAW,KAAK;KACvC,OAAO,SAAS;KAChB,QAAQ,SAAS;KAClB,CAAC;;;;AAKR,KAAI,WAAW,UAAU,CACvB,MAAK,WAAW,EAAE;AAGpB,QAAO;;;;;;;;;AAcT,SAAgB,mBAAmB,OAAkC;CACnE,MAAM,yBAAS,IAAI,KAA0B;AAE7C,MAAK,MAAM,QAAQ,OAAO;EACxB,MAA
M,MAAM,GAAG,KAAK,OAAO,GAAG,KAAK;EACnC,MAAM,WAAW,OAAO,IAAI,IAAI;AAChC,MAAI,SACF,UAAS,KAAK,KAAK;MAEnB,QAAO,IAAI,KAAK,CAAC,KAAK,CAAC;;CAI3B,MAAM,SAAuB,EAAE;AAE/B,MAAK,MAAM,SAAS,OAAO,QAAQ,EAAE;AACnC,MAAI,MAAM,WAAW,EAAG;EAUxB,MAAM,CAAC,WAAW,GAAG,WAPN,CAAC,GAAG,MAAM,CAAC,MAAM,GAAG,MAAM;GACvC,MAAM,UAAU,EAAE,aAAa,MAAM,MAAM,IAAI,EAAE,EAAE;GACnD,MAAM,UAAU,EAAE,aAAa,MAAM,MAAM,IAAI,EAAE,EAAE;AACnD,OAAI,WAAW,OAAQ,QAAO,SAAS;AACvC,UAAO,EAAE,aAAa,SAAS,EAAE,aAAa;IAC9C;AAGF,SAAO,KAAK;GAAE;GAAW;GAAS,CAAC;;AAGrC,QAAO;;;;;;;;;;;;;;;AAoBT,SAAgB,eAAe,SAA+B;CAC5D,MAAM,QAAsB,EAAE;CAC9B,MAAM,QAAQ,QAAQ,MAAM,KAAK;CAGjC,IAAI,iBAAiB;AACrB,KAAI,QAAQ,WAAW,MAAM,EAAE;EAC7B,MAAM,aAAa,QAAQ,QAAQ,SAAS,EAAE;AAC9C,MAAI,eAAe,GACjB,kBAAiB,QAAQ,MAAM,GAAG,aAAa,EAAE,CAAC,MAAM,KAAK,CAAC,SAAS;;CAK3E,MAAM,aAAa;AAEnB,MAAK,IAAI,UAAU,GAAG,UAAU,MAAM,QAAQ,WAAW;EACvD,MAAM,OAAO,MAAM;EACnB,MAAM,aAAa,UAAU;AAE7B,aAAW,YAAY;EACvB,IAAI;AACJ,UAAQ,QAAQ,WAAW,KAAK,KAAK,MAAM,MAAM;GAC/C,MAAM,UAAU,MAAM,OAAO;GAC7B,MAAM,QAAQ,MAAM;GAGpB,MAAM,UAAU,MAAM,QAAQ,IAAI;GAClC,MAAM,aAAa,YAAY,KAAK,QAAQ,MAAM,MAAM,GAAG,QAAQ;GACnE,MAAM,QAAQ,YAAY,KAAK,OAAO,MAAM,MAAM,UAAU,EAAE;GAG9D,MAAM,UAAU,WAAW,QAAQ,IAAI;GACvC,MAAM,MAAM,YAAY,KAAK,WAAW,MAAM,GAAG,WAAW,MAAM,GAAG,QAAQ,CAAC,MAAM;AAEpF,OAAI,CAAC,IAAK;GAGV,MAAM,gBAAgB,UAAU;AAChC,SAAM,KAAK;IACT;IACA,OAAO,OAAO,MAAM,IAAI;IACxB;IACA,SAAS,WAAW,CAAC;IACtB,CAAC;;;AAIN,QAAO;;;;;;;;;AAcT,SAAgB,eAAe,OAA2C;CACxE,MAAM,wBAAQ,IAAI,KAAuB;AAEzC,MAAK,MAAM,QAAQ,OAAO;EACxB,MAAM,OAAO,SAAS,KAAK,cAAc,MAAM,CAAC,aAAa;EAC7D,MAAM,WAAW,MAAM,IAAI,KAAK;AAChC,MAAI,SACF,UAAS,KAAK,KAAK,aAAa;MAEhC,OAAM,IAAI,MAAM,CAAC,KAAK,aAAa,CAAC;;AAIxC,QAAO;;;;;;;;;;;;;;;;;;;AAwBT,SAAgB,gBACd,KACA,WACA,YACe;AACf,KAAI,CAAC,IAAK,QAAO;AAGjB,KAAI,IAAI,SAAS,IAAI,EAAE;EACrB,MAAM,aAAa,UAAU,IAAI;EACjC,MAAM,eAAe,WAAW,SAAS,MAAM,GAAG,aAAa,aAAa;AAG5E,OAAK,MAAM,GAAG,UAAU,UACtB,MAAK,MAAM,KAAK,OAAO;AACrB,OAAI,MAAM,gBAAgB,MAAM,WAAY,QAAO;AACnD,OAAI,EAAE,aAAa,KAAK,aAAa,aAAa,CAAE,QAAO;;;CASjE,MAAM,UAAU,SAAS,IAAI,CAC1B,QAAQ,UAAU,GAAG,CACrB,aAAa,CACb,MAAM;AAET,KAAI,CAAC,Q
AAS,QAAO;CAErB,MAAM,aAAa,UAAU,IAAI,QAAQ;AAEzC,KAAI,CAAC,cAAc,WAAW,WAAW,EACvC,QAAO;AAGT,KAAI,WAAW,WAAW,EACxB,QAAO,WAAW;CAIpB,MAAM,YAAY,QAAQ,WAAW;CAErC,IAAI,WAA0B;CAC9B,IAAI,gBAAgB;CACpB,IAAI,cAAc;AAElB,MAAK,MAAM,aAAa,YAAY;EAElC,MAAM,YAAY,mBAAmB,WADhB,QAAQ,UAAU,CACsB;EAC7D,MAAM,UAAU,UAAU;AAE1B,MACE,YAAY,iBACX,cAAc,iBAAiB,UAAU,aAC1C;AACA,mBAAgB;AAChB,iBAAc;AACd,cAAW;;;AAIf,QAAO;;;;;;;;AAST,SAAS,mBAAmB,GAAW,GAAmB;AACxD,KAAI,MAAM,OAAO,MAAM,IAAK,QAAO;CACnC,MAAM,SAAS,MAAM,MAAM,EAAE,GAAG,EAAE,MAAM,IAAI;CAC5C,MAAM,SAAS,MAAM,MAAM,EAAE,GAAG,EAAE,MAAM,IAAI;CAC5C,IAAI,QAAQ;CACZ,MAAM,MAAM,KAAK,IAAI,OAAO,QAAQ,OAAO,OAAO;AAClD,MAAK,IAAI,IAAI,GAAG,IAAI,KAAK,IACvB,KAAI,OAAO,OAAO,OAAO,GACvB;KAEA;AAGJ,QAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAiCT,eAAsB,WACpB,IACA,gBACA,WAC2B;CAC3B,MAAM,YAAY,KAAK,KAAK;CAE5B,MAAM,SAA2B;EAC/B,cAAc;EACd,eAAe;EACf,cAAc;EACd,iBAAiB;EACjB,gBAAgB;EAChB,gBAAgB;EAChB,cAAc;EACd,SAAS;EACV;CAMD,MAAM,WAAW,iBAAiB,UAAU;CAM5C,MAAM,cAAc,mBAAmB,SAAS;CAMhD,MAAM,YAAY,eAAe,SAAS;CAM1C,MAAM,iBAAiB,GAAG,QACxB,oDACD;CAED,MAAM,oBAAoB,GAAG,QAC3B,iEACD;CAED,MAAM,YAAY,GAAG,QAAQ,sCAAsC;CAEnE,MAAM,eAAe,GAAG,QACtB,8DACD;CAED,MAAM,cAAc,GAAG,QAAQ;;;IAG7B;CAEF,MAAM,YAAY,GAAG,QAAQ;;;IAG3B;CAEF,MAAM,kBAAkB,GAAG,QAAQ;;;;;;;;;IASjC;AAMF,OAAM,kBAAkB;CACxB,IAAI,kBAAkB;AAEtB,MAAK,MAAM,SAAS,aAAa;AAE/B,MAAI,mBAAmB,mBAAmB;AACxC,SAAM,kBAAkB;AACxB,qBAAkB;;AAEpB;EAEA,MAAM,EAAE,cAAc;EAGtB,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,UAAU,SAAS,OAAO;UAC3C;AACN,UAAO;AACP;;EAGF,MAAM,OAAO,WAAW,QAAQ;AAOhC,MAJiB,eAAe,IAAI,UAAU,aAAa,EAI7C,SAAS,MAAM;AAC3B,UAAO;AACP;;EAIF,MAAM,cAAc,kBAAkB,IACpC,gBACA,UAAU,aACX;AAED,KAAG,kBAAkB;AACnB,QAAK,MAAM,OAAO,YAChB,WAAU,IAAI,IAAI,GAAG;AAEvB,gBAAa,IAAI,gBAAgB,UAAU,aAAa;IACxD,EAAE;EAGJ,MAAM,SAAS,cAAc,QAAQ;EACrC,MAAM,YAAY,KAAK,KAAK;EAG5B,MAAM,aAAa,cAAc,KAAK,QAAQ;EAC9C,MAAM,QAAQ,aACV,WAAW,GAAI,MAAM,GACrB,SAAS,UAAU,cAAc,MAAM;AAE3C,KAAG,kBAAkB;AACnB,QAAK,IAAI,IAAI,GAAG,IAAI,OAAO,QAAQ,KAAK;IACtC,MAAM,QAAQ,OAAO;IACrB,MAAM,KAAK,QACT,gBACA,UAAU,cACV,GACA,MAAM,WACN,MAAM,QACP;AACD,gBAAY,IACV,IACA,gBACA,SACA,
SACA,UAAU,cACV,MAAM,WACN,MAAM,SACN,MAAM,MACN,MAAM,MACN,UACD;AACD,cAAU,IACR,MAAM,MACN,IACA,gBACA,UAAU,cACV,SACA,SACA,MAAM,WACN,MAAM,QACP;;AAEH,mBAAgB,IACd,UAAU,cACV,UAAU,OACV,UAAU,QACV,MACA,OACA,UACD;IACD,EAAE;AAEJ,SAAO;AACP,SAAO,iBAAiB,OAAO;;AAOjC,OAAM,kBAAkB;AAKxB,IAAG,KAAK,4BAA4B;CAEpC,MAAM,cAAc,GAAG,QAAQ;;;IAG7B;AAeF,CAbwB,GAAG,aAAa,WAAyB;AAC/D,OAAK,MAAM,SAAS,OAClB,MAAK,MAAM,SAAS,MAAM,SAAS;AACjC,eAAY,IACV,MAAM,cACN,MAAM,UAAU,cAChB,MAAM,OACN,MAAM,OACP;AACD,UAAO;;GAGX,CACc,YAAY;AAM5B,OAAM,kBAAkB;AAExB,IAAG,KAAK,+BAA+B;CAEvC,MAAM,kBAAkB,GAAG,QAAQ;;IAEjC;CAEF,MAAM,oBAAoB,GAAG,aAC1B,YAAqC;AACpC,OAAK,MAAM,CAAC,MAAM,SAAS,QACzB,iBAAgB,IAAI,MAAM,KAAK;GAGpC;CAED,MAAM,cAAuC,EAAE;AAC/C,MAAK,MAAM,CAAC,MAAM,UAAU,UAC1B,MAAK,MAAM,QAAQ,MACjB,aAAY,KAAK,CAAC,MAAM,KAAK,CAAC;AAGlC,mBAAkB,YAAY;AAM9B,OAAM,kBAAkB;AAExB,IAAG,KAAK,0BAA0B;CAElC,MAAM,aAAa,GAAG,QAAQ;;;;IAI5B;CAGF,MAAM,WAMD,EAAE;AAEP,MAAK,MAAM,SAAS,aAAa;EAC/B,MAAM,EAAE,cAAc;EAEtB,IAAI;AACJ,MAAI;AACF,aAAU,aAAa,UAAU,SAAS,OAAO;UAC3C;AACN;;EAGF,MAAM,cAAc,eAAe,QAAQ;AAC3C,OAAK,MAAM,QAAQ,aAAa;GAC9B,MAAM,SAAS,gBAAgB,KAAK,KAAK,WAAW,UAAU,aAAa;AAC3E,YAAS,KAAK;IACZ,QAAQ,UAAU;IAClB,KAAK,KAAK;IACV;IACA,UAAU,KAAK,UAAU,UAAU;IACnC,YAAY,KAAK;IAClB,CAAC;;;AAmBN,CAfsB,GAAG,aAErB,SAOG;AACH,OAAK,MAAM,OAAO,KAChB,YAAW,IAAI,IAAI,QAAQ,IAAI,KAAK,IAAI,QAAQ,IAAI,UAAU,IAAI,WAAW;GAGlF,CACa,SAAS;AAEvB,QAAO,iBAAiB,SAAS;AACjC,QAAO,iBAAiB,SAAS,QAAQ,MAAM,EAAE,WAAW,KAAK,CAAC;AAMlE,OAAM,kBAAkB;CAGxB,MAAM,iBAAiB,GACpB,QACC,4EACD,CACA,KAAK;CAGR,MAAM,iBAAiB,GACpB,QACC;uDAED,CACA,KAAK;CAGR,MAAM,gBAAgB,GACnB,QACC;2DAED,CACA,KAAK;CAGR,MAAM,cAAc,IAAI,IACtB,eAAe,KAAK,MAAM,CAAC,EAAE,aAAa,EAAE,IAAI,CAAC,CAClD;CACD,MAAM,UAAU,IAAI,IAClB,eAAe,KAAK,MAAM,CAAC,EAAE,aAAa,EAAE,IAAI,CAAC,CAClD;CACD,MAAM,aAAa,IAAI,IACrB,cAAc,KAAK,MAAM,CAAC,EAAE,aAAa,EAAE,IAAI,CAAC,CACjD;CAED,MAAM,eAAe,GAAG,QAAQ;;;;;;;;;;IAU9B;CAEF,MAAM,aAAa,KAAK,KAAK;CAC7B,IAAI,cAAc;AAalB,CAXuB,GAAG,aAAa,WAAyB;AAC9D,OAAK,MAAM,SAAS,QAAQ;GAC1B,MAAM,OAAO,MAAM,UAAU;GAC7B,MAAM,UAAU,WAAW,IAAI,KAAK,IAAI;GACxC,MAAM,WAAW,YAA
Y,IAAI,KAAK,IAAI;GAC1C,MAAM,OAAO,QAAQ,IAAI,KAAK,IAAI;GAClC,MAAM,WAAW,YAAY,IAAI,IAAI;AACrC,OAAI,SAAU;AACd,gBAAa,IAAI,MAAM,SAAS,UAAU,MAAM,UAAU,WAAW;;GAEvE,CACa,YAAY;AAE3B,QAAO,eAAe;AACtB,QAAO,UAAU,KAAK,KAAK,GAAG;AAE9B,QAAO"}