personal-ai 0.1.0 → 0.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -86,19 +86,27 @@ pai profile --export # 输出可复制粘贴的 profile
86
86
  pai profile --json # JSON 元数据
87
87
  pai distribute # 部署 profile + skill 到 Cursor
88
88
 
89
- # 数据管理 (进阶)
89
+ # 日常记录 (零 LLM)
90
+ pai log <text> # 追加到今日 journal (memory/YYYY-MM-DD.md)
91
+ pai log --clip # 从剪贴板读入
92
+ pai log --show # 查看今日 journal
93
+ pai log --date <date> <text> # 回填指定日期
94
+
95
+ # 数据管理
90
96
  pai reset [--force] # 清空所有数据并重新初始化
91
97
  pai add <text> # 手动添加文本到 raw/local/
92
98
  pai add --url <url> # 抓取 URL 到 raw/web/
93
99
  pai add <file> # 添加文件内容
94
100
  pai auth google # Google 授权 (gmail/calendar 首次或重授权)
95
101
  pai import --source mac # 手动触发 Mac 扫描 (写入 raw)
96
- pai import --source gmail [--days N] [--query "..."] # Gmail 导入 (未授权时自动弹窗)
102
+ pai import --source gmail [--days N] [--query "..."] # Gmail 导入
97
103
  pai import --source calendar [--days N] # 日历导入
98
- pai import --source <src> --path # 批量导入其他数据源 (需 --path)
99
104
 
100
- # 深度知识 (可选,需要 LLM)
101
- pai distill [--dry-run] [--file] # 蒸馏 raw → vault (需 OPENAI_API_KEY)
105
+ # PINData 提取 (需要 OPENAI_API_KEY)
106
+ pai distill [--dry-run] [--file] # 提取 PINData: raw + journal → vault
107
+ pai distill --today # 只处理今日 journal
108
+ pai digest [--date <date>] # 生成 AI 日摘要
109
+ pai gaps [--days <n>] # 检查缺失日志 (默认 7 天)
102
110
  pai generate [--profile <name>] # LLM 生成 SKILL.md
103
111
  pai index # 更新 QMD 索引
104
112
 
@@ -145,23 +153,24 @@ src/
145
153
  ├── cli/ # CLI 命令注册 (Commander.js)
146
154
  │ ├── build-program.ts
147
155
  │ ├── command-registry.ts
148
- │ └── register.*.ts
156
+ │ └── register.*.ts # init/profile/log/distill/digest/gaps/add/search/...
149
157
  ├── profile/ # Profile 编译器 (scan → profile.md, 零 LLM)
150
- ├── compile.ts
151
- │ └── index.ts
152
- ├── config/ # 配置管理 (Zod + JSON5)
158
+ ├── memory/ # 时间维度 — 日志 + 摘要 (journal.ts)
159
+ ├── vault/ # 主题维度 — PINData 结构化知识 (writer.ts)
160
+ ├── config/ # 配置管理 (Zod + JSON5 + 路径管理)
153
161
  ├── auth/ # Google OAuth (encryption + google-oauth)
154
162
  ├── connectors/ # 数据收集器
155
163
  │ ├── mac/ # 14 个 Mac collectors
156
164
  │ ├── google/ # gmail.ts, calendar.ts
157
165
  │ └── sanitize.ts
158
- ├── raw/ # Raw 层
159
- ├── scraper/ # 网页抓取
160
- ├── distill/ # 蒸馏 Pipeline (可选)
161
- ├── generate/ # SKILL.md 生成 (可选)
166
+ ├── raw/ # Raw 层 (来源维度)
167
+ ├── scraper/ # 网页抓取 (Playwright + defuddle)
168
+ ├── distill/ # PINData 提取 Pipeline (extract.ts)
169
+ ├── generate/ # SKILL.md 生成
162
170
  ├── search/ # QMD 搜索封装
163
171
  ├── llm/ # OpenAI client
164
- ├── prompts/ # Prompt 模板
172
+ ├── prompts/ # Prompt 模板 (extract.ts, generate.ts)
173
+ ├── ask/ # Ask agent (Vercel AI SDK + tools)
165
174
  ├── utils/ # 工具函数
166
175
  ├── types.ts # 全局类型
167
176
  ├── index.ts # Public API
@@ -172,13 +181,21 @@ src/
172
181
 
173
182
  ```
174
183
  ~/.pai/
175
- ├── profile.md # 核心产物 — 编译后的用户画像
176
- ├── raw/ # 原始数据 (扫描/添加)
184
+ ├── profile.md # 核心产物 — 编译后的用户画像 (零 LLM)
185
+ ├── memory/ # 时间维度 — 日志 + 摘要
186
+ │ ├── 2026-02-08.md # 每日 journal (pai log 追加)
187
+ │ └── weekly/ # 周摘要 (未来)
188
+ ├── raw/ # 来源维度 — 原始数据 (扫描/添加)
177
189
  │ ├── local/
178
190
  │ ├── web/
179
191
  │ └── connector/ # mac/, gmail/, calendar/
192
+ ├── vault/ # 主题维度 — PINData 结构化知识
193
+ │ ├── context/ # identity.md, projects.md, services.md
194
+ │ ├── preferences/ # tools.md, workflow.md
195
+ │ ├── work/ # activity.md, finance.md
196
+ │ ├── life/ # interests.md
197
+ │ └── coding/ # lessons.md
180
198
  ├── credentials/ # Google OAuth: client_secret.json, google-oauth.json.enc
181
- ├── vault/ # 蒸馏后知识 (可选进阶)
182
199
  ├── skills/profiles/ # LLM 生成的 SKILL.md (可选)
183
200
  └── config/
184
201
  ├── pai.json5
@@ -207,7 +224,7 @@ pai init && pai distribute # 两步搞定
207
224
  pai ask "What's the user's deployment preference?" # 推荐:直接拿答案
208
225
  pai context --task "configure PostgreSQL connection pooling"
209
226
  pai search "database performance" --json
210
- pai add "learned: always set pool_size=20 for production"
227
+ pai log "learned: always set pool_size=20 for production" # 快速记住
211
228
  ```
212
229
 
213
230
  ## 开发
package/SKILL.md CHANGED
@@ -1,13 +1,13 @@
1
1
  # pai — Personal AI Identity Provider
2
2
 
3
- > One command to scan your machine, compile your profile, and deploy to any AI agent.
3
+ > One command to scan your machine, compile your profile, and deploy to any AI agent. Log your day, extract structured knowledge, search your personal memory.
4
4
 
5
5
  <!--
6
6
  metadata:
7
7
  {
8
8
  "name": "pai",
9
- "version": "0.1.0",
10
- "description": "Local-first AI agent identity system. Scan, compile profile, deploy to agents.",
9
+ "version": "latest",
10
+ "description": "Local-first AI agent identity & memory system. Scan machine, compile profile, log daily journal, extract PINData knowledge, deploy to agents.",
11
11
  "requires": {
12
12
  "bins": ["pai"],
13
13
  "optional_bins": ["qmd"],
@@ -21,19 +21,19 @@ metadata:
21
21
 
22
22
  ## What This Skill Does
23
23
 
24
- pai is a local-first AI agent identity provider. It scans your machine, compiles a personal profile, and deploys it to AI agents. When you have access to this skill, you can:
24
+ pai is a local-first AI agent identity & memory system. When you have access to this skill, you can:
25
25
 
26
26
  - **Know** who the user is — identity, environment, tools, projects, preferences
27
- - **Search** the user's personal knowledge base (vault + raw)
27
+ - **Log** the user's day — quick journal entries, no LLM needed
28
+ - **Remember** structured knowledge (PINData) — facts, preferences, decisions, entities, events
29
+ - **Search** the user's personal knowledge base (vault + raw + memory)
28
30
  - **Ask** questions about the user and get direct answers (agentic)
29
- - **Remember** new lessons, preferences, and discoveries for the user
30
31
 
31
32
  ## Quick Setup (1 minute)
32
33
 
33
34
  ```bash
34
- npm install -g personal-ai # Install pai CLI
35
- pai init # Scan machine + compile profile (~12s, no LLM needed)
36
- pai distribute # Deploy to Cursor / Claude Code / agents
35
+ npm install -g personal-ai@latest # Always install the latest version
36
+ pai init # Scan machine + compile profile (~12s, no LLM needed)
37
37
  ```
38
38
 
39
39
  That's it. Your agent now knows who you are.
@@ -44,15 +44,15 @@ That's it. Your agent now knows who you are.
44
44
 
45
45
  | Requirement | How to Check | How to Get |
46
46
  |---|---|---|
47
- | `pai` CLI | `pai --version` | `npm install -g personal-ai` |
47
+ | `pai` CLI (latest) | `pai --version` | `npm install -g personal-ai@latest` |
48
48
  | Node.js ≥ 22 | `node -v` | `fnm install 22` or [nodejs.org](https://nodejs.org) |
49
49
 
50
50
  ### Optional (for advanced features)
51
51
 
52
52
  | Requirement | Feature | How to Get |
53
53
  |---|---|---|
54
- | `qmd` CLI | Search (`pai search`) | `npm install -g https://github.com/tobi/qmd` |
55
- | `OPENAI_API_KEY` | AI features (`pai ask`, `pai distill`) | Set in shell profile |
54
+ | `qmd` CLI | Hybrid search (`pai search`) | `npm install -g https://github.com/tobi/qmd` |
55
+ | `OPENAI_API_KEY` | AI features (`pai ask`, `pai distill`, `pai digest`) | Set in shell profile |
56
56
  | Google OAuth | Gmail/Calendar import | `pai auth google` (built-in flow) |
57
57
 
58
58
  ### Data Location
@@ -61,17 +61,33 @@ All data is stored locally at `~/.pai/` (override with `PAI_HOME` env var):
61
61
 
62
62
  ```
63
63
  ~/.pai/
64
- ├── profile.md # Core output — compiled user profile (no LLM needed)
65
- ├── raw/ # Original input (immutable after write)
66
- │ ├── local/ # Text & file input
67
- ├── web/ # Scraped URLs
68
- │ └── connector/# Imported data (mac scan, gmail, calendar)
69
- ├── vault/ # Distilled knowledge (living documents, optional)
70
- ├── credentials/ # Google OAuth tokens (encrypted)
64
+ ├── profile.md # Core — compiled user profile (no LLM needed)
65
+ ├── memory/ # Time dimension — daily journals & digests
66
+ │ ├── 2026-02-08.md
67
+ │ └── weekly/ # Weekly summaries (future)
68
+ ├── raw/ # Source dimension — original input (immutable)
69
+ │ ├── local/ # Text & file input
70
+ │ ├── web/ # Scraped URLs
71
+ │ └── connector/ # Imported data (mac scan, gmail, calendar)
72
+ ├── vault/ # Topic dimension — PINData structured knowledge
73
+ │ ├── context/ # identity, projects, services
74
+ │ ├── preferences/ # tools, workflow
75
+ │ ├── work/ # activity, finance
76
+ │ ├── life/ # interests
77
+ │ └── coding/ # lessons
78
+ ├── credentials/ # Google OAuth tokens (encrypted)
71
79
  ├── skills/profiles/ # LLM-generated SKILL.md files (optional)
72
- └── config/ # pai.json5 + profiles.json5 + preferences.md
80
+ └── config/ # pai.json5 + profiles.json5 + preferences.md
73
81
  ```
74
82
 
83
+ ### Three-Dimensional Data
84
+
85
+ | Layer | Directory | Dimension | Answers |
86
+ |---|---|---|---|
87
+ | `memory/` | YYYY-MM-DD.md | **Time** | "What happened that day?" |
88
+ | `raw/` | local/ web/ connector/ | **Source** | "Where did this data come from?" |
89
+ | `vault/` | context/ preferences/ work/ | **Topic** | "What do I know about X?" |
90
+
75
91
  ### Verifying Access
76
92
 
77
93
  ```bash
@@ -80,146 +96,143 @@ pai status # Check data directory and counts
80
96
  pai profile # View your profile
81
97
  ```
82
98
 
83
- If `pai` is not found:
99
+ If `pai` is not found or outdated:
84
100
  ```bash
85
- npm install -g personal-ai
101
+ npm install -g personal-ai@latest
86
102
  pai init
87
103
  ```
88
104
 
89
105
  ## Available Commands
90
106
 
91
- ### Adding Knowledge
107
+ ### Profile (Core — no LLM needed)
92
108
 
93
109
  ```bash
94
- # Add a text experience/lesson
95
- pai add "Always use connection pooling with PostgreSQL in production"
96
-
97
- # Add content from a URL (scrapes and saves)
98
- pai add --url "https://docs.example.com/best-practices"
99
-
100
- # Add a local file
101
- pai add ./meeting-notes.txt
102
-
103
- # Import from Mac system scan (no --path; auto-scans 11 dimensions)
104
- pai import --source mac
105
- pai import --source mac --dry-run
106
-
107
- # Import from a data connector directory
108
- pai import --source gmail --path ~/data/gmail-export/
110
+ pai init # Initialize + scan + compile profile (one command)
111
+ pai init --skip-scan # Init only, skip scan (CI/testing)
112
+ pai profile # View your profile
113
+ pai profile --rebuild # Re-scan machine + recompile
114
+ pai profile --export # Plain text output for copy-paste
115
+ pai profile --json # JSON metadata
116
+ pai distribute # Deploy profile + skill to Cursor/Claude
117
+ pai distribute --target cursor # Specific target
118
+ pai reset [--force] # Wipe all data and re-init
109
119
  ```
110
120
 
111
- ### Ask (RECOMMENDED for Agents — get direct answers)
112
-
113
- Ask a question; an agentic secretary uses tools to find the answer and returns a direct reply. No need to interpret raw search results.
121
+ ### Daily Journal (no LLM needed)
114
122
 
115
- **Requires:** `OPENAI_API_KEY` (agentic loop uses LLM + tools).
116
-
117
- **Tools available to the agent:** search vault/raw, read profile, read file, grep (ripgrep), glob, ls, bash. The agent decides which to call and when to stop.
123
+ Quick, low-friction logging. Appends to `memory/YYYY-MM-DD.md`.
118
124
 
119
125
  ```bash
120
- # Ask anything about the user
121
- pai ask "What deployment method does this user prefer?"
122
- pai ask "Does the user have Kubernetes experience?" --json
123
-
124
- # Options
125
- pai ask "用户的编码规范是什么?" --steps 15 # max tool-call steps (default: 10)
126
- pai ask "..." --model gpt-4o # override model
127
- pai ask "..." --verbose # show each tool step
126
+ pai log "Had standup with Ethan, Prediction on track"
127
+ pai log "Decided to use Tailwind instead of vanilla CSS"
128
+ pai log --clip # Read from clipboard
129
+ pai log --show # View today's journal
130
+ pai log --date 2026-02-07 "Backfill yesterday"
131
+ echo "piped text" | pai log # Read from stdin
128
132
  ```
129
133
 
130
- **Output:** plain answer; or with `--json`: `{ "answer", "sources", "steps" }`.
131
-
132
- ### Retrieving Context (fast, no LLM)
134
+ ### Adding Knowledge to Raw
133
135
 
134
136
  ```bash
135
- # Get identity + task-relevant vault memories (hybrid search)
136
- pai context --task "current task description"
137
-
138
- # Get identity only (no search, instant)
139
- pai context
140
-
141
- # Machine-readable output
142
- pai context --task "deploy React app" --json
143
-
144
- # Use a specific profile
145
- pai context --profile coding-assistant --task "..."
137
+ pai add "Always use connection pooling with PostgreSQL in production"
138
+ pai add --url "https://docs.example.com/best-practices"
139
+ pai add ./meeting-notes.txt
140
+ pai add "some text" --source work
146
141
  ```
147
142
 
148
- ### Searching Knowledge
143
+ ### Data Import
149
144
 
150
145
  ```bash
151
- # Search distilled knowledge (vault)
152
- pai search "PostgreSQL performance"
146
+ pai auth google # Google OAuth (once)
147
+ pai import --source mac # Mac scan (14 collectors)
148
+ pai import --source mac --dry-run # Preview mode
149
+ pai import --source gmail # Gmail (default: 30 days)
150
+ pai import --source gmail --days 7 --query "is:important"
151
+ pai import --source calendar # Calendar
152
+ pai import --source calendar --days 60
153
+ ```
153
154
 
154
- # Search raw original data (for tracing back)
155
- pai search "PostgreSQL" --raw
155
+ ### PINData Extraction (requires OPENAI_API_KEY)
156
156
 
157
- # Search everything
158
- pai search "PostgreSQL" --all
157
+ Extract structured knowledge entries from raw files and journals into vault.
159
158
 
160
- # Control result count
161
- pai search "React hooks" -n 10
159
+ PINData types: `[fact]` / `[pref]` / `[decision]` / `[entity]` / `[event]`
162
160
 
163
- # Machine-readable output (for agents)
164
- pai search "PostgreSQL" --json
161
+ ```bash
162
+ pai distill # Process all pending raw + today's journal
163
+ pai distill --today # Only today's journal
164
+ pai distill --dry-run # Preview extraction, no writes
165
+ pai distill --file ~/.pai/raw/local/xxx.md # Single file
165
166
  ```
166
167
 
167
- ### Processing & Distilling
168
+ ### Daily Digest (requires OPENAI_API_KEY)
169
+
170
+ Generate an AI summary of the day, appended to the journal.
168
171
 
169
172
  ```bash
170
- # Preview what would be distilled (safe, no writes)
171
- pai distill --dry-run
173
+ pai digest # Today's digest
174
+ pai digest --date 2026-02-07 # Specific date
175
+ pai digest --dry-run # Preview without writing
176
+ ```
172
177
 
173
- # Process all pending raw files → extract to vault
174
- pai distill
178
+ ### Journal Gaps
175
179
 
176
- # Process a specific file
177
- pai distill --file ~/.pai/raw/local/2026-02-06T15-17-xxx.md
180
+ Check for missing daily journals.
178
181
 
179
- # Update QMD search index after changes
180
- pai index
182
+ ```bash
183
+ pai gaps # Last 7 days
184
+ pai gaps --days 30 # Last 30 days
181
185
  ```
182
186
 
183
- ### Generating SKILL.md
187
+ ### Ask (RECOMMENDED for Agents — get direct answers)
184
188
 
185
- ```bash
186
- # Generate all configured profiles
187
- pai generate
189
+ Agentic secretary that uses tools to find answers. No need to interpret raw search results.
188
190
 
189
- # Generate a specific profile
190
- pai generate --profile coding-assistant
191
+ **Requires:** `OPENAI_API_KEY`.
191
192
 
192
- # View generated profile
193
- cat ~/.pai/skills/profiles/coding-assistant.md
193
+ ```bash
194
+ pai ask "What deployment method does this user prefer?"
195
+ pai ask "Does the user have Kubernetes experience?" --json
196
+ pai ask "用户的编码规范是什么?" --steps 15
197
+ pai ask "..." --model gpt-4o --verbose
194
198
  ```
195
199
 
196
- ### Agent Deployment
200
+ **Output:** plain answer; or with `--json`: `{ "answer", "sources", "steps" }`.
201
+
202
+ ### Context Retrieval (fast, no LLM)
197
203
 
198
204
  ```bash
199
- # Deploy identity + agent instructions to Cursor rules and other agent configs
200
- pai distribute # Default: all targets
201
- pai distribute --target cursor # Cursor only
202
- pai distribute --target claude # Claude Code only
203
- pai distribute --profile coding-assistant # Use a specific generated profile
205
+ pai context # Identity only
206
+ pai context --task "current task description" # Identity + relevant memories
207
+ pai context --task "deploy React app" --json # Machine-readable
208
+ pai context --profile coding-assistant --task "..."
204
209
  ```
205
210
 
206
- ### Status & Reset
211
+ ### Search
207
212
 
208
213
  ```bash
209
- # Overview of all data
210
- pai status
211
- pai status --json # Machine-readable
214
+ pai search "PostgreSQL performance" # Hybrid search vault
215
+ pai search "PostgreSQL" --fast # Keyword only
216
+ pai search "PostgreSQL" --vector # Semantic only
217
+ pai search "PostgreSQL" --raw # Search raw
218
+ pai search "PostgreSQL" --all # vault + raw
219
+ pai search "PostgreSQL" --json -n 10 # JSON, 10 results
220
+ ```
221
+
222
+ ### Generate & Deploy
212
223
 
213
- # Remove all data and re-initialize (clean slate for testing)
214
- pai reset # Prompts for confirmation
215
- pai reset --force # No prompt, immediate wipe + init
224
+ ```bash
225
+ pai generate # Generate all SKILL.md profiles
226
+ pai generate --profile coding-assistant # Specific profile
227
+ pai index # Update QMD search index
228
+ pai status [--json] # Data overview
216
229
  ```
217
230
 
218
231
  ## How to Use This Skill as an Agent
219
232
 
220
233
  ### 1. Ask questions (recommended)
221
234
 
222
- For any question about the user, use `pai ask` to get a direct answer. The agent uses tools (vault search, profile, grep, bash) and returns a concise reply.
235
+ For any question about the user, use `pai ask` for a direct answer:
223
236
 
224
237
  ```bash
225
238
  pai ask "What does this user prefer for deployment?"
@@ -228,75 +241,107 @@ pai ask "What's the user's current project?" --json
228
241
 
229
242
  ### 2. Retrieve context before starting work
230
243
 
231
- For a quick identity + task-relevant snippets (no LLM, or light):
244
+ For quick identity + task-relevant knowledge (no LLM):
232
245
 
233
246
  ```bash
234
247
  pai context --task "brief description of what you're about to do"
235
248
  ```
236
249
 
237
- When you need raw search results (chunks) instead of a synthesized answer:
250
+ ### 3. Search for specific knowledge
251
+
252
+ When you need raw search results instead of a synthesized answer:
238
253
 
239
254
  ```bash
240
- pai search "editor preferences"
255
+ pai search "editor preferences" --json
241
256
  pai search "React deployment" --json -n 3
242
257
  ```
243
258
 
244
- When you discover something the user should remember (a lesson, preference, or tip):
259
+ ### 4. Log discoveries and decisions (RECOMMENDED over pai add)
260
+
261
+ When you discover something the user should remember — use `pai log` for quick notes, `pai add` for longer content:
245
262
 
246
263
  ```bash
247
- pai add "The user prefers dark mode and monospace fonts in all editors"
264
+ # Quick notes (appends to today's journal, no LLM)
265
+ pai log "User prefers dark mode and monospace fonts"
266
+ pai log "Decided: pool_size=20 for production PostgreSQL"
267
+
268
+ # Longer content or URLs (writes to raw/)
269
+ pai add "Detailed explanation of the CORS issue and resolution..."
248
270
  pai add --url "https://the-useful-article.com"
249
271
  ```
250
272
 
251
273
  ### 5. Periodic maintenance
252
274
 
253
- After adding multiple items, process and regenerate:
275
+ After logging multiple items, extract and regenerate:
254
276
 
255
277
  ```bash
256
- pai distill # Process pending raw files into vault
278
+ pai distill # Extract PINData from pending raw + journal vault
279
+ pai digest # Generate daily AI summary
257
280
  pai generate # Regenerate SKILL.md profiles
258
281
  pai distribute # Update deployed agent configs
259
282
  ```
260
283
 
261
- The generated profiles at `~/.pai/skills/profiles/` can be used by other agents.
262
-
263
284
  ## Data Flow
264
285
 
265
286
  ```
266
- User Input ──→ pai add ──→ raw/ (original, immutable)
287
+ pai log ──→ memory/YYYY-MM-DD.md (daily journal, append-only)
288
+
289
+ User Input ──→ pai add ──→ raw/ │ (original, immutable)
290
+ │ │
291
+ ▼ ▼
292
+ pai distill ──→ Extract PINData (1 LLM call)
293
+
294
+
295
+ vault/{topic}.md (structured PINData entries)
296
+
297
+ ┌─────────────┼──────────────┐
298
+ ▼ ▼ ▼
299
+ pai generate pai search pai digest
300
+ │ │
301
+ ▼ ▼
302
+ skills/profiles/*.md memory/## Digest
267
303
 
268
304
 
269
- pai distill ──→ vault/ (structured knowledge, 1:N routing)
305
+ pai distribute ──→ ~/.cursor/rules/ (auto-injected)
270
306
 
271
307
 
272
- pai generate ──→ skills/profiles/*.md
273
-
274
-
275
- pai distribute ──→ ~/.cursor/rules/ (auto-injected)
276
-
277
-
278
- Agent starts ──→ reads identity from rules (Layer 1: passive)
279
- Agent works ──→ pai ask "question" (Layer 2: direct answer) or pai context --task "..." (quick retrieval)
280
- Agent learns ──→ pai add "new lesson" (Layer 3: write-back)
308
+ Agent starts ──→ reads identity from rules (Layer 1: passive)
309
+ Agent works ──→ pai ask / pai context (Layer 2: active retrieval)
310
+ Agent learns ──→ pai log / pai add (Layer 3: write-back)
311
+ ```
312
+
313
+ ### PINData Entry Format
314
+
315
+ Vault stores structured entries, one per line:
316
+
317
+ ```markdown
318
+ - [fact] Vercel project is pin-sandman (2026-02-08 | ref:journal | verified:3)
319
+ - [pref] Prefers pnpm over npm (2026-02-07 | ref:raw/local/xxx.md)
320
+ - [decision] Chose Tailwind over vanilla CSS (2026-02-08 | ref:journal)
321
+ - [entity] Ethan Liu — team member, Prediction module (2026-02-06)
322
+ - [event] ETHDenver 2026 attendance (2026-02-20)
281
323
  ```
282
324
 
325
+ 5 types: `fact` (data points), `pref` (preferences), `decision` (choices), `entity` (people/orgs/projects), `event` (time-bound).
326
+
283
327
  ## Security Notes
284
328
 
285
329
  - All data stays on the user's local machine (`~/.pai/`)
286
330
  - No data is sent anywhere except to the configured LLM API for processing
287
331
  - `OPENAI_API_KEY` is read from environment, never stored in files
288
- - Raw files are immutable after creation (append-only log)
289
- - The user controls what goes in and what gets distilled
332
+ - Raw files are immutable after creation (append-only)
333
+ - Journal files are append-only (new entries inserted before digest)
334
+ - The user controls what goes in and what gets extracted
290
335
 
291
336
  ## Profiles Configuration
292
337
 
293
- Profiles are defined in `~/.pai/config/profiles.json5`. Each profile specifies which vault directories to include and the max output size:
338
+ Profiles are defined in `~/.pai/config/profiles.json5`:
294
339
 
295
340
  ```json5
296
341
  {
297
342
  profiles: {
298
343
  "coding-assistant": {
299
- scope: ["vault/coding/**", "vault/preferences/coding-style.md"],
344
+ scope: ["vault/coding/**", "vault/preferences/**"],
300
345
  maxLines: 30,
301
346
  },
302
347
  "full-context": {
@@ -1,6 +1,6 @@
1
1
  #!/usr/bin/env node
2
2
  import { a as info, c as warn, r as googleOAuth } from "./entry.mjs";
3
- import "./auth-Dtx8Wc3l.mjs";
3
+ import "./auth-DTf1SI8B.mjs";
4
4
  import { google } from "googleapis";
5
5
 
6
6
  //#region src/connectors/google/calendar.ts
@@ -88,4 +88,4 @@ async function syncCalendar(opts = {}) {
88
88
 
89
89
  //#endregion
90
90
  export { syncCalendar };
91
- //# sourceMappingURL=calendar-BHcM4wfQ.mjs.map
91
+ //# sourceMappingURL=calendar-DGJSyErS.mjs.map
@@ -1 +1 @@
1
- {"version":3,"file":"calendar-BHcM4wfQ.mjs","names":[],"sources":["../src/connectors/google/calendar.ts"],"sourcesContent":["/**\n * Google Calendar connector — fetches events and returns CollectorResult[] for raw layer.\n *\n * Default behavior: enumerate ALL user-visible calendars via calendarList.list(),\n * tag each event with the calendar display name so the ask agent can distinguish\n * \"Work\" meetings from \"Holiday\" markers.\n */\n\nimport { google } from \"googleapis\";\nimport type { calendar_v3 } from \"googleapis\";\nimport type { CollectorResult } from \"../../types.js\";\nimport { googleOAuth } from \"../../auth/index.js\";\nimport * as console from \"../../utils/console.js\";\n\nexport interface SyncCalendarOptions {\n /** Calendar IDs to fetch. If omitted, fetches ALL user-visible calendars. */\n calendars?: string[];\n /** Days to look back (default 30). */\n lookbackDays?: number;\n /** Days to look forward (default 90). */\n lookforwardDays?: number;\n}\n\nfunction formatEventTime(start: calendar_v3.Schema$EventDateTime): string {\n const dt = start.dateTime ?? start.date;\n const tz = start.timeZone ?? \"UTC\";\n if (!dt) return \"\";\n return `${dt} (${tz})`;\n}\n\n/** Resolve calendar IDs + display names. If explicit list given, use as-is; otherwise enumerate all. */\nasync function resolveCalendars(\n calendarApi: calendar_v3.Calendar,\n explicitIds?: string[],\n): Promise<{ id: string; name: string }[]> {\n if (explicitIds) {\n return explicitIds.map((id) => ({ id, name: id }));\n }\n\n // Enumerate all user-visible calendars\n const res = await calendarApi.calendarList.list({ showHidden: false });\n const items = res.data.items ?? [];\n return items\n .filter((c) => c.id)\n .map((c) => ({\n id: c.id!,\n name: c.summary ?? 
c.id!,\n }));\n}\n\n/**\n * Sync Google Calendar events and return one CollectorResult per event.\n * Each event includes a **Calendar** field with the display name of its source calendar.\n */\nexport async function syncCalendar(\n opts: SyncCalendarOptions = {},\n): Promise<CollectorResult[]> {\n await googleOAuth.ensureAuthenticated();\n const calendarApi: calendar_v3.Calendar = google.calendar({\n version: \"v3\",\n auth: googleOAuth.getClient(),\n });\n\n const calendars = await resolveCalendars(calendarApi, opts.calendars);\n const lookbackDays = opts.lookbackDays ?? 30;\n const lookforwardDays = opts.lookforwardDays ?? 90;\n\n const now = new Date();\n const timeMin = new Date(\n now.getTime() - lookbackDays * 24 * 60 * 60 * 1000,\n ).toISOString();\n const timeMax = new Date(\n now.getTime() + lookforwardDays * 24 * 60 * 60 * 1000,\n ).toISOString();\n\n console.info(\n `Fetching events from ${calendars.length} calendar(s): ${calendars.map((c) => c.name).join(\", \")}`,\n );\n\n const results: CollectorResult[] = [];\n let failed = 0;\n\n for (const cal of calendars) {\n try {\n const res = await calendarApi.events.list({\n calendarId: cal.id,\n timeMin,\n timeMax,\n singleEvents: true,\n orderBy: \"startTime\",\n maxResults: 250,\n });\n\n const items = res.data.items ?? [];\n for (const event of items) {\n const id = event.id ?? \"\";\n const summary = event.summary ?? \"(no title)\";\n const startStr = event.start\n ? formatEventTime(event.start)\n : \"\";\n const endStr = event.end ? formatEventTime(event.end) : \"\";\n const location = event.location ?? \"\";\n const attendees = (event.attendees ?? [])\n .map((a) => a.email ?? a.displayName ?? \"\")\n .filter(Boolean)\n .join(\", \");\n const status = event.status ?? \"confirmed\";\n\n const lines = [\n `- **Calendar**: ${cal.name}`,\n startStr ? `- **Time**: ${startStr}${endStr ? ` - ${endStr}` : \"\"}` : \"\",\n location ? `- **Location**: ${location}` : \"\",\n attendees ? 
`- **Attendees**: ${attendees}` : \"\",\n `- **Status**: ${status}`,\n ].filter(Boolean);\n\n results.push({\n id,\n title: summary,\n content: lines.join(\"\\n\") || \"(no details)\",\n });\n }\n } catch (err) {\n failed++;\n const msgText = err instanceof Error ? err.message : String(err);\n console.warn(`Calendar fetch failed for ${cal.name} (${cal.id}): ${msgText}`);\n }\n }\n\n if (failed > 0 && calendars.length > failed) {\n console.warn(`Calendar fetch failed for ${failed} of ${calendars.length} calendar(s).`);\n }\n return results;\n}\n"],"mappings":";;;;;;;;;;;;;AAuBA,SAAS,gBAAgB,OAAiD;CACxE,MAAM,KAAK,MAAM,YAAY,MAAM;CACnC,MAAM,KAAK,MAAM,YAAY;AAC7B,KAAI,CAAC,GAAI,QAAO;AAChB,QAAO,GAAG,GAAG,IAAI,GAAG;;;AAItB,eAAe,iBACb,aACA,aACyC;AACzC,KAAI,YACF,QAAO,YAAY,KAAK,QAAQ;EAAE;EAAI,MAAM;EAAI,EAAE;AAMpD,UAFY,MAAM,YAAY,aAAa,KAAK,EAAE,YAAY,OAAO,CAAC,EACpD,KAAK,SAAS,EAAE,EAE/B,QAAQ,MAAM,EAAE,GAAG,CACnB,KAAK,OAAO;EACX,IAAI,EAAE;EACN,MAAM,EAAE,WAAW,EAAE;EACtB,EAAE;;;;;;AAOP,eAAsB,aACpB,OAA4B,EAAE,EACF;AAC5B,OAAM,YAAY,qBAAqB;CACvC,MAAM,cAAoC,OAAO,SAAS;EACxD,SAAS;EACT,MAAM,YAAY,WAAW;EAC9B,CAAC;CAEF,MAAM,YAAY,MAAM,iBAAiB,aAAa,KAAK,UAAU;CACrE,MAAM,eAAe,KAAK,gBAAgB;CAC1C,MAAM,kBAAkB,KAAK,mBAAmB;CAEhD,MAAM,sBAAM,IAAI,MAAM;CACtB,MAAM,2BAAU,IAAI,KAClB,IAAI,SAAS,GAAG,eAAe,KAAK,KAAK,KAAK,IAC/C,EAAC,aAAa;CACf,MAAM,UAAU,IAAI,KAClB,IAAI,SAAS,GAAG,kBAAkB,KAAK,KAAK,KAAK,IAClD,CAAC,aAAa;AAEf,MACE,wBAAwB,UAAU,OAAO,gBAAgB,UAAU,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK,GACjG;CAED,MAAM,UAA6B,EAAE;CACrC,IAAI,SAAS;AAEb,MAAK,MAAM,OAAO,UAChB,KAAI;EAUF,MAAM,SATM,MAAM,YAAY,OAAO,KAAK;GACxC,YAAY,IAAI;GAChB;GACA;GACA,cAAc;GACd,SAAS;GACT,YAAY;GACb,CAAC,EAEgB,KAAK,SAAS,EAAE;AAClC,OAAK,MAAM,SAAS,OAAO;GACzB,MAAM,KAAK,MAAM,MAAM;GACvB,MAAM,UAAU,MAAM,WAAW;GACjC,MAAM,WAAW,MAAM,QACnB,gBAAgB,MAAM,MAAM,GAC5B;GACJ,MAAM,SAAS,MAAM,MAAM,gBAAgB,MAAM,IAAI,GAAG;GACxD,MAAM,WAAW,MAAM,YAAY;GACnC,MAAM,aAAa,MAAM,aAAa,EAAE,EACrC,KAAK,MAAM,EAAE,SAAS,EAAE,eAAe,GAAG,CAC1C,OAAO,QAAQ,CACf,KAAK,KAAK;GACb,MAAM,SAAS,MAAM,UAAU;GAE/B,MAAM,QA
AQ;IACZ,mBAAmB,IAAI;IACvB,WAAW,eAAe,WAAW,SAAS,MAAM,WAAW,OAAO;IACtE,WAAW,mBAAmB,aAAa;IAC3C,YAAY,oBAAoB,cAAc;IAC9C,iBAAiB;IAClB,CAAC,OAAO,QAAQ;AAEjB,WAAQ,KAAK;IACX;IACA,OAAO;IACP,SAAS,MAAM,KAAK,KAAK,IAAI;IAC9B,CAAC;;UAEG,KAAK;AACZ;EACA,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,OAAa,6BAA6B,IAAI,KAAK,IAAI,IAAI,GAAG,KAAK,UAAU;;AAIjF,KAAI,SAAS,KAAK,UAAU,SAAS,OACnC,MAAa,6BAA6B,OAAO,MAAM,UAAU,OAAO,eAAe;AAEzF,QAAO"}
1
+ {"version":3,"file":"calendar-DGJSyErS.mjs","names":[],"sources":["../src/connectors/google/calendar.ts"],"sourcesContent":["/**\n * Google Calendar connector — fetches events and returns CollectorResult[] for raw layer.\n *\n * Default behavior: enumerate ALL user-visible calendars via calendarList.list(),\n * tag each event with the calendar display name so the ask agent can distinguish\n * \"Work\" meetings from \"Holiday\" markers.\n */\n\nimport { google } from \"googleapis\";\nimport type { calendar_v3 } from \"googleapis\";\nimport type { CollectorResult } from \"../../types.js\";\nimport { googleOAuth } from \"../../auth/index.js\";\nimport * as console from \"../../utils/console.js\";\n\nexport interface SyncCalendarOptions {\n /** Calendar IDs to fetch. If omitted, fetches ALL user-visible calendars. */\n calendars?: string[];\n /** Days to look back (default 30). */\n lookbackDays?: number;\n /** Days to look forward (default 90). */\n lookforwardDays?: number;\n}\n\nfunction formatEventTime(start: calendar_v3.Schema$EventDateTime): string {\n const dt = start.dateTime ?? start.date;\n const tz = start.timeZone ?? \"UTC\";\n if (!dt) return \"\";\n return `${dt} (${tz})`;\n}\n\n/** Resolve calendar IDs + display names. If explicit list given, use as-is; otherwise enumerate all. */\nasync function resolveCalendars(\n calendarApi: calendar_v3.Calendar,\n explicitIds?: string[],\n): Promise<{ id: string; name: string }[]> {\n if (explicitIds) {\n return explicitIds.map((id) => ({ id, name: id }));\n }\n\n // Enumerate all user-visible calendars\n const res = await calendarApi.calendarList.list({ showHidden: false });\n const items = res.data.items ?? [];\n return items\n .filter((c) => c.id)\n .map((c) => ({\n id: c.id!,\n name: c.summary ?? 
c.id!,\n }));\n}\n\n/**\n * Sync Google Calendar events and return one CollectorResult per event.\n * Each event includes a **Calendar** field with the display name of its source calendar.\n */\nexport async function syncCalendar(\n opts: SyncCalendarOptions = {},\n): Promise<CollectorResult[]> {\n await googleOAuth.ensureAuthenticated();\n const calendarApi: calendar_v3.Calendar = google.calendar({\n version: \"v3\",\n auth: googleOAuth.getClient(),\n });\n\n const calendars = await resolveCalendars(calendarApi, opts.calendars);\n const lookbackDays = opts.lookbackDays ?? 30;\n const lookforwardDays = opts.lookforwardDays ?? 90;\n\n const now = new Date();\n const timeMin = new Date(\n now.getTime() - lookbackDays * 24 * 60 * 60 * 1000,\n ).toISOString();\n const timeMax = new Date(\n now.getTime() + lookforwardDays * 24 * 60 * 60 * 1000,\n ).toISOString();\n\n console.info(\n `Fetching events from ${calendars.length} calendar(s): ${calendars.map((c) => c.name).join(\", \")}`,\n );\n\n const results: CollectorResult[] = [];\n let failed = 0;\n\n for (const cal of calendars) {\n try {\n const res = await calendarApi.events.list({\n calendarId: cal.id,\n timeMin,\n timeMax,\n singleEvents: true,\n orderBy: \"startTime\",\n maxResults: 250,\n });\n\n const items = res.data.items ?? [];\n for (const event of items) {\n const id = event.id ?? \"\";\n const summary = event.summary ?? \"(no title)\";\n const startStr = event.start\n ? formatEventTime(event.start)\n : \"\";\n const endStr = event.end ? formatEventTime(event.end) : \"\";\n const location = event.location ?? \"\";\n const attendees = (event.attendees ?? [])\n .map((a) => a.email ?? a.displayName ?? \"\")\n .filter(Boolean)\n .join(\", \");\n const status = event.status ?? \"confirmed\";\n\n const lines = [\n `- **Calendar**: ${cal.name}`,\n startStr ? `- **Time**: ${startStr}${endStr ? ` - ${endStr}` : \"\"}` : \"\",\n location ? `- **Location**: ${location}` : \"\",\n attendees ? 
`- **Attendees**: ${attendees}` : \"\",\n `- **Status**: ${status}`,\n ].filter(Boolean);\n\n results.push({\n id,\n title: summary,\n content: lines.join(\"\\n\") || \"(no details)\",\n });\n }\n } catch (err) {\n failed++;\n const msgText = err instanceof Error ? err.message : String(err);\n console.warn(`Calendar fetch failed for ${cal.name} (${cal.id}): ${msgText}`);\n }\n }\n\n if (failed > 0 && calendars.length > failed) {\n console.warn(`Calendar fetch failed for ${failed} of ${calendars.length} calendar(s).`);\n }\n return results;\n}\n"],"mappings":";;;;;;;;;;;;;AAuBA,SAAS,gBAAgB,OAAiD;CACxE,MAAM,KAAK,MAAM,YAAY,MAAM;CACnC,MAAM,KAAK,MAAM,YAAY;AAC7B,KAAI,CAAC,GAAI,QAAO;AAChB,QAAO,GAAG,GAAG,IAAI,GAAG;;;AAItB,eAAe,iBACb,aACA,aACyC;AACzC,KAAI,YACF,QAAO,YAAY,KAAK,QAAQ;EAAE;EAAI,MAAM;EAAI,EAAE;AAMpD,UAFY,MAAM,YAAY,aAAa,KAAK,EAAE,YAAY,OAAO,CAAC,EACpD,KAAK,SAAS,EAAE,EAE/B,QAAQ,MAAM,EAAE,GAAG,CACnB,KAAK,OAAO;EACX,IAAI,EAAE;EACN,MAAM,EAAE,WAAW,EAAE;EACtB,EAAE;;;;;;AAOP,eAAsB,aACpB,OAA4B,EAAE,EACF;AAC5B,OAAM,YAAY,qBAAqB;CACvC,MAAM,cAAoC,OAAO,SAAS;EACxD,SAAS;EACT,MAAM,YAAY,WAAW;EAC9B,CAAC;CAEF,MAAM,YAAY,MAAM,iBAAiB,aAAa,KAAK,UAAU;CACrE,MAAM,eAAe,KAAK,gBAAgB;CAC1C,MAAM,kBAAkB,KAAK,mBAAmB;CAEhD,MAAM,sBAAM,IAAI,MAAM;CACtB,MAAM,2BAAU,IAAI,KAClB,IAAI,SAAS,GAAG,eAAe,KAAK,KAAK,KAAK,IAC/C,EAAC,aAAa;CACf,MAAM,UAAU,IAAI,KAClB,IAAI,SAAS,GAAG,kBAAkB,KAAK,KAAK,KAAK,IAClD,CAAC,aAAa;AAEf,MACE,wBAAwB,UAAU,OAAO,gBAAgB,UAAU,KAAK,MAAM,EAAE,KAAK,CAAC,KAAK,KAAK,GACjG;CAED,MAAM,UAA6B,EAAE;CACrC,IAAI,SAAS;AAEb,MAAK,MAAM,OAAO,UAChB,KAAI;EAUF,MAAM,SATM,MAAM,YAAY,OAAO,KAAK;GACxC,YAAY,IAAI;GAChB;GACA;GACA,cAAc;GACd,SAAS;GACT,YAAY;GACb,CAAC,EAEgB,KAAK,SAAS,EAAE;AAClC,OAAK,MAAM,SAAS,OAAO;GACzB,MAAM,KAAK,MAAM,MAAM;GACvB,MAAM,UAAU,MAAM,WAAW;GACjC,MAAM,WAAW,MAAM,QACnB,gBAAgB,MAAM,MAAM,GAC5B;GACJ,MAAM,SAAS,MAAM,MAAM,gBAAgB,MAAM,IAAI,GAAG;GACxD,MAAM,WAAW,MAAM,YAAY;GACnC,MAAM,aAAa,MAAM,aAAa,EAAE,EACrC,KAAK,MAAM,EAAE,SAAS,EAAE,eAAe,GAAG,CAC1C,OAAO,QAAQ,CACf,KAAK,KAAK;GACb,MAAM,SAAS,MAAM,UAAU;GAE/B,MAAM,QA
AQ;IACZ,mBAAmB,IAAI;IACvB,WAAW,eAAe,WAAW,SAAS,MAAM,WAAW,OAAO;IACtE,WAAW,mBAAmB,aAAa;IAC3C,YAAY,oBAAoB,cAAc;IAC9C,iBAAiB;IAClB,CAAC,OAAO,QAAQ;AAEjB,WAAQ,KAAK;IACX;IACA,OAAO;IACP,SAAS,MAAM,KAAK,KAAK,IAAI;IAC9B,CAAC;;UAEG,KAAK;AACZ;EACA,MAAM,UAAU,eAAe,QAAQ,IAAI,UAAU,OAAO,IAAI;AAChE,OAAa,6BAA6B,IAAI,KAAK,IAAI,IAAI,GAAG,KAAK,UAAU;;AAIjF,KAAI,SAAS,KAAK,UAAU,SAAS,OACnC,MAAa,6BAA6B,OAAO,MAAM,UAAU,OAAO,eAAe;AAEzF,QAAO"}