@forwardimpact/basecamp 2.0.0 → 2.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/config/scheduler.json +5 -0
- package/package.json +1 -1
- package/src/basecamp.js +288 -57
- package/template/.claude/agents/chief-of-staff.md +6 -2
- package/template/.claude/agents/concierge.md +2 -3
- package/template/.claude/agents/librarian.md +4 -6
- package/template/.claude/agents/recruiter.md +222 -0
- package/template/.claude/settings.json +0 -4
- package/template/.claude/skills/analyze-cv/SKILL.md +267 -0
- package/template/.claude/skills/create-presentations/SKILL.md +2 -2
- package/template/.claude/skills/create-presentations/references/slide.css +1 -1
- package/template/.claude/skills/create-presentations/scripts/convert-to-pdf.mjs +47 -0
- package/template/.claude/skills/draft-emails/SKILL.md +85 -123
- package/template/.claude/skills/draft-emails/scripts/scan-emails.mjs +66 -0
- package/template/.claude/skills/draft-emails/scripts/send-email.mjs +118 -0
- package/template/.claude/skills/extract-entities/SKILL.md +2 -2
- package/template/.claude/skills/extract-entities/scripts/state.mjs +130 -0
- package/template/.claude/skills/manage-tasks/SKILL.md +242 -0
- package/template/.claude/skills/organize-files/SKILL.md +3 -3
- package/template/.claude/skills/organize-files/scripts/organize-by-type.mjs +105 -0
- package/template/.claude/skills/organize-files/scripts/summarize.mjs +84 -0
- package/template/.claude/skills/process-hyprnote/SKILL.md +2 -2
- package/template/.claude/skills/send-chat/SKILL.md +170 -0
- package/template/.claude/skills/sync-apple-calendar/SKILL.md +5 -5
- package/template/.claude/skills/sync-apple-calendar/scripts/sync.mjs +325 -0
- package/template/.claude/skills/sync-apple-mail/SKILL.md +6 -6
- package/template/.claude/skills/sync-apple-mail/scripts/parse-emlx.mjs +374 -0
- package/template/.claude/skills/sync-apple-mail/scripts/sync.mjs +629 -0
- package/template/.claude/skills/track-candidates/SKILL.md +375 -0
- package/template/.claude/skills/weekly-update/SKILL.md +250 -0
- package/template/CLAUDE.md +63 -40
- package/template/.claude/skills/create-presentations/scripts/convert-to-pdf.js +0 -32
- package/template/.claude/skills/draft-emails/scripts/scan-emails.sh +0 -34
- package/template/.claude/skills/extract-entities/scripts/state.py +0 -100
- package/template/.claude/skills/organize-files/scripts/organize-by-type.sh +0 -42
- package/template/.claude/skills/organize-files/scripts/summarize.sh +0 -21
- package/template/.claude/skills/sync-apple-calendar/scripts/sync.py +0 -242
- package/template/.claude/skills/sync-apple-mail/scripts/parse-emlx.py +0 -104
- package/template/.claude/skills/sync-apple-mail/scripts/sync.py +0 -455
package/template/CLAUDE.md
CHANGED
|
@@ -61,7 +61,9 @@ This directory is a knowledge base. Everything is relative to this root:
|
|
|
61
61
|
│ ├── Organizations/ # Notes on companies and teams
|
|
62
62
|
│ ├── Projects/ # Notes on initiatives and workstreams
|
|
63
63
|
│ ├── Topics/ # Notes on recurring themes
|
|
64
|
-
│
|
|
64
|
+
│ ├── Candidates/ # Recruitment candidate profiles
|
|
65
|
+
│ ├── Tasks/ # Per-person task boards
|
|
66
|
+
│ └── Weeklies/ # Weekly priorities snapshots
|
|
65
67
|
├── .claude/skills/ # Claude Code skill files (auto-discovered)
|
|
66
68
|
├── drafts/ # Email drafts created by the draft-emails skill
|
|
67
69
|
├── USER.md # Your identity (name, email, domain) — gitignored
|
|
@@ -75,20 +77,24 @@ This knowledge base is maintained by a team of agents, each defined in
|
|
|
75
77
|
`.claude/agents/`. They are woken on a schedule by the Basecamp scheduler. Each
|
|
76
78
|
wake, they observe KB state, decide the most valuable action, and execute.
|
|
77
79
|
|
|
78
|
-
| Agent | Domain | Schedule
|
|
79
|
-
| ------------------ | ------------------------------ |
|
|
80
|
-
| **postman** | Email triage and drafts | Every 5 min
|
|
81
|
-
| **concierge** | Meeting prep and transcripts | Every 10 min
|
|
82
|
-
| **librarian** | Knowledge graph maintenance | Every 15 min
|
|
83
|
-
| **
|
|
80
|
+
| Agent | Domain | Schedule | Skills |
|
|
81
|
+
| ------------------ | ------------------------------ | --------------- | -------------------------------------------------------------- |
|
|
82
|
+
| **postman** | Email triage and drafts | Every 5 min | sync-apple-mail, draft-emails |
|
|
83
|
+
| **concierge** | Meeting prep and transcripts | Every 10 min | sync-apple-calendar, meeting-prep, process-hyprnote |
|
|
84
|
+
| **librarian** | Knowledge graph maintenance | Every 15 min | extract-entities, organize-files, manage-tasks |
|
|
85
|
+
| **recruiter** | Engineering recruitment | Every 30 min | track-candidates, analyze-cv, fit-pathway, fit-map |
|
|
86
|
+
| **chief-of-staff** | Daily briefings and priorities | 7am, Mon 7:30am | weekly-update _(Mon)_, _(reads all state for daily briefings)_ |
|
|
84
87
|
|
|
85
|
-
|
|
88
|
+
Each agent writes a triage file to `~/.cache/fit/basecamp/state/` every wake
|
|
89
|
+
cycle. The naming convention is `{agent}_triage.md`:
|
|
86
90
|
|
|
87
|
-
- `postman_triage.md` —
|
|
88
|
-
- `
|
|
89
|
-
- `
|
|
91
|
+
- `postman_triage.md` — email urgency, reply needs, awaiting responses
|
|
92
|
+
- `concierge_triage.md` — schedule, meeting prep status, unprocessed transcripts
|
|
93
|
+
- `librarian_triage.md` — unprocessed files, knowledge graph size
|
|
94
|
+
- `recruiter_triage.md` — candidate pipeline, assessments, track distribution
|
|
90
95
|
|
|
91
|
-
|
|
96
|
+
The **chief-of-staff** reads all four triage files to synthesize daily
|
|
97
|
+
briefings in `knowledge/Briefings/`.
|
|
92
98
|
|
|
93
99
|
## Cache Directory (`~/.cache/fit/basecamp/`)
|
|
94
100
|
|
|
@@ -97,20 +103,21 @@ Synced data and runtime state live outside the knowledge base in
|
|
|
97
103
|
|
|
98
104
|
```
|
|
99
105
|
~/.cache/fit/basecamp/
|
|
100
|
-
├── apple_mail/
|
|
101
|
-
|
|
102
|
-
├──
|
|
103
|
-
|
|
104
|
-
|
|
105
|
-
├──
|
|
106
|
-
|
|
106
|
+
├── apple_mail/ # Synced Apple Mail threads (.md files)
|
|
107
|
+
│ └── attachments/ # Copied email attachments by thread
|
|
108
|
+
├── apple_calendar/ # Synced Apple Calendar events (.json files)
|
|
109
|
+
└── state/ # Runtime state
|
|
110
|
+
├── apple_mail_last_sync # ISO timestamp of last mail sync
|
|
111
|
+
├── graph_processed # TSV of processed files (path<TAB>hash)
|
|
112
|
+
├── postman_triage.md # Agent triage files ({agent}_triage.md)
|
|
113
|
+
├── concierge_triage.md
|
|
114
|
+
├── librarian_triage.md
|
|
115
|
+
└── recruiter_triage.md
|
|
107
116
|
```
|
|
108
117
|
|
|
109
118
|
This separation keeps the knowledge base clean — only the parsed knowledge
|
|
110
|
-
graph, notes, documents, and drafts live in the KB directory. Raw synced data
|
|
111
|
-
|
|
112
|
-
formats (single-value text files, TSV) rather than JSON, making them easy to
|
|
113
|
-
read and write from shell scripts.
|
|
119
|
+
graph, notes, documents, and drafts live in the KB directory. Raw synced data,
|
|
120
|
+
processing state, and agent triage files are cached externally.
|
|
114
121
|
|
|
115
122
|
## How to Access the Knowledge Graph
|
|
116
123
|
|
|
@@ -165,10 +172,9 @@ build a complete picture, then respond. A single note is never the full story.
|
|
|
165
172
|
Synced emails and calendar events are stored in `~/.cache/fit/basecamp/`,
|
|
166
173
|
outside the knowledge base:
|
|
167
174
|
|
|
168
|
-
- **Emails:** `~/.cache/fit/basecamp/apple_mail/`
|
|
169
|
-
|
|
170
|
-
|
|
171
|
-
`~/.cache/fit/basecamp/google_calendar/` — each event is a `.json` file
|
|
175
|
+
- **Emails:** `~/.cache/fit/basecamp/apple_mail/` — each thread is a `.md` file
|
|
176
|
+
- **Calendar:** `~/.cache/fit/basecamp/apple_calendar/` — each event is a
|
|
177
|
+
`.json` file
|
|
172
178
|
|
|
173
179
|
When the user asks about calendar, upcoming meetings, or recent emails, read
|
|
174
180
|
directly from these folders.
|
|
@@ -179,19 +185,36 @@ Skills are auto-discovered by Claude Code from `.claude/skills/`. Each skill is
|
|
|
179
185
|
a `SKILL.md` file inside a named directory. You do NOT need to read them
|
|
180
186
|
manually — Claude Code loads them automatically based on context.
|
|
181
187
|
|
|
182
|
-
Available skills:
|
|
183
|
-
|
|
184
|
-
|
|
185
|
-
|
|
186
|
-
|
|
|
187
|
-
|
|
|
188
|
-
|
|
|
189
|
-
|
|
|
190
|
-
|
|
191
|
-
|
|
192
|
-
|
|
193
|
-
|
|
|
194
|
-
|
|
|
188
|
+
Available skills (grouped by function):
|
|
189
|
+
|
|
190
|
+
**Data pipeline** — sync raw sources into the cache directory:
|
|
191
|
+
|
|
192
|
+
| Skill | Purpose |
|
|
193
|
+
| --------------------- | ------------------------------------------ |
|
|
194
|
+
| `sync-apple-mail` | Sync Mail threads to `.md` via SQLite |
|
|
195
|
+
| `sync-apple-calendar` | Sync Calendar events to `.json` via SQLite |
|
|
196
|
+
|
|
197
|
+
**Knowledge graph** — build and maintain structured notes:
|
|
198
|
+
|
|
199
|
+
| Skill | Purpose |
|
|
200
|
+
| ------------------ | ---------------------------------------- |
|
|
201
|
+
| `extract-entities` | Process synced data into knowledge notes |
|
|
202
|
+
| `manage-tasks` | Per-person task boards with lifecycle |
|
|
203
|
+
| `track-candidates` | Recruitment pipeline from email threads |
|
|
204
|
+
| `analyze-cv` | CV assessment against career framework |
|
|
205
|
+
| `weekly-update` | Weekly priorities from tasks + calendar |
|
|
206
|
+
| `process-hyprnote` | Extract entities from Hyprnote sessions |
|
|
207
|
+
| `organize-files` | Tidy Desktop/Downloads, chain to extract |
|
|
208
|
+
|
|
209
|
+
**Communication** — draft, send, and present:
|
|
210
|
+
|
|
211
|
+
| Skill | Purpose |
|
|
212
|
+
| ---------------------- | ----------------------------------------- |
|
|
213
|
+
| `draft-emails` | Draft email responses with KB context |
|
|
214
|
+
| `send-chat` | Send chat messages via browser automation |
|
|
215
|
+
| `meeting-prep` | Briefings for upcoming meetings |
|
|
216
|
+
| `create-presentations` | Generate PDF slide decks |
|
|
217
|
+
| `doc-collab` | Document creation and editing |
|
|
195
218
|
|
|
196
219
|
## User Identity
|
|
197
220
|
|
|
@@ -1,32 +0,0 @@
|
|
|
1
|
-
// Convert HTML slides to PDF using Playwright.
|
|
2
|
-
//
|
|
3
|
-
// Usage: node scripts/convert-to-pdf.js [input.html] [output.pdf]
|
|
4
|
-
//
|
|
5
|
-
// Defaults:
|
|
6
|
-
// input: /tmp/basecamp-presentation.html
|
|
7
|
-
// output: ~/Desktop/presentation.pdf
|
|
8
|
-
//
|
|
9
|
-
// Requires: npm install playwright && npx playwright install chromium
|
|
10
|
-
|
|
11
|
-
const { chromium } = require("playwright");
|
|
12
|
-
const path = require("path");
|
|
13
|
-
|
|
14
|
-
const input = process.argv[2] || "/tmp/basecamp-presentation.html";
|
|
15
|
-
const output =
|
|
16
|
-
process.argv[3] || path.join(process.env.HOME, "Desktop", "presentation.pdf");
|
|
17
|
-
|
|
18
|
-
(async () => {
|
|
19
|
-
const browser = await chromium.launch();
|
|
20
|
-
const page = await browser.newPage();
|
|
21
|
-
await page.goto(`file://${path.resolve(input)}`, {
|
|
22
|
-
waitUntil: "networkidle",
|
|
23
|
-
});
|
|
24
|
-
await page.pdf({
|
|
25
|
-
path: output,
|
|
26
|
-
width: "1280px",
|
|
27
|
-
height: "720px",
|
|
28
|
-
printBackground: true,
|
|
29
|
-
});
|
|
30
|
-
await browser.close();
|
|
31
|
-
console.log(`Done: ${output}`);
|
|
32
|
-
})();
|
|
@@ -1,34 +0,0 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# Scan for unprocessed emails and output their IDs and subjects.
|
|
3
|
-
#
|
|
4
|
-
# Usage: bash scripts/scan-emails.sh
|
|
5
|
-
#
|
|
6
|
-
# Checks ~/.cache/fit/basecamp/apple_mail/ for email files not yet
|
|
7
|
-
# listed in drafts/drafted or drafts/ignored.
|
|
8
|
-
# Outputs tab-separated: email_id<TAB>subject
|
|
9
|
-
|
|
10
|
-
set -euo pipefail
|
|
11
|
-
|
|
12
|
-
MAIL_DIRS=(
|
|
13
|
-
"$HOME/.cache/fit/basecamp/apple_mail"
|
|
14
|
-
"$HOME/.cache/fit/basecamp/gmail"
|
|
15
|
-
)
|
|
16
|
-
|
|
17
|
-
for dir in "${MAIL_DIRS[@]}"; do
|
|
18
|
-
[ -d "$dir" ] || continue
|
|
19
|
-
for file in "$dir"/*.md; do
|
|
20
|
-
[ -f "$file" ] || continue
|
|
21
|
-
|
|
22
|
-
# Extract ID from filename (without extension)
|
|
23
|
-
EMAIL_ID="$(basename "$file" .md)"
|
|
24
|
-
|
|
25
|
-
# Skip if already drafted or ignored
|
|
26
|
-
rg -qxF "$EMAIL_ID" drafts/drafted 2>/dev/null && continue
|
|
27
|
-
rg -qxF "$EMAIL_ID" drafts/ignored 2>/dev/null && continue
|
|
28
|
-
|
|
29
|
-
# Extract subject from first H1 heading
|
|
30
|
-
SUBJECT="$(rg -m1 '^# ' "$file" 2>/dev/null | sed 's/^# //')"
|
|
31
|
-
|
|
32
|
-
printf '%s\t%s\n' "$EMAIL_ID" "$SUBJECT"
|
|
33
|
-
done
|
|
34
|
-
done
|
|
@@ -1,100 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
"""Manage graph_processed state for entity extraction.
|
|
3
|
-
|
|
4
|
-
Commands:
|
|
5
|
-
check - Find unprocessed or changed source files
|
|
6
|
-
update - Mark a file as processed (updates its hash in state)
|
|
7
|
-
|
|
8
|
-
Usage:
|
|
9
|
-
python3 scripts/state.py check # List new/changed files
|
|
10
|
-
python3 scripts/state.py update <file-path> # Mark file as processed
|
|
11
|
-
python3 scripts/state.py update <file1> <file2> … # Mark multiple files
|
|
12
|
-
|
|
13
|
-
State file: ~/.cache/fit/basecamp/state/graph_processed (TSV: path<TAB>hash)
|
|
14
|
-
"""
|
|
15
|
-
|
|
16
|
-
import hashlib
|
|
17
|
-
import os
|
|
18
|
-
import sys
|
|
19
|
-
from pathlib import Path
|
|
20
|
-
|
|
21
|
-
STATE_FILE = Path.home() / ".cache/fit/basecamp/state/graph_processed"
|
|
22
|
-
SOURCE_DIRS = [
|
|
23
|
-
Path.home() / ".cache/fit/basecamp/apple_mail",
|
|
24
|
-
Path.home() / ".cache/fit/basecamp/apple_calendar",
|
|
25
|
-
]
|
|
26
|
-
|
|
27
|
-
|
|
28
|
-
def file_hash(path):
|
|
29
|
-
"""Compute SHA-256 hash of a file."""
|
|
30
|
-
h = hashlib.sha256()
|
|
31
|
-
with open(path, "rb") as f:
|
|
32
|
-
for chunk in iter(lambda: f.read(8192), b""):
|
|
33
|
-
h.update(chunk)
|
|
34
|
-
return h.hexdigest()
|
|
35
|
-
|
|
36
|
-
|
|
37
|
-
def load_state():
|
|
38
|
-
"""Load the state file into a dict of {path: hash}."""
|
|
39
|
-
state = {}
|
|
40
|
-
if STATE_FILE.exists():
|
|
41
|
-
for line in STATE_FILE.read_text().splitlines():
|
|
42
|
-
parts = line.split("\t", 1)
|
|
43
|
-
if len(parts) == 2:
|
|
44
|
-
state[parts[0]] = parts[1]
|
|
45
|
-
return state
|
|
46
|
-
|
|
47
|
-
|
|
48
|
-
def save_state(state):
|
|
49
|
-
"""Write the full state dict back to the state file."""
|
|
50
|
-
STATE_FILE.parent.mkdir(parents=True, exist_ok=True)
|
|
51
|
-
lines = [f"{path}\t{h}" for path, h in sorted(state.items())]
|
|
52
|
-
STATE_FILE.write_text("\n".join(lines) + "\n" if lines else "")
|
|
53
|
-
|
|
54
|
-
|
|
55
|
-
def check():
|
|
56
|
-
"""Find source files that are new or have changed since last processing."""
|
|
57
|
-
state = load_state()
|
|
58
|
-
new_files = []
|
|
59
|
-
for source_dir in SOURCE_DIRS:
|
|
60
|
-
if not source_dir.is_dir():
|
|
61
|
-
continue
|
|
62
|
-
for f in source_dir.iterdir():
|
|
63
|
-
if not f.is_file():
|
|
64
|
-
continue
|
|
65
|
-
path_str = str(f)
|
|
66
|
-
h = file_hash(f)
|
|
67
|
-
if state.get(path_str) != h:
|
|
68
|
-
new_files.append(path_str)
|
|
69
|
-
for f in sorted(new_files):
|
|
70
|
-
print(f)
|
|
71
|
-
return len(new_files)
|
|
72
|
-
|
|
73
|
-
|
|
74
|
-
def update(file_paths):
|
|
75
|
-
"""Mark files as processed by updating their hashes in state."""
|
|
76
|
-
state = load_state()
|
|
77
|
-
for fp in file_paths:
|
|
78
|
-
p = Path(fp)
|
|
79
|
-
if not p.exists():
|
|
80
|
-
print(f"Warning: File not found: {fp}", file=sys.stderr)
|
|
81
|
-
continue
|
|
82
|
-
state[str(p)] = file_hash(p)
|
|
83
|
-
save_state(state)
|
|
84
|
-
print(f"Updated {len(file_paths)} file(s) in graph state")
|
|
85
|
-
|
|
86
|
-
|
|
87
|
-
if __name__ == "__main__":
|
|
88
|
-
if len(sys.argv) < 2:
|
|
89
|
-
print(__doc__, file=sys.stderr)
|
|
90
|
-
sys.exit(1)
|
|
91
|
-
|
|
92
|
-
cmd = sys.argv[1]
|
|
93
|
-
if cmd == "check":
|
|
94
|
-
count = check()
|
|
95
|
-
print(f"\n{count} file(s) to process", file=sys.stderr)
|
|
96
|
-
elif cmd == "update" and len(sys.argv) >= 3:
|
|
97
|
-
update(sys.argv[2:])
|
|
98
|
-
else:
|
|
99
|
-
print(__doc__, file=sys.stderr)
|
|
100
|
-
sys.exit(1)
|
|
@@ -1,42 +0,0 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# Organize files in a directory by type into subdirectories.
|
|
3
|
-
#
|
|
4
|
-
# Usage: bash scripts/organize-by-type.sh <directory>
|
|
5
|
-
#
|
|
6
|
-
# Creates subdirectories (Documents, Images, Archives, Installers, Screenshots)
|
|
7
|
-
# and moves matching files. Only operates on top-level files (-maxdepth 1).
|
|
8
|
-
# Prints each move with -v flag. Does NOT delete any files.
|
|
9
|
-
|
|
10
|
-
set -euo pipefail
|
|
11
|
-
|
|
12
|
-
if [ $# -ne 1 ]; then
|
|
13
|
-
echo "Usage: bash scripts/organize-by-type.sh <directory>" >&2
|
|
14
|
-
exit 1
|
|
15
|
-
fi
|
|
16
|
-
|
|
17
|
-
DIR="$1"
|
|
18
|
-
|
|
19
|
-
if [ ! -d "$DIR" ]; then
|
|
20
|
-
echo "Error: Directory not found: $DIR" >&2
|
|
21
|
-
exit 1
|
|
22
|
-
fi
|
|
23
|
-
|
|
24
|
-
# Create subdirectories
|
|
25
|
-
mkdir -p "$DIR"/{Documents,Images,Archives,Installers,Screenshots}
|
|
26
|
-
|
|
27
|
-
# Screenshots
|
|
28
|
-
find "$DIR" -maxdepth 1 -type f \( -name "Screenshot*" -o -name "Screen Shot*" \) -exec mv -v {} "$DIR/Screenshots/" \;
|
|
29
|
-
|
|
30
|
-
# Documents
|
|
31
|
-
find "$DIR" -maxdepth 1 -type f \( -name "*.pdf" -o -name "*.doc*" -o -name "*.txt" -o -name "*.md" -o -name "*.rtf" -o -name "*.csv" -o -name "*.xlsx" \) -exec mv -v {} "$DIR/Documents/" \;
|
|
32
|
-
|
|
33
|
-
# Images (excluding screenshots already moved)
|
|
34
|
-
find "$DIR" -maxdepth 1 -type f \( -name "*.png" -o -name "*.jpg" -o -name "*.jpeg" -o -name "*.gif" -o -name "*.webp" \) -exec mv -v {} "$DIR/Images/" \;
|
|
35
|
-
|
|
36
|
-
# Archives
|
|
37
|
-
find "$DIR" -maxdepth 1 -type f \( -name "*.zip" -o -name "*.tar.gz" -o -name "*.rar" \) -exec mv -v {} "$DIR/Archives/" \;
|
|
38
|
-
|
|
39
|
-
# Installers
|
|
40
|
-
find "$DIR" -maxdepth 1 -type f -name "*.dmg" -exec mv -v {} "$DIR/Installers/" \;
|
|
41
|
-
|
|
42
|
-
echo "Organization complete: $DIR"
|
|
@@ -1,21 +0,0 @@
|
|
|
1
|
-
#!/bin/bash
|
|
2
|
-
# Summarize the contents of ~/Desktop/ and ~/Downloads/.
|
|
3
|
-
#
|
|
4
|
-
# Usage: bash scripts/summarize.sh
|
|
5
|
-
#
|
|
6
|
-
# Counts files by type in both directories (top-level only).
|
|
7
|
-
|
|
8
|
-
set -euo pipefail
|
|
9
|
-
|
|
10
|
-
for dir in "$HOME/Desktop" "$HOME/Downloads"; do
|
|
11
|
-
[ -d "$dir" ] || continue
|
|
12
|
-
echo "=== $(basename "$dir") ==="
|
|
13
|
-
echo "Screenshots: $(find "$dir" -maxdepth 1 \( -name 'Screenshot*' -o -name 'Screen Shot*' \) 2>/dev/null | wc -l | tr -d ' ')"
|
|
14
|
-
echo "PDFs: $(find "$dir" -maxdepth 1 -name '*.pdf' 2>/dev/null | wc -l | tr -d ' ')"
|
|
15
|
-
echo "Images: $(find "$dir" -maxdepth 1 \( -name '*.png' -o -name '*.jpg' -o -name '*.jpeg' -o -name '*.gif' -o -name '*.webp' \) 2>/dev/null | wc -l | tr -d ' ')"
|
|
16
|
-
echo "Documents: $(find "$dir" -maxdepth 1 \( -name '*.doc*' -o -name '*.txt' -o -name '*.md' -o -name '*.rtf' \) 2>/dev/null | wc -l | tr -d ' ')"
|
|
17
|
-
echo "Archives: $(find "$dir" -maxdepth 1 \( -name '*.zip' -o -name '*.tar.gz' -o -name '*.rar' \) 2>/dev/null | wc -l | tr -d ' ')"
|
|
18
|
-
echo "Installers: $(find "$dir" -maxdepth 1 -name '*.dmg' 2>/dev/null | wc -l | tr -d ' ')"
|
|
19
|
-
echo "Other: $(find "$dir" -maxdepth 1 -type f ! \( -name 'Screenshot*' -o -name 'Screen Shot*' -o -name '*.pdf' -o -name '*.png' -o -name '*.jpg' -o -name '*.jpeg' -o -name '*.gif' -o -name '*.webp' -o -name '*.doc*' -o -name '*.txt' -o -name '*.md' -o -name '*.rtf' -o -name '*.zip' -o -name '*.tar.gz' -o -name '*.rar' -o -name '*.dmg' -o -name '.DS_Store' -o -name '.localized' \) 2>/dev/null | wc -l | tr -d ' ')"
|
|
20
|
-
echo ""
|
|
21
|
-
done
|
|
@@ -1,242 +0,0 @@
|
|
|
1
|
-
#!/usr/bin/env python3
|
|
2
|
-
"""Sync Apple Calendar events to ~/.cache/fit/basecamp/apple_calendar/ as JSON.
|
|
3
|
-
|
|
4
|
-
Queries the macOS Calendar SQLite database for events in a sliding window
|
|
5
|
-
(past and future) and writes one JSON file per event.
|
|
6
|
-
|
|
7
|
-
Usage: python3 scripts/sync.py [--days N]
|
|
8
|
-
|
|
9
|
-
Options:
|
|
10
|
-
--days N How many days back to sync (default: 30)
|
|
11
|
-
|
|
12
|
-
Requires: macOS with Calendar app configured and Full Disk Access granted.
|
|
13
|
-
"""
|
|
14
|
-
|
|
15
|
-
import argparse
|
|
16
|
-
import json
|
|
17
|
-
import os
|
|
18
|
-
import subprocess
|
|
19
|
-
from datetime import datetime, timezone, timedelta
|
|
20
|
-
|
|
21
|
-
EPOCH = datetime(2001, 1, 1, tzinfo=timezone.utc)
|
|
22
|
-
OUTDIR = os.path.expanduser("~/.cache/fit/basecamp/apple_calendar")
|
|
23
|
-
|
|
24
|
-
DB_PATHS = [
|
|
25
|
-
os.path.expanduser(
|
|
26
|
-
"~/Library/Group Containers/group.com.apple.calendar/Calendar.sqlitedb"
|
|
27
|
-
),
|
|
28
|
-
os.path.expanduser("~/Library/Calendars/Calendar.sqlitedb"),
|
|
29
|
-
]
|
|
30
|
-
|
|
31
|
-
STATUS_MAP = {
|
|
32
|
-
0: "unknown",
|
|
33
|
-
1: "pending",
|
|
34
|
-
2: "accepted",
|
|
35
|
-
3: "declined",
|
|
36
|
-
4: "tentative",
|
|
37
|
-
5: "delegated",
|
|
38
|
-
6: "completed",
|
|
39
|
-
7: "in-process",
|
|
40
|
-
}
|
|
41
|
-
|
|
42
|
-
ROLE_MAP = {0: "unknown", 1: "required", 2: "optional", 3: "chair"}
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
def find_db():
|
|
46
|
-
db = next((p for p in DB_PATHS if os.path.exists(p)), None)
|
|
47
|
-
if not db:
|
|
48
|
-
print("Error: Apple Calendar database not found. Is Calendar configured?")
|
|
49
|
-
exit(1)
|
|
50
|
-
return db
|
|
51
|
-
|
|
52
|
-
|
|
53
|
-
def query(db, sql):
|
|
54
|
-
result = subprocess.run(
|
|
55
|
-
["sqlite3", "-readonly", "-json", db, sql], capture_output=True, text=True
|
|
56
|
-
)
|
|
57
|
-
if result.returncode != 0:
|
|
58
|
-
if "database is locked" in result.stderr:
|
|
59
|
-
import time
|
|
60
|
-
|
|
61
|
-
time.sleep(2)
|
|
62
|
-
result = subprocess.run(
|
|
63
|
-
["sqlite3", "-readonly", "-json", db, sql],
|
|
64
|
-
capture_output=True,
|
|
65
|
-
text=True,
|
|
66
|
-
)
|
|
67
|
-
if result.returncode != 0:
|
|
68
|
-
print(f"SQLite error: {result.stderr.strip()}")
|
|
69
|
-
return []
|
|
70
|
-
return json.loads(result.stdout) if result.stdout.strip() else []
|
|
71
|
-
|
|
72
|
-
|
|
73
|
-
def coredata_to_iso(ts, tz_name=None):
|
|
74
|
-
"""Convert Core Data timestamp to ISO 8601."""
|
|
75
|
-
if ts is None:
|
|
76
|
-
return None
|
|
77
|
-
dt = EPOCH + timedelta(seconds=ts)
|
|
78
|
-
if tz_name and tz_name != "_float":
|
|
79
|
-
try:
|
|
80
|
-
from zoneinfo import ZoneInfo
|
|
81
|
-
|
|
82
|
-
dt = dt.astimezone(ZoneInfo(tz_name))
|
|
83
|
-
except Exception:
|
|
84
|
-
pass
|
|
85
|
-
return dt.isoformat()
|
|
86
|
-
|
|
87
|
-
|
|
88
|
-
def main():
|
|
89
|
-
parser = argparse.ArgumentParser(description="Sync Apple Calendar events.")
|
|
90
|
-
parser.add_argument("--days", type=int, default=30,
|
|
91
|
-
help="How many days back to sync (default: 30)")
|
|
92
|
-
args = parser.parse_args()
|
|
93
|
-
|
|
94
|
-
db = find_db()
|
|
95
|
-
os.makedirs(OUTDIR, exist_ok=True)
|
|
96
|
-
|
|
97
|
-
now = datetime.now(timezone.utc)
|
|
98
|
-
start = now - timedelta(days=args.days)
|
|
99
|
-
end = now + timedelta(days=14)
|
|
100
|
-
START_TS = (start - EPOCH).total_seconds()
|
|
101
|
-
END_TS = (end - EPOCH).total_seconds()
|
|
102
|
-
|
|
103
|
-
# Fetch events with a single query
|
|
104
|
-
events = query(
|
|
105
|
-
db,
|
|
106
|
-
f"""
|
|
107
|
-
SELECT
|
|
108
|
-
ci.ROWID AS id,
|
|
109
|
-
ci.summary,
|
|
110
|
-
ci.start_date,
|
|
111
|
-
ci.end_date,
|
|
112
|
-
ci.start_tz,
|
|
113
|
-
ci.end_tz,
|
|
114
|
-
ci.all_day,
|
|
115
|
-
ci.description,
|
|
116
|
-
ci.has_attendees,
|
|
117
|
-
ci.conference_url,
|
|
118
|
-
loc.title AS location,
|
|
119
|
-
cal.title AS calendar_name,
|
|
120
|
-
org.address AS organizer_email,
|
|
121
|
-
org.display_name AS organizer_name
|
|
122
|
-
FROM CalendarItem ci
|
|
123
|
-
LEFT JOIN Location loc ON loc.ROWID = ci.location_id
|
|
124
|
-
LEFT JOIN Calendar cal ON cal.ROWID = ci.calendar_id
|
|
125
|
-
LEFT JOIN Identity org ON org.ROWID = ci.organizer_id
|
|
126
|
-
WHERE ci.start_date <= {END_TS}
|
|
127
|
-
AND COALESCE(ci.end_date, ci.start_date) >= {START_TS}
|
|
128
|
-
AND ci.summary IS NOT NULL
|
|
129
|
-
AND ci.summary != ''
|
|
130
|
-
ORDER BY ci.start_date ASC
|
|
131
|
-
LIMIT 1000;
|
|
132
|
-
""",
|
|
133
|
-
)
|
|
134
|
-
|
|
135
|
-
# Collect event IDs for batch attendee query
|
|
136
|
-
event_ids = [str(ev["id"]) for ev in events]
|
|
137
|
-
|
|
138
|
-
# Batch-fetch all attendees in one query (avoids N+1)
|
|
139
|
-
attendees_by_event = {}
|
|
140
|
-
if event_ids:
|
|
141
|
-
id_list = ",".join(event_ids)
|
|
142
|
-
attendees_raw = query(
|
|
143
|
-
db,
|
|
144
|
-
f"""
|
|
145
|
-
SELECT
|
|
146
|
-
p.owner_id,
|
|
147
|
-
p.email,
|
|
148
|
-
p.status,
|
|
149
|
-
p.role,
|
|
150
|
-
p.is_self,
|
|
151
|
-
p.entity_type,
|
|
152
|
-
i.display_name
|
|
153
|
-
FROM Participant p
|
|
154
|
-
LEFT JOIN Identity i ON i.ROWID = p.identity_id
|
|
155
|
-
WHERE p.owner_id IN ({id_list})
|
|
156
|
-
AND p.entity_type = 7;
|
|
157
|
-
""",
|
|
158
|
-
)
|
|
159
|
-
for a in attendees_raw:
|
|
160
|
-
oid = a["owner_id"]
|
|
161
|
-
attendees_by_event.setdefault(oid, []).append(a)
|
|
162
|
-
|
|
163
|
-
# Write event JSON files
|
|
164
|
-
written_ids = set()
|
|
165
|
-
for ev in events:
|
|
166
|
-
eid = ev["id"]
|
|
167
|
-
|
|
168
|
-
# Organizer — strip mailto: prefix from Identity.address
|
|
169
|
-
org_email = ev.get("organizer_email") or None
|
|
170
|
-
if org_email and org_email.startswith("mailto:"):
|
|
171
|
-
org_email = org_email[7:]
|
|
172
|
-
|
|
173
|
-
# Attendees
|
|
174
|
-
attendees = []
|
|
175
|
-
for a in attendees_by_event.get(eid, []):
|
|
176
|
-
if not a.get("email"):
|
|
177
|
-
continue
|
|
178
|
-
attendees.append(
|
|
179
|
-
{
|
|
180
|
-
"email": a["email"],
|
|
181
|
-
"name": (a.get("display_name") or "").strip() or None,
|
|
182
|
-
"status": STATUS_MAP.get(a.get("status"), "unknown"),
|
|
183
|
-
"role": ROLE_MAP.get(a.get("role"), "unknown"),
|
|
184
|
-
"self": bool(a.get("is_self")),
|
|
185
|
-
}
|
|
186
|
-
)
|
|
187
|
-
|
|
188
|
-
is_all_day = bool(ev.get("all_day"))
|
|
189
|
-
|
|
190
|
-
event_json = {
|
|
191
|
-
"id": f"apple_cal_{eid}",
|
|
192
|
-
"summary": ev["summary"],
|
|
193
|
-
"start": {
|
|
194
|
-
"dateTime": coredata_to_iso(ev["start_date"], ev.get("start_tz")),
|
|
195
|
-
"timeZone": ev.get("start_tz")
|
|
196
|
-
if ev.get("start_tz") != "_float"
|
|
197
|
-
else None,
|
|
198
|
-
},
|
|
199
|
-
"end": {
|
|
200
|
-
"dateTime": coredata_to_iso(
|
|
201
|
-
ev["end_date"] if ev["end_date"] else ev["start_date"],
|
|
202
|
-
ev.get("end_tz"),
|
|
203
|
-
),
|
|
204
|
-
"timeZone": ev.get("end_tz")
|
|
205
|
-
if ev.get("end_tz") != "_float"
|
|
206
|
-
else None,
|
|
207
|
-
},
|
|
208
|
-
"allDay": is_all_day,
|
|
209
|
-
"location": ev.get("location") or None,
|
|
210
|
-
"description": ev.get("description") or None,
|
|
211
|
-
"conferenceUrl": ev.get("conference_url") or None,
|
|
212
|
-
"calendar": ev.get("calendar_name") or None,
|
|
213
|
-
"organizer": {
|
|
214
|
-
"email": org_email,
|
|
215
|
-
"name": (ev.get("organizer_name") or "").strip() or None,
|
|
216
|
-
}
|
|
217
|
-
if org_email
|
|
218
|
-
else None,
|
|
219
|
-
"attendees": attendees if attendees else None,
|
|
220
|
-
}
|
|
221
|
-
|
|
222
|
-
filepath = os.path.join(OUTDIR, f"{eid}.json")
|
|
223
|
-
with open(filepath, "w") as f:
|
|
224
|
-
json.dump(event_json, f, indent=2)
|
|
225
|
-
written_ids.add(f"{eid}.json")
|
|
226
|
-
|
|
227
|
-
# Clean up events outside the window
|
|
228
|
-
removed = 0
|
|
229
|
-
for fname in os.listdir(OUTDIR):
|
|
230
|
-
if fname.endswith(".json") and fname not in written_ids:
|
|
231
|
-
os.remove(os.path.join(OUTDIR, fname))
|
|
232
|
-
removed += 1
|
|
233
|
-
|
|
234
|
-
print(f"Apple Calendar Sync Complete")
|
|
235
|
-
print(f"Events synced: {len(written_ids)}")
|
|
236
|
-
print(f"Time window: {start.date()} to {end.date()}")
|
|
237
|
-
print(f"Files cleaned up: {removed} (outside window)")
|
|
238
|
-
print(f"Output: {OUTDIR}")
|
|
239
|
-
|
|
240
|
-
|
|
241
|
-
if __name__ == "__main__":
|
|
242
|
-
main()
|