granola-toolkit 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +218 -0
- package/dist/cli.js +788 -0
- package/package.json +62 -0
package/README.md
ADDED
|
@@ -0,0 +1,218 @@
|
|
|
1
|
+
# granola-toolkit
|
|
2
|
+
|
|
3
|
+
General toolkit to do more with Granola notes and transcripts.
|
|
4
|
+
|
|
5
|
+
## Install
|
|
6
|
+
|
|
7
|
+
From npm:
|
|
8
|
+
|
|
9
|
+
```bash
|
|
10
|
+
npm install -g granola-toolkit
|
|
11
|
+
granola --help
|
|
12
|
+
```
|
|
13
|
+
|
|
14
|
+
Without a global install:
|
|
15
|
+
|
|
16
|
+
```bash
|
|
17
|
+
npx granola-toolkit --help
|
|
18
|
+
```
|
|
19
|
+
|
|
20
|
+
For local development:
|
|
21
|
+
|
|
22
|
+
```bash
|
|
23
|
+
curl -fsSL https://vite.plus | bash
|
|
24
|
+
vp help
|
|
25
|
+
vp install
|
|
26
|
+
```
|
|
27
|
+
|
|
28
|
+
## Run
|
|
29
|
+
|
|
30
|
+
Installed CLI:
|
|
31
|
+
|
|
32
|
+
```bash
|
|
33
|
+
granola --help
|
|
34
|
+
granola notes --help
|
|
35
|
+
granola transcripts --help
|
|
36
|
+
```
|
|
37
|
+
|
|
38
|
+
Local build:
|
|
39
|
+
|
|
40
|
+
```bash
|
|
41
|
+
vp pack
|
|
42
|
+
node dist/cli.js --help
|
|
43
|
+
node dist/cli.js notes --help
|
|
44
|
+
node dist/cli.js transcripts --help
|
|
45
|
+
```
|
|
46
|
+
|
|
47
|
+
You can also use the package scripts:
|
|
48
|
+
|
|
49
|
+
```bash
|
|
50
|
+
npm run build
|
|
51
|
+
npm run notes -- --help
|
|
52
|
+
npm run transcripts -- --help
|
|
53
|
+
```
|
|
54
|
+
|
|
55
|
+
## Examples
|
|
56
|
+
|
|
57
|
+
Export notes:
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
node dist/cli.js notes --supabase "$HOME/Library/Application Support/Granola/supabase.json"
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
Export transcripts:
|
|
64
|
+
|
|
65
|
+
```bash
|
|
66
|
+
node dist/cli.js transcripts --cache "$HOME/Library/Application Support/Granola/cache-v3.json"
|
|
67
|
+
```
|
|
68
|
+
|
|
69
|
+
## How It Works
|
|
70
|
+
|
|
71
|
+
### Notes
|
|
72
|
+
|
|
73
|
+
`notes` exports Granola's generated meeting notes, not the raw transcript.
|
|
74
|
+
|
|
75
|
+
The flow is:
|
|
76
|
+
|
|
77
|
+
1. read your local `supabase.json`
|
|
78
|
+
2. extract the WorkOS access token from it
|
|
79
|
+
3. call Granola's paginated documents API
|
|
80
|
+
4. choose the best available note content for each document
|
|
81
|
+
5. convert ProseMirror content into Markdown
|
|
82
|
+
6. write one Markdown file per document into the output directory
|
|
83
|
+
|
|
84
|
+
Content is chosen in this order:
|
|
85
|
+
|
|
86
|
+
1. `notes`
|
|
87
|
+
2. `last_viewed_panel.content`
|
|
88
|
+
3. `last_viewed_panel.original_content`
|
|
89
|
+
4. raw `content`
|
|
90
|
+
|
|
91
|
+
Each note file includes:
|
|
92
|
+
|
|
93
|
+
- YAML frontmatter with the document id, created timestamp, updated timestamp, and tags
|
|
94
|
+
- a top-level heading from the note title
|
|
95
|
+
- converted note body content
|
|
96
|
+
|
|
97
|
+
### Transcripts
|
|
98
|
+
|
|
99
|
+
`transcripts` exports Granola's locally cached transcript segments.
|
|
100
|
+
|
|
101
|
+
The flow is:
|
|
102
|
+
|
|
103
|
+
1. read Granola's cache JSON from disk
|
|
104
|
+
2. parse the cache payload, whether it is double-encoded or already an object
|
|
105
|
+
3. match transcript segments to documents by document id
|
|
106
|
+
4. format segments as `[HH:MM:SS] Speaker: Text`
|
|
107
|
+
5. write one `.txt` file per document into the output directory
|
|
108
|
+
|
|
109
|
+
Speaker labels are currently normalised to:
|
|
110
|
+
|
|
111
|
+
- `You` for `microphone`
|
|
112
|
+
- `System` for everything else
|
|
113
|
+
|
|
114
|
+
### Incremental Writes
|
|
115
|
+
|
|
116
|
+
Both commands are incremental. They only rewrite an export file when the source document appears newer than the file already on disk.
|
|
117
|
+
|
|
118
|
+
That means repeated runs are cheap, and you can safely point the CLI at the same output directory over time.
|
|
119
|
+
|
|
120
|
+
## Config
|
|
121
|
+
|
|
122
|
+
The CLI reads configuration in this order:
|
|
123
|
+
|
|
124
|
+
1. command-line flags
|
|
125
|
+
2. environment variables
|
|
126
|
+
3. `.granola.toml`
|
|
127
|
+
4. platform defaults
|
|
128
|
+
|
|
129
|
+
Supported config keys:
|
|
130
|
+
|
|
131
|
+
```toml
|
|
132
|
+
debug = true
|
|
133
|
+
supabase = "/Users/yourname/Library/Application Support/Granola/supabase.json"
|
|
134
|
+
output = "./notes"
|
|
135
|
+
timeout = "2m"
|
|
136
|
+
cache-file = "/Users/yourname/Library/Application Support/Granola/cache-v3.json"
|
|
137
|
+
transcript-output = "./transcripts"
|
|
138
|
+
```
|
|
139
|
+
|
|
140
|
+
Supported environment variables:
|
|
141
|
+
|
|
142
|
+
- `DEBUG_MODE`
|
|
143
|
+
- `SUPABASE_FILE`
|
|
144
|
+
- `OUTPUT`
|
|
145
|
+
- `TIMEOUT`
|
|
146
|
+
- `CACHE_FILE`
|
|
147
|
+
- `TRANSCRIPT_OUTPUT`
|
|
148
|
+
|
|
149
|
+
## What Changed In The Port
|
|
150
|
+
|
|
151
|
+
This port deliberately preserves the Go repo's architecture, but it also fixes a few obvious rough edges instead of copying them blindly:
|
|
152
|
+
|
|
153
|
+
- deterministic export ordering, so duplicate-title suffixes are stable across runs
|
|
154
|
+
- shared filename sanitisation between notes and transcripts
|
|
155
|
+
- cross-platform default path discovery for both `supabase.json` and cache files
|
|
156
|
+
- HTML fallback for note export is converted into readable Markdown-ish text instead of being dumped raw
|
|
157
|
+
- transcript timestamps preserve the original clock time instead of being normalised to UTC
|
|
158
|
+
|
|
159
|
+
## Verify
|
|
160
|
+
|
|
161
|
+
```bash
|
|
162
|
+
vp check
|
|
163
|
+
vp test
|
|
164
|
+
vp pack
|
|
165
|
+
npm pack --dry-run
|
|
166
|
+
```
|
|
167
|
+
|
|
168
|
+
`vp build` is for web apps. This repo publishes a CLI bundle, so the correct build step here is `vp pack`.
|
|
169
|
+
|
|
170
|
+
## Publishing
|
|
171
|
+
|
|
172
|
+
Any push to `main` with a package version that is not already on npm becomes a publish candidate automatically. The workflow verifies the build, checks whether `package.json` contains an unpublished version, and then pauses in the `production` environment until someone approves the deployment review in GitHub.
|
|
173
|
+
|
|
174
|
+
That means you can use either flow:
|
|
175
|
+
|
|
176
|
+
- merge a PR that already includes the version bump
|
|
177
|
+
- run the local release helper on `main`
|
|
178
|
+
|
|
179
|
+
Local release helper:
|
|
180
|
+
|
|
181
|
+
```bash
|
|
182
|
+
npm run release
|
|
183
|
+
```
|
|
184
|
+
|
|
185
|
+
That script:
|
|
186
|
+
|
|
187
|
+
1. verifies the git working tree is clean
|
|
188
|
+
2. verifies you are on `main`
|
|
189
|
+
3. bumps the package version with `npm version --no-git-tag-version`
|
|
190
|
+
4. commits and pushes the release commit
|
|
191
|
+
5. lets the push-to-`main` workflow create a publish candidate automatically
|
|
192
|
+
|
|
193
|
+
You can also choose the bump type explicitly:
|
|
194
|
+
|
|
195
|
+
```bash
|
|
196
|
+
npm run release patch
|
|
197
|
+
npm run release minor
|
|
198
|
+
npm run release major
|
|
199
|
+
```
|
|
200
|
+
|
|
201
|
+
The GitHub Actions release job then:
|
|
202
|
+
|
|
203
|
+
- installs dependencies with Vite+ via `setup-vp`
|
|
204
|
+
- runs `vp check`, `vp test`, `vp pack`, and `npm pack --dry-run`
|
|
205
|
+
- checks npm first and skips the publish job if that exact version already exists
|
|
206
|
+
- waits for approval on the `production` environment before npm credentials are exposed
|
|
207
|
+
- publishes to npm using `NPM_TOKEN`
|
|
208
|
+
- tags the published version as `v<version>`
|
|
209
|
+
|
|
210
|
+
### GitHub Setup
|
|
211
|
+
|
|
212
|
+
To enable the "Review deployments" approval dialog, configure this once in GitHub:
|
|
213
|
+
|
|
214
|
+
1. create a `production` environment in repository Settings -> Environments
|
|
215
|
+
2. add required reviewers to that environment
|
|
216
|
+
3. add `NPM_TOKEN` as an environment secret on `production`
|
|
217
|
+
|
|
218
|
+
After that, merges to `main` that contain a new unpublished version will stop at "Review deployments". Approving that deployment is what allows the npm publish step to run.
|
package/dist/cli.js
ADDED
|
@@ -0,0 +1,788 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
|
|
3
|
+
import { existsSync } from "node:fs";
|
|
4
|
+
import { homedir } from "node:os";
|
|
5
|
+
import { dirname, join } from "node:path";
|
|
6
|
+
//#region src/utils.ts
|
|
7
|
+
const INVALID_FILENAME_CHARS = /[<>:"/\\|?*]/g;
|
|
8
|
+
const CONTROL_CHARACTERS = /\p{Cc}/gu;
|
|
9
|
+
const MULTIPLE_UNDERSCORES = /_+/g;
|
|
10
|
+
const MULTIPLE_BLANK_LINES = /\n{3,}/g;
|
|
11
|
+
// Convert Windows (\r\n) and old-Mac (\r) line endings to plain \n.
function normaliseNewlines(value) {
	return value.split(/\r\n?/).join("\n");
}
|
|
14
|
+
// Quote a string for use in YAML frontmatter. JSON-style double-quoted
// strings (with escaped quotes, backslashes and control characters) are a
// valid subset of YAML scalars, so JSON.stringify is a safe quoter here.
function quoteYamlString(value) {
	return JSON.stringify(value);
}
|
|
17
|
+
/**
 * Make `name` safe to use as a filename: replace reserved and control
 * characters with underscores, collapse runs of underscores, strip any
 * leading/trailing underscores, substitute `fallback` when nothing is
 * left, and cap the result at 100 characters.
 */
function sanitiseFilename(name, fallback = "untitled") {
	const cleaned = name
		.trim()
		.replace(INVALID_FILENAME_CHARS, "_")
		.replace(CONTROL_CHARACTERS, "_")
		.replace(MULTIPLE_UNDERSCORES, "_")
		.replace(/^_+|_+$/g, "");
	return (cleaned || fallback).slice(0, 100);
}
|
|
20
|
+
/**
 * Return `filename` unchanged the first time it is seen, and a suffixed
 * variant ("name_2", "name_3", ...) on later collisions.
 *
 * @param {string} filename - candidate filename (already sanitised).
 * @param {Map<string, number>} used - mutable collision counter keyed by filename.
 * @returns {string} a filename not previously returned for this map.
 */
function makeUniqueFilename(filename, used) {
	const currentCount = used.get(filename) ?? 0;
	if (currentCount === 0) {
		used.set(filename, 1);
		return filename;
	}
	// Bug fix: the suffix must be derived from the colliding filename itself;
	// the published template was the corrupted literal `$(unknown)_N`, so every
	// duplicate title exported as "$(unknown)_2", "$(unknown)_3", ...
	const unique = `${filename}_${currentCount + 1}`;
	used.set(filename, currentCount + 1);
	used.set(unique, 1);
	return unique;
}
|
|
31
|
+
/**
 * Natural-order, case-insensitive string comparison used for stable
 * export ordering (e.g. "file2" sorts before "file10").
 */
function compareStrings(left, right) {
	const collation = { numeric: true, sensitivity: "base" };
	return left.localeCompare(right, undefined, collation);
}
|
|
37
|
+
/**
 * Return the first candidate path that exists on disk; when none exist,
 * fall back to the first candidate as the best-guess default.
 */
function firstExistingPath(candidates) {
	const found = candidates.find((candidate) => existsSync(candidate));
	return found ?? candidates[0];
}
|
|
41
|
+
function granolaSupabaseCandidates() {
|
|
42
|
+
const home = homedir();
|
|
43
|
+
const appData = process.env.APPDATA;
|
|
44
|
+
switch (process.platform) {
|
|
45
|
+
case "darwin": return [join(home, "Library", "Application Support", "Granola", "supabase.json")];
|
|
46
|
+
case "win32": return appData ? [join(appData, "Granola", "supabase.json")] : [];
|
|
47
|
+
default: return [join(home, ".config", "Granola", "supabase.json"), join(home, ".local", "share", "Granola", "supabase.json")];
|
|
48
|
+
}
|
|
49
|
+
}
|
|
50
|
+
function granolaCacheCandidates() {
|
|
51
|
+
const home = homedir();
|
|
52
|
+
const appData = process.env.APPDATA;
|
|
53
|
+
switch (process.platform) {
|
|
54
|
+
case "darwin": return [
|
|
55
|
+
join(home, "Library", "Application Support", "Granola", "cache-v3.json"),
|
|
56
|
+
join(home, "Library", "Application Support", "Granola", "cache-v6.json"),
|
|
57
|
+
join(home, "Library", "Application Support", "Granola", "cache.json")
|
|
58
|
+
];
|
|
59
|
+
case "win32": return appData ? [
|
|
60
|
+
join(appData, "Granola", "cache-v3.json"),
|
|
61
|
+
join(appData, "Granola", "cache-v6.json"),
|
|
62
|
+
join(appData, "Granola", "cache.json")
|
|
63
|
+
] : [];
|
|
64
|
+
default: return [
|
|
65
|
+
join(home, ".config", "Granola", "cache-v3.json"),
|
|
66
|
+
join(home, ".config", "Granola", "cache-v6.json"),
|
|
67
|
+
join(home, ".config", "Granola", "cache.json"),
|
|
68
|
+
join(home, ".local", "share", "Granola", "cache-v3.json"),
|
|
69
|
+
join(home, ".local", "share", "Granola", "cache-v6.json"),
|
|
70
|
+
join(home, ".local", "share", "Granola", "cache.json")
|
|
71
|
+
];
|
|
72
|
+
}
|
|
73
|
+
}
|
|
74
|
+
/**
 * Parse a Go-style duration string ("2m", "1h30m", "250ms") into
 * milliseconds. A bare integer string is treated as milliseconds.
 * Throws when the input is not fully consumed by duration tokens.
 */
function parseDuration(value) {
	const input = value.trim();
	if (/^\d+$/.test(input)) return Number(input);
	const UNIT_MS = {
		ms: 1,
		s: 1000,
		m: 60000,
		h: 3600000,
		d: 86400000
	};
	const tokens = [...input.matchAll(/(\d+(?:\.\d+)?)(ms|s|m|h|d)/g)];
	const consumed = tokens.reduce((length, token) => length + token[0].length, 0);
	// Reject inputs with leftover characters between/around tokens ("1m oops").
	if (tokens.length === 0 || consumed !== input.length) throw new Error(`invalid duration: ${value}`);
	let totalMs = 0;
	for (const [, amount, unit] of tokens) {
		const multiplier = UNIT_MS[unit];
		if (multiplier == null) throw new Error(`invalid duration: ${value}`);
		totalMs += Number(amount) * multiplier;
	}
	return totalMs;
}
|
|
94
|
+
// Narrow an unknown value to a plain object record; anything else
// (null, arrays, primitives, functions) yields undefined.
function asRecord(value) {
	if (value == null || typeof value !== "object" || Array.isArray(value)) return undefined;
	return value;
}
|
|
97
|
+
// Coerce an unknown value to a string; non-strings default to "".
function stringValue(value) {
	if (typeof value === "string") return value;
	return "";
}
|
|
100
|
+
// Keep only the string elements of an array; non-arrays yield [].
function stringArray(value) {
	return Array.isArray(value) ? value.filter((item) => typeof item === "string") : [];
}
|
|
104
|
+
// JSON.parse that returns undefined instead of throwing on bad input.
function parseJsonString(value) {
	try {
		return JSON.parse(value);
	} catch {
		return undefined;
	}
}
|
|
111
|
+
/**
 * Extract a HH:MM:SS clock time from an ISO-ish timestamp string.
 * Prefers the literal time embedded after "T" (preserving the original
 * local clock time); falls back to UTC via Date parsing, and returns
 * the input unchanged when it cannot be parsed at all.
 */
function formatTimestampForTranscript(timestamp) {
	const embedded = /T(\d{2}:\d{2}:\d{2})/.exec(timestamp);
	if (embedded?.[1]) return embedded[1];
	const parsedDate = new Date(timestamp);
	return Number.isNaN(parsedDate.getTime()) ? timestamp : parsedDate.toISOString().slice(11, 19);
}
|
|
118
|
+
/**
 * Decode the small set of named/numeric HTML entities that appear in
 * Granola's HTML note fallback content.
 *
 * Bug fix: in the published bundle the entity names had themselves been
 * entity-decoded (e.g. `.replaceAll("&", "&")` and `.replaceAll(""", ...)`),
 * making the function a no-op / syntactically broken. `&amp;` is decoded
 * LAST so double-encoded text such as "&amp;lt;" decodes once, to "&lt;",
 * not twice.
 */
function decodeHtmlEntities(value) {
	return value
		.replaceAll("&nbsp;", " ")
		.replaceAll("&lt;", "<")
		.replaceAll("&gt;", ">")
		.replaceAll("&quot;", "\"")
		.replaceAll("&#39;", "'")
		.replaceAll("&amp;", "&");
}
|
|
121
|
+
function htmlToMarkdownFallback(value) {
|
|
122
|
+
let output = normaliseNewlines(decodeHtmlEntities(value));
|
|
123
|
+
output = output.replace(/<br\s*\/?>/gi, "\n");
|
|
124
|
+
output = output.replace(/<li\b[^>]*>/gi, "- ");
|
|
125
|
+
output = output.replace(/<\/li>/gi, "\n");
|
|
126
|
+
output = output.replace(/<h1\b[^>]*>/gi, "# ");
|
|
127
|
+
output = output.replace(/<h2\b[^>]*>/gi, "## ");
|
|
128
|
+
output = output.replace(/<h3\b[^>]*>/gi, "### ");
|
|
129
|
+
output = output.replace(/<h4\b[^>]*>/gi, "#### ");
|
|
130
|
+
output = output.replace(/<h5\b[^>]*>/gi, "##### ");
|
|
131
|
+
output = output.replace(/<h6\b[^>]*>/gi, "###### ");
|
|
132
|
+
output = output.replace(/<\/(p|div|section|article|ul|ol|blockquote|h[1-6])>/gi, "\n\n");
|
|
133
|
+
output = output.replace(/<[^>]+>/g, "");
|
|
134
|
+
output = output.replace(/[ \t]+\n/g, "\n");
|
|
135
|
+
output = output.replace(MULTIPLE_BLANK_LINES, "\n\n");
|
|
136
|
+
return output.trim();
|
|
137
|
+
}
|
|
138
|
+
/**
 * Pick the most recent timestamp among a document's own `updatedAt` and
 * its last-viewed panel's `updatedAt` / `contentUpdatedAt`, used to decide
 * whether an export file on disk is stale.
 *
 * Returns `document.updatedAt` when no candidate is truthy. NOTE(review):
 * when a candidate string is not a parseable date, the sort comparator
 * yields NaN and the relative ordering of those entries is
 * implementation-defined — confirm upstream timestamps are always ISO.
 */
function latestDocumentTimestamp(document) {
	const candidates = [
		document.updatedAt,
		document.lastViewedPanel?.updatedAt,
		document.lastViewedPanel?.contentUpdatedAt
	].filter((value) => Boolean(value));
	// Sort newest-first by parsed epoch time.
	candidates.sort((left, right) => {
		const leftTime = new Date(left).getTime();
		return new Date(right).getTime() - leftTime;
	});
	return candidates[0] ?? document.updatedAt;
}
|
|
150
|
+
/**
 * Decide whether an export file should be (re)written: true when the
 * source document's `updatedAt` is strictly newer than the file's mtime.
 *
 * Deliberately permissive: an unparseable `updatedAt`, or any stat
 * failure (most commonly the file not existing yet), returns true so
 * the export is written rather than silently skipped.
 *
 * @param {string} filePath - export file to compare against.
 * @param {string} updatedAt - source document timestamp (ISO-ish string).
 * @returns {Promise<boolean>} whether the caller should write the file.
 */
async function shouldWriteFile(filePath, updatedAt) {
	try {
		const existing = await stat(filePath);
		const updatedTime = new Date(updatedAt);
		// Unparseable timestamp: err on the side of rewriting.
		if (Number.isNaN(updatedTime.getTime())) return true;
		return updatedTime.getTime() > existing.mtime.getTime();
	} catch {
		// stat failed (file missing, permissions, ...): write it.
		return true;
	}
}
|
|
160
|
+
async function writeTextFile(filePath, content) {
|
|
161
|
+
await mkdir(dirname(filePath), { recursive: true });
|
|
162
|
+
await writeFile(filePath, content, "utf8");
|
|
163
|
+
}
|
|
164
|
+
async function ensureDirectory(pathname) {
|
|
165
|
+
await mkdir(pathname, { recursive: true });
|
|
166
|
+
}
|
|
167
|
+
async function readUtf8(pathname) {
|
|
168
|
+
return await readFile(pathname, "utf8");
|
|
169
|
+
}
|
|
170
|
+
// Map a transcript segment's source to a display label: the microphone
// channel is the local user ("You"); every other source is "System".
function transcriptSpeakerLabel(segment) {
	if (segment.source === "microphone") return "You";
	return "System";
}
|
|
173
|
+
//#endregion
|
|
174
|
+
//#region src/api.ts
|
|
175
|
+
const USER_AGENT = "Granola/5.354.0";
|
|
176
|
+
const CLIENT_VERSION = "5.354.0";
|
|
177
|
+
const DOCUMENTS_URL = "https://api.granola.ai/v2/get-documents";
|
|
178
|
+
function parseProseMirrorDoc(value, options = {}) {
|
|
179
|
+
if (value == null) return;
|
|
180
|
+
if (typeof value === "string") {
|
|
181
|
+
const trimmed = value.trim();
|
|
182
|
+
if (!trimmed) return;
|
|
183
|
+
if (options.skipHtmlStrings && trimmed.startsWith("<")) return;
|
|
184
|
+
const parsed = parseJsonString(trimmed);
|
|
185
|
+
if (!parsed) return;
|
|
186
|
+
return parseProseMirrorDoc(parsed, options);
|
|
187
|
+
}
|
|
188
|
+
const record = asRecord(value);
|
|
189
|
+
if (!record || record.type !== "doc") return;
|
|
190
|
+
return record;
|
|
191
|
+
}
|
|
192
|
+
function parseLastViewedPanel(value) {
|
|
193
|
+
const panel = asRecord(value);
|
|
194
|
+
if (!panel) return;
|
|
195
|
+
return {
|
|
196
|
+
affinityNoteId: stringValue(panel.affinity_note_id),
|
|
197
|
+
content: parseProseMirrorDoc(panel.content, { skipHtmlStrings: true }),
|
|
198
|
+
contentUpdatedAt: stringValue(panel.content_updated_at),
|
|
199
|
+
createdAt: stringValue(panel.created_at),
|
|
200
|
+
deletedAt: stringValue(panel.deleted_at),
|
|
201
|
+
documentId: stringValue(panel.document_id),
|
|
202
|
+
generatedLines: Array.isArray(panel.generated_lines) ? panel.generated_lines : [],
|
|
203
|
+
id: stringValue(panel.id),
|
|
204
|
+
lastViewedAt: stringValue(panel.last_viewed_at),
|
|
205
|
+
originalContent: stringValue(panel.original_content),
|
|
206
|
+
suggestedQuestions: panel.suggested_questions,
|
|
207
|
+
templateSlug: stringValue(panel.template_slug),
|
|
208
|
+
title: stringValue(panel.title),
|
|
209
|
+
updatedAt: stringValue(panel.updated_at)
|
|
210
|
+
};
|
|
211
|
+
}
|
|
212
|
+
function getAccessToken(supabaseContents) {
|
|
213
|
+
const wrapper = parseJsonString(supabaseContents);
|
|
214
|
+
if (!wrapper) throw new Error("failed to parse supabase.json");
|
|
215
|
+
const workosTokens = wrapper.workos_tokens;
|
|
216
|
+
let tokenPayload;
|
|
217
|
+
if (typeof workosTokens === "string") tokenPayload = parseJsonString(workosTokens);
|
|
218
|
+
else tokenPayload = asRecord(workosTokens);
|
|
219
|
+
const accessToken = tokenPayload ? stringValue(tokenPayload.access_token) : "";
|
|
220
|
+
if (!accessToken.trim()) throw new Error("access token not found in supabase.json");
|
|
221
|
+
return accessToken;
|
|
222
|
+
}
|
|
223
|
+
function parseDocument(value) {
|
|
224
|
+
const record = asRecord(value);
|
|
225
|
+
if (!record) throw new Error("document payload is not an object");
|
|
226
|
+
return {
|
|
227
|
+
content: stringValue(record.content),
|
|
228
|
+
createdAt: stringValue(record.created_at),
|
|
229
|
+
id: stringValue(record.id),
|
|
230
|
+
lastViewedPanel: parseLastViewedPanel(record.last_viewed_panel),
|
|
231
|
+
notes: parseProseMirrorDoc(record.notes),
|
|
232
|
+
notesPlain: stringValue(record.notes_plain),
|
|
233
|
+
tags: stringArray(record.tags),
|
|
234
|
+
title: stringValue(record.title),
|
|
235
|
+
updatedAt: stringValue(record.updated_at)
|
|
236
|
+
};
|
|
237
|
+
}
|
|
238
|
+
async function fetchDocuments(options) {
|
|
239
|
+
const fetchImpl = options.fetchImpl ?? fetch;
|
|
240
|
+
const accessToken = getAccessToken(options.supabaseContents);
|
|
241
|
+
const documents = [];
|
|
242
|
+
const url = options.url ?? DOCUMENTS_URL;
|
|
243
|
+
const limit = 100;
|
|
244
|
+
let offset = 0;
|
|
245
|
+
for (;;) {
|
|
246
|
+
const signal = AbortSignal.timeout(options.timeoutMs);
|
|
247
|
+
const response = await fetchImpl(url, {
|
|
248
|
+
body: JSON.stringify({
|
|
249
|
+
include_last_viewed_panel: true,
|
|
250
|
+
limit,
|
|
251
|
+
offset
|
|
252
|
+
}),
|
|
253
|
+
headers: {
|
|
254
|
+
Accept: "*/*",
|
|
255
|
+
Authorization: `Bearer ${accessToken}`,
|
|
256
|
+
"Content-Type": "application/json",
|
|
257
|
+
"User-Agent": USER_AGENT,
|
|
258
|
+
"X-Client-Version": CLIENT_VERSION
|
|
259
|
+
},
|
|
260
|
+
method: "POST",
|
|
261
|
+
signal
|
|
262
|
+
});
|
|
263
|
+
if (!response.ok) {
|
|
264
|
+
const body = (await response.text()).slice(0, 500);
|
|
265
|
+
throw new Error(`failed to get documents: ${response.status} ${response.statusText}${body ? `: ${body}` : ""}`);
|
|
266
|
+
}
|
|
267
|
+
const payload = await response.json();
|
|
268
|
+
if (!Array.isArray(payload.docs)) throw new Error("failed to parse documents response");
|
|
269
|
+
const page = payload.docs.map(parseDocument);
|
|
270
|
+
documents.push(...page);
|
|
271
|
+
if (page.length < limit) break;
|
|
272
|
+
offset += limit;
|
|
273
|
+
}
|
|
274
|
+
return documents;
|
|
275
|
+
}
|
|
276
|
+
//#endregion
|
|
277
|
+
//#region src/cache.ts
|
|
278
|
+
function parseCacheDocument(id, value) {
|
|
279
|
+
const record = asRecord(value);
|
|
280
|
+
if (!record) return;
|
|
281
|
+
return {
|
|
282
|
+
createdAt: stringValue(record.created_at),
|
|
283
|
+
id,
|
|
284
|
+
title: stringValue(record.title),
|
|
285
|
+
updatedAt: stringValue(record.updated_at)
|
|
286
|
+
};
|
|
287
|
+
}
|
|
288
|
+
function parseTranscriptSegments(value) {
|
|
289
|
+
if (!Array.isArray(value)) return;
|
|
290
|
+
return value.flatMap((segment) => {
|
|
291
|
+
const record = asRecord(segment);
|
|
292
|
+
if (!record) return [];
|
|
293
|
+
return [{
|
|
294
|
+
documentId: stringValue(record.document_id),
|
|
295
|
+
endTimestamp: stringValue(record.end_timestamp),
|
|
296
|
+
id: stringValue(record.id),
|
|
297
|
+
isFinal: Boolean(record.is_final),
|
|
298
|
+
source: stringValue(record.source),
|
|
299
|
+
startTimestamp: stringValue(record.start_timestamp),
|
|
300
|
+
text: stringValue(record.text)
|
|
301
|
+
}];
|
|
302
|
+
});
|
|
303
|
+
}
|
|
304
|
+
function parseCacheContents(contents) {
|
|
305
|
+
const outer = parseJsonString(contents);
|
|
306
|
+
if (!outer) throw new Error("failed to parse cache JSON");
|
|
307
|
+
const rawCache = outer.cache;
|
|
308
|
+
let cachePayload;
|
|
309
|
+
if (typeof rawCache === "string") cachePayload = parseJsonString(rawCache);
|
|
310
|
+
else cachePayload = asRecord(rawCache);
|
|
311
|
+
const state = cachePayload ? asRecord(cachePayload.state) : void 0;
|
|
312
|
+
if (!state) throw new Error("failed to parse cache state");
|
|
313
|
+
const rawDocuments = asRecord(state.documents) ?? {};
|
|
314
|
+
const rawTranscripts = asRecord(state.transcripts) ?? {};
|
|
315
|
+
const documents = {};
|
|
316
|
+
for (const [id, rawDocument] of Object.entries(rawDocuments)) {
|
|
317
|
+
const document = parseCacheDocument(id, rawDocument);
|
|
318
|
+
if (document) documents[id] = document;
|
|
319
|
+
}
|
|
320
|
+
const transcripts = {};
|
|
321
|
+
for (const [id, rawTranscript] of Object.entries(rawTranscripts)) {
|
|
322
|
+
const segments = parseTranscriptSegments(rawTranscript);
|
|
323
|
+
if (segments) transcripts[id] = segments;
|
|
324
|
+
}
|
|
325
|
+
return {
|
|
326
|
+
documents,
|
|
327
|
+
transcripts
|
|
328
|
+
};
|
|
329
|
+
}
|
|
330
|
+
//#endregion
|
|
331
|
+
//#region src/config.ts
|
|
332
|
+
// Return the trimmed string when it is non-empty; otherwise undefined.
function pickString(value) {
	if (typeof value !== "string") return undefined;
	const trimmed = value.trim();
	return trimmed ? trimmed : undefined;
}
|
|
335
|
+
// Pass booleans through unchanged; anything else becomes undefined.
function pickBoolean(value) {
	return value === true || value === false ? value : undefined;
}
|
|
338
|
+
/**
 * Parse a single TOML value: double-quoted strings get JSON-style
 * unescaping (falling back to a bare slice on parse failure), single-quoted
 * strings are taken literally, then booleans and numbers; anything else is
 * returned verbatim.
 */
function parseTomlScalar(rawValue) {
	const value = rawValue.trim();
	const doubleQuoted = value.startsWith("\"") && value.endsWith("\"");
	const singleQuoted = value.startsWith("'") && value.endsWith("'");
	if (doubleQuoted || singleQuoted) {
		if (doubleQuoted) {
			try {
				return JSON.parse(value);
			} catch {
				return value.slice(1, -1);
			}
		}
		return value.slice(1, -1);
	}
	if (/^(true|false)$/i.test(value)) return value.toLowerCase() === "true";
	if (/^-?\d+(?:\.\d+)?$/.test(value)) return Number(value);
	return value;
}
|
|
352
|
+
/**
 * Minimal TOML reader supporting only top-level `key = value` pairs.
 * Comment lines, blank lines and anything that does not match the
 * pattern are silently skipped.
 */
function parseSimpleToml(contents) {
	const values = {};
	for (const rawLine of contents.split(/\r?\n/)) {
		const line = rawLine.trim();
		if (line.length === 0 || line.startsWith("#")) continue;
		const match = /^([A-Za-z0-9_-]+)\s*=\s*(.+)$/.exec(line);
		if (match === null) continue;
		const key = match[1] ?? "";
		const rawValue = match[2] ?? "";
		values[key] = parseTomlScalar(rawValue);
	}
	return values;
}
|
|
364
|
+
async function loadTomlConfig(configPath) {
|
|
365
|
+
if (configPath) return {
|
|
366
|
+
path: configPath,
|
|
367
|
+
values: parseSimpleToml(await readUtf8(configPath))
|
|
368
|
+
};
|
|
369
|
+
const candidates = [join(process.cwd(), ".granola.toml"), join(homedir(), ".granola.toml")];
|
|
370
|
+
for (const candidate of candidates) if (existsSync(candidate)) return {
|
|
371
|
+
path: candidate,
|
|
372
|
+
values: parseSimpleToml(await readUtf8(candidate))
|
|
373
|
+
};
|
|
374
|
+
return { values: {} };
|
|
375
|
+
}
|
|
376
|
+
// Interpret a boolean-ish environment variable string. Returns true for
// 1/true/yes/on, false for 0/false/no/off (case-insensitive), and
// undefined when unset or unrecognised.
function envFlag(value) {
	if (value == null) return undefined;
	if (/^(1|true|yes|on)$/i.test(value)) return true;
	if (/^(0|false|no|off)$/i.test(value)) return false;
	return undefined;
}
|
|
381
|
+
async function loadConfig(options) {
|
|
382
|
+
const env = options.env ?? process.env;
|
|
383
|
+
const config = await loadTomlConfig(pickString(options.globalFlags.config));
|
|
384
|
+
const configValues = config.values;
|
|
385
|
+
const defaultSupabase = firstExistingPath(granolaSupabaseCandidates());
|
|
386
|
+
const defaultCache = firstExistingPath(granolaCacheCandidates());
|
|
387
|
+
const timeoutValue = pickString(options.subcommandFlags.timeout) ?? pickString(env.TIMEOUT) ?? pickString(configValues.timeout) ?? "2m";
|
|
388
|
+
return {
|
|
389
|
+
configFileUsed: config.path,
|
|
390
|
+
debug: pickBoolean(options.globalFlags.debug) ?? envFlag(env.DEBUG_MODE) ?? pickBoolean(configValues.debug) ?? false,
|
|
391
|
+
notes: {
|
|
392
|
+
output: pickString(options.subcommandFlags.output) ?? pickString(env.OUTPUT) ?? pickString(configValues.output) ?? "./notes",
|
|
393
|
+
timeoutMs: parseDuration(timeoutValue)
|
|
394
|
+
},
|
|
395
|
+
supabase: pickString(options.globalFlags.supabase) ?? pickString(env.SUPABASE_FILE) ?? pickString(configValues.supabase) ?? defaultSupabase,
|
|
396
|
+
transcripts: {
|
|
397
|
+
cacheFile: pickString(options.subcommandFlags.cache) ?? pickString(env.CACHE_FILE) ?? pickString(configValues["cache-file"]) ?? pickString(configValues.cacheFile) ?? defaultCache ?? "",
|
|
398
|
+
output: pickString(options.subcommandFlags.output) ?? pickString(env.TRANSCRIPT_OUTPUT) ?? pickString(configValues["transcript-output"]) ?? pickString(configValues.transcriptOutput) ?? "./transcripts"
|
|
399
|
+
}
|
|
400
|
+
};
|
|
401
|
+
}
|
|
402
|
+
//#endregion
|
|
403
|
+
//#region src/prosemirror.ts
|
|
404
|
+
function repeatIndent(level) {
|
|
405
|
+
return " ".repeat(level);
|
|
406
|
+
}
|
|
407
|
+
function renderInline(nodes = []) {
|
|
408
|
+
return nodes.map((node) => renderInlineNode(node)).join("");
|
|
409
|
+
}
|
|
410
|
+
function applyMarks(text, marks = []) {
|
|
411
|
+
return marks.reduce((current, mark) => {
|
|
412
|
+
switch (mark.type) {
|
|
413
|
+
case "strong": return `**${current}**`;
|
|
414
|
+
case "em": return `*${current}*`;
|
|
415
|
+
case "code": return `\`${current}\``;
|
|
416
|
+
case "strike": return `~~${current}~~`;
|
|
417
|
+
case "link": {
|
|
418
|
+
const href = typeof mark.attrs?.href === "string" ? mark.attrs.href : void 0;
|
|
419
|
+
return href ? `[${current}](${href})` : current;
|
|
420
|
+
}
|
|
421
|
+
default: return current;
|
|
422
|
+
}
|
|
423
|
+
}, text);
|
|
424
|
+
}
|
|
425
|
+
function renderInlineNode(node) {
|
|
426
|
+
switch (node.type) {
|
|
427
|
+
case "text": return applyMarks(node.text ?? "", node.marks);
|
|
428
|
+
case "hardBreak": return " \n";
|
|
429
|
+
default: return applyMarks(renderInline(node.content), node.marks);
|
|
430
|
+
}
|
|
431
|
+
}
|
|
432
|
+
function indentLines(value, level) {
|
|
433
|
+
const indent = repeatIndent(level);
|
|
434
|
+
return value.split("\n").map((line) => line.length === 0 ? line : `${indent}${line}`).join("\n");
|
|
435
|
+
}
|
|
436
|
+
function renderList(items, ordered, indentLevel) {
|
|
437
|
+
return items.map((item, index) => renderListItem(item, ordered ? `${index + 1}.` : "-", indentLevel)).join("\n");
|
|
438
|
+
}
|
|
439
|
+
function renderListItem(node, marker, indentLevel) {
|
|
440
|
+
const children = node.content ?? [];
|
|
441
|
+
const blockChildren = children.filter((child) => child.type !== "bulletList" && child.type !== "orderedList");
|
|
442
|
+
const nestedLists = children.filter((child) => child.type === "bulletList" || child.type === "orderedList");
|
|
443
|
+
const mainText = blockChildren.map((child) => renderBlock(child, indentLevel + 1)).filter(Boolean).join("\n").trim();
|
|
444
|
+
let output = `${`${repeatIndent(indentLevel)}${marker} `}${mainText || ""}`.trimEnd();
|
|
445
|
+
if (nestedLists.length > 0) {
|
|
446
|
+
const nestedText = nestedLists.map((child) => renderBlock(child, indentLevel + 1)).filter(Boolean).map((value) => indentLines(value, 0)).join("\n");
|
|
447
|
+
output = `${output}\n${nestedText}`;
|
|
448
|
+
}
|
|
449
|
+
return output;
|
|
450
|
+
}
|
|
451
|
+
function renderBlock(node, indentLevel) {
|
|
452
|
+
switch (node.type) {
|
|
453
|
+
case "heading": {
|
|
454
|
+
const level = typeof node.attrs?.level === "number" ? node.attrs.level : 1;
|
|
455
|
+
return `${"#".repeat(level)} ${renderInline(node.content).trim()}`.trim();
|
|
456
|
+
}
|
|
457
|
+
case "paragraph": return renderInline(node.content).trim();
|
|
458
|
+
case "bulletList": return renderList(node.content ?? [], false, indentLevel);
|
|
459
|
+
case "orderedList": return renderList(node.content ?? [], true, indentLevel);
|
|
460
|
+
case "listItem": return renderListItem(node, "-", indentLevel);
|
|
461
|
+
case "blockquote": return renderBlocks(node.content ?? [], indentLevel).split("\n").map((line) => line ? `> ${line}` : ">").join("\n").trim();
|
|
462
|
+
case "codeBlock": return `\`\`\`\n${extractPlainText(node).trimEnd()}\n\`\`\``;
|
|
463
|
+
case "horizontalRule": return "---";
|
|
464
|
+
case "hardBreak": return "";
|
|
465
|
+
case "text": return renderInlineNode(node);
|
|
466
|
+
default:
|
|
467
|
+
if (node.content?.length) return renderBlocks(node.content, indentLevel);
|
|
468
|
+
return renderInlineNode(node).trim();
|
|
469
|
+
}
|
|
470
|
+
}
|
|
471
|
+
function renderBlocks(nodes, indentLevel = 0) {
|
|
472
|
+
return nodes.map((node) => renderBlock(node, indentLevel)).filter((value) => value.length > 0).join("\n\n").replace(/\n{3,}/g, "\n\n").trim();
|
|
473
|
+
}
|
|
474
|
+
function extractPlainTextNode(node) {
|
|
475
|
+
switch (node.type) {
|
|
476
|
+
case "hardBreak": return "\n";
|
|
477
|
+
case "text": return node.text ?? "";
|
|
478
|
+
default: return extractPlainText({
|
|
479
|
+
type: "doc",
|
|
480
|
+
content: node.content
|
|
481
|
+
});
|
|
482
|
+
}
|
|
483
|
+
}
|
|
484
|
+
function convertProseMirrorToMarkdown(doc) {
|
|
485
|
+
if (!doc || doc.type !== "doc" || !doc.content?.length) return "";
|
|
486
|
+
const rendered = renderBlocks(doc.content);
|
|
487
|
+
return rendered ? `${rendered}\n` : "";
|
|
488
|
+
}
|
|
489
|
+
/**
 * Extract the plain text of a whole ProseMirror document.
 * List nodes flatten their items one-per-line; top-level nodes are separated
 * by blank lines, with runs of 3+ newlines collapsed to 2.
 */
function extractPlainText(doc) {
	if (!doc || doc.type !== "doc" || !doc.content?.length) return "";
	const parts = doc.content.map((node) => {
		const isList = node.type === "bulletList" || node.type === "orderedList";
		if (isList) {
			const items = (node.content ?? []).map((child) => extractPlainTextNode(child));
			return items.filter(Boolean).join("\n");
		}
		return extractPlainTextNode(node);
	});
	return parts.filter(Boolean).join("\n\n").replace(/\n{3,}/g, "\n\n").trim();
}
|
|
496
|
+
//#endregion
|
|
497
|
+
//#region src/notes.ts
|
|
498
|
+
/**
 * Serialise a Granola document to a Markdown file body with YAML front matter.
 * Content is taken from the first non-empty source, in priority order:
 * ProseMirror notes, last-viewed panel content, its HTML fallback, then the
 * raw `content` string.
 */
function documentToMarkdown(document) {
	const lines = ["---"];
	lines.push(`id: ${quoteYamlString(document.id)}`);
	lines.push(`created: ${quoteYamlString(document.createdAt)}`);
	lines.push(`updated: ${quoteYamlString(document.updatedAt)}`);
	if (document.tags.length > 0) {
		lines.push("tags:");
		for (const tag of document.tags) lines.push(`  - ${quoteYamlString(tag)}`);
	}
	lines.push("---", "");
	const title = document.title.trim();
	if (title) lines.push(`# ${title}`, "");
	const content = convertProseMirrorToMarkdown(document.notes).trim()
		|| convertProseMirrorToMarkdown(document.lastViewedPanel?.content).trim()
		|| htmlToMarkdownFallback(document.lastViewedPanel?.originalContent ?? "").trim()
		|| document.content.trim();
	if (content) lines.push(content);
	return `${lines.join("\n").trimEnd()}\n`;
}
|
|
515
|
+
/** Derive a safe filename for a document, preferring its title over its id. */
function documentFilename(document) {
	const preferred = document.title || document.id;
	return sanitiseFilename(preferred, "untitled");
}
|
|
518
|
+
/**
 * Write each document as a Markdown file under `outputDir`.
 * Files are written in a stable title/id order, filenames are de-duplicated,
 * and up-to-date files (per shouldWriteFile) are skipped.
 * Returns the number of files actually written.
 */
async function writeNotes(documents, outputDir) {
	await ensureDirectory(outputDir);
	// Stable ordering: by display name first, then by id as a tie-breaker.
	const byName = (left, right) => compareStrings(left.title || left.id, right.title || right.id) || compareStrings(left.id, right.id);
	const sorted = [...documents].sort(byName);
	const used = new Map();
	let written = 0;
	for (const document of sorted) {
		const baseName = makeUniqueFilename(documentFilename(document), used);
		const filePath = join(outputDir, `${baseName}.md`);
		if (!await shouldWriteFile(filePath, latestDocumentTimestamp(document))) continue;
		await writeTextFile(filePath, documentToMarkdown(document));
		written += 1;
	}
	return written;
}
|
|
531
|
+
//#endregion
|
|
532
|
+
//#region src/transcripts.ts
|
|
533
|
+
/**
 * Format a document's transcript segments as a plain-text file body:
 * a "=" delimited header (title, id, optional created/updated timestamps,
 * segment count) followed by one "[timestamp] speaker: text" line per segment.
 * Returns "" when there are no segments.
 */
function formatTranscript(document, segments) {
	if (segments.length === 0) return "";
	const divider = "=".repeat(80);
	// NOTE: the original appended a trailing "" here intending a blank line
	// after the header, but `.filter(Boolean)` always stripped it, so the entry
	// was dead code. It is removed; the produced output is unchanged.
	const header = [
		divider,
		document.title || document.id,
		`ID: ${document.id}`,
		document.createdAt ? `Created: ${document.createdAt}` : "",
		document.updatedAt ? `Updated: ${document.updatedAt}` : "",
		`Segments: ${segments.length}`,
		divider
	].filter(Boolean);
	const body = segments.map((segment) => {
		return `[${formatTimestampForTranscript(segment.startTimestamp)}] ${transcriptSpeakerLabel(segment)}: ${segment.text}`;
	});
	return `${[...header, ...body].join("\n").trimEnd()}\n`;
}
|
|
550
|
+
/** Derive a safe transcript filename, preferring the title over the id. */
function transcriptFilename(document) {
	const preferred = document.title || document.id;
	return sanitiseFilename(preferred, "untitled");
}
|
|
553
|
+
/**
 * Write one .txt file per transcript with at least one segment.
 * Entries are sorted by document title/id; documents missing from the cache
 * get a minimal placeholder record. Up-to-date files are skipped.
 * Returns the number of files actually written.
 */
async function writeTranscripts(cacheData, outputDir) {
	await ensureDirectory(outputDir);
	const hasSegments = ([, segments]) => segments.length > 0;
	// Order by display title (falling back to id), then id as a tie-breaker.
	const byTitle = ([leftId], [rightId]) => {
		const leftDocument = cacheData.documents[leftId];
		const rightDocument = cacheData.documents[rightId];
		return compareStrings(leftDocument?.title || leftId, rightDocument?.title || rightId) || compareStrings(leftId, rightId);
	};
	const entries = Object.entries(cacheData.transcripts).filter(hasSegments).sort(byTitle);
	const used = new Map();
	let written = 0;
	for (const [documentId, segments] of entries) {
		// Synthesize a stub document when the cache has no metadata for the id.
		const fallback = {
			createdAt: "",
			id: documentId,
			title: documentId,
			updatedAt: ""
		};
		const document = cacheData.documents[documentId] ?? fallback;
		const fileName = `${makeUniqueFilename(transcriptFilename(document), used)}.txt`;
		const filePath = join(outputDir, fileName);
		if (!await shouldWriteFile(filePath, document.updatedAt)) continue;
		const content = formatTranscript(document, segments);
		if (!content) continue;
		await writeTextFile(filePath, content);
		written += 1;
	}
	return written;
}
|
|
578
|
+
//#endregion
|
|
579
|
+
//#region src/cli.ts
|
|
580
|
+
/**
 * Parse a human-friendly boolean flag value (case-insensitive).
 * Accepts true/1/yes/on and false/0/no/off; throws on anything else.
 */
function parseBooleanValue(value) {
	const normalised = value.toLowerCase();
	if (["true", "1", "yes", "on"].includes(normalised)) return true;
	if (["false", "0", "no", "off"].includes(normalised)) return false;
	throw new Error(`invalid boolean value: ${value}`);
}
|
|
585
|
+
/**
 * Parse command-line args against a flag spec.
 * Recognised `--name` / `--name=value` flags land in `values`; everything
 * else (positionals, unknown flags, everything after a bare "--") lands in
 * `rest`. "-h" is shorthand for help. String flags consume the next token
 * when no inline value is given; a missing value throws.
 */
function parseFlags(args, spec) {
	const values = {};
	const rest = [];
	let cursor = 0;
	while (cursor < args.length) {
		const token = args[cursor];
		cursor += 1;
		if (token === "--") {
			// Everything after a bare "--" is passed through untouched.
			rest.push(...args.slice(cursor));
			break;
		}
		if (token === "-h") {
			values.help = true;
			continue;
		}
		if (!token.startsWith("--")) {
			rest.push(token);
			continue;
		}
		const [name = "", inlineValue] = token.slice(2).split("=", 2);
		const definition = spec[name];
		if (!definition) {
			// Unknown flags are preserved verbatim for later consumers.
			rest.push(token);
			continue;
		}
		if (definition.type === "boolean") {
			values[name] = inlineValue == null ? true : parseBooleanValue(inlineValue);
			continue;
		}
		if (inlineValue != null) {
			values[name] = inlineValue;
			continue;
		}
		// String flag with no "=value": consume the following token.
		const next = args[cursor];
		if (next == null || next.startsWith("--")) throw new Error(`missing value for --${name}`);
		values[name] = next;
		cursor += 1;
	}
	return {
		rest,
		values
	};
}
|
|
627
|
+
/**
 * Split argv into the first recognised command token ("notes"/"transcripts")
 * and the remaining tokens. Only the first match is treated as the command;
 * flags and later matches go to `rest`. `command` is undefined when absent.
 */
function splitCommand(argv) {
	const known = new Set(["notes", "transcripts"]);
	const rest = [];
	let command;
	for (const token of argv) {
		const isCommandToken = command === undefined && !token.startsWith("-") && known.has(token);
		if (isCommandToken) command = token;
		else rest.push(token);
	}
	return {
		command,
		rest
	};
}
|
|
643
|
+
/**
 * Top-level help text for the `granola` CLI. The example invocations embed the
 * first detected Granola data-file location when one exists on this machine,
 * falling back to a generic placeholder path.
 */
function rootHelp() {
	return `Granola CLI

Export your Granola notes and transcripts.

Usage:
  granola <command> [options]

Commands:
  notes          Export Granola notes to Markdown
  transcripts    Export Granola transcripts to text files

Global options:
  --config <path>      Path to .granola.toml
  --debug              Enable debug logging
  --supabase <path>    Path to supabase.json
  -h, --help           Show help

Examples:
  granola notes --supabase "${granolaSupabaseCandidates()[0] ?? "/path/to/supabase.json"}"
  granola transcripts --cache "${granolaCacheCandidates()[0] ?? "/path/to/cache-v3.json"}"
`;
}
|
|
666
|
+
/** Help text for the `granola notes` subcommand. */
function notesHelp() {
	return `Granola notes

Usage:
  granola notes [options]

Options:
  --output <path>      Output directory for Markdown files (default: ./notes)
  --timeout <value>    Request timeout, e.g. 2m, 30s, 120000 (default: 2m)
  --supabase <path>    Path to supabase.json
  --debug              Enable debug logging
  --config <path>      Path to .granola.toml
  -h, --help           Show help
`;
}
|
|
681
|
+
/** Help text for the `granola transcripts` subcommand. */
function transcriptsHelp() {
	return `Granola transcripts

Usage:
  granola transcripts [options]

Options:
  --cache <path>       Path to Granola cache JSON
  --output <path>      Output directory for transcript files (default: ./transcripts)
  --debug              Enable debug logging
  --config <path>      Path to .granola.toml
  -h, --help           Show help
`;
}
|
|
695
|
+
/** Emit debug output to stderr, but only when debug logging is enabled. */
function debug(enabled, ...values) {
	if (!enabled) return;
	console.error("[debug]", ...values);
}
|
|
698
|
+
/**
 * CLI entry point. Splits argv into a command plus flags, dispatches to the
 * `notes` or `transcripts` handler, and returns a process exit code
 * (0 = success, 1 = failure). All errors are caught and printed to stderr
 * rather than propagated.
 */
async function runCli(argv) {
	try {
		const { command, rest } = splitCommand(argv);
		// Global flags may appear anywhere relative to the command token.
		const global = parseFlags(rest, {
			config: { type: "string" },
			debug: { type: "boolean" },
			help: { type: "boolean" },
			supabase: { type: "string" }
		});
		if (global.values.help && !command) {
			console.log(rootHelp());
			return 0;
		}
		if (!command) {
			// No command given: show usage but signal failure.
			console.log(rootHelp());
			return 1;
		}
		switch (command) {
			case "notes": {
				const subcommand = parseFlags(global.rest, {
					help: { type: "boolean" },
					output: { type: "string" },
					timeout: { type: "string" }
				});
				if (subcommand.values.help || global.values.help) {
					console.log(notesHelp());
					return 0;
				}
				// Merge CLI flags with any .granola.toml configuration on disk.
				const config = await loadConfig({
					globalFlags: global.values,
					subcommandFlags: subcommand.values
				});
				if (!config.supabase) throw new Error(`supabase.json not found. Pass --supabase or create .granola.toml. Expected locations include: ${granolaSupabaseCandidates().join(", ")}`);
				debug(config.debug, "using config", config.configFileUsed ?? "(none)");
				debug(config.debug, "supabase", config.supabase);
				debug(config.debug, "timeoutMs", config.notes.timeoutMs);
				debug(config.debug, "output", config.notes.output);
				console.log("Fetching documents from Granola API...");
				// The supabase file contents carry the credentials fetchDocuments needs.
				const documents = await fetchDocuments({
					supabaseContents: await readFile(config.supabase, "utf8"),
					timeoutMs: config.notes.timeoutMs
				});
				console.log(`Exporting ${documents.length} notes to ${config.notes.output}...`);
				const written = await writeNotes(documents, config.notes.output);
				console.log("✓ Export completed successfully");
				debug(config.debug, "notes written", written);
				return 0;
			}
			case "transcripts": {
				const subcommand = parseFlags(global.rest, {
					cache: { type: "string" },
					help: { type: "boolean" },
					output: { type: "string" }
				});
				if (subcommand.values.help || global.values.help) {
					console.log(transcriptsHelp());
					return 0;
				}
				const config = await loadConfig({
					globalFlags: global.values,
					subcommandFlags: subcommand.values
				});
				if (!config.transcripts.cacheFile) throw new Error(`Granola cache file not found. Pass --cache or create .granola.toml. Expected locations include: ${granolaCacheCandidates().join(", ")}`);
				debug(config.debug, "using config", config.configFileUsed ?? "(none)");
				debug(config.debug, "cacheFile", config.transcripts.cacheFile);
				debug(config.debug, "output", config.transcripts.output);
				console.log("Reading Granola cache file...");
				const cacheData = parseCacheContents(await readFile(config.transcripts.cacheFile, "utf8"));
				// Count only documents that actually have transcript segments.
				const transcriptCount = Object.values(cacheData.transcripts).filter((segments) => segments.length > 0).length;
				console.log(`Exporting ${transcriptCount} transcripts to ${config.transcripts.output}...`);
				const written = await writeTranscripts(cacheData, config.transcripts.output);
				console.log("✓ Export completed successfully");
				debug(config.debug, "transcripts written", written);
				return 0;
			}
			default:
				// Unreachable for splitCommand's known set, kept defensively.
				console.log(rootHelp());
				return 1;
		}
	} catch (error) {
		// Normalise arbitrary throwables into a printable message.
		const message = error instanceof Error ? error.message : String(error);
		console.error(message);
		return 1;
	}
}
|
|
783
|
+
//#endregion
|
|
784
|
+
//#region index.ts
|
|
785
|
+
// Run the CLI with the user-supplied arguments and exit with its status code.
process.exit(await runCli(process.argv.slice(2)));
|
|
787
|
+
//#endregion
|
|
788
|
+
export {};
|
package/package.json
ADDED
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "granola-toolkit",
|
|
3
|
+
"version": "0.1.0",
|
|
4
|
+
"description": "CLI toolkit for exporting and working with Granola notes and transcripts",
|
|
5
|
+
"keywords": [
|
|
6
|
+
"cli",
|
|
7
|
+
"granola",
|
|
8
|
+
"notes",
|
|
9
|
+
"transcripts"
|
|
10
|
+
],
|
|
11
|
+
"homepage": "https://github.com/kkarimi/granola-toolkit#readme",
|
|
12
|
+
"bugs": {
|
|
13
|
+
"url": "https://github.com/kkarimi/granola-toolkit/issues"
|
|
14
|
+
},
|
|
15
|
+
"author": "Nima Karimi",
|
|
16
|
+
"repository": {
|
|
17
|
+
"type": "git",
|
|
18
|
+
"url": "git+https://github.com/kkarimi/granola-toolkit.git"
|
|
19
|
+
},
|
|
20
|
+
"bin": {
|
|
21
|
+
"granola": "dist/cli.js"
|
|
22
|
+
},
|
|
23
|
+
"files": [
|
|
24
|
+
"README.md",
|
|
25
|
+
"dist/cli.js"
|
|
26
|
+
],
|
|
27
|
+
"type": "module",
|
|
28
|
+
"main": "dist/cli.js",
|
|
29
|
+
"module": "dist/cli.js",
|
|
30
|
+
"publishConfig": {
|
|
31
|
+
"access": "public"
|
|
32
|
+
},
|
|
33
|
+
"scripts": {
|
|
34
|
+
"build": "vp pack",
|
|
35
|
+
"check": "vp check",
|
|
36
|
+
"dev": "vp pack --watch",
|
|
37
|
+
"fmt": "vp fmt",
|
|
38
|
+
"lint": "vp lint",
|
|
39
|
+
"pack:dry-run": "npm pack --dry-run",
|
|
40
|
+
"prepublishOnly": "vp pack",
|
|
41
|
+
"release": "node scripts/release.mjs",
|
|
42
|
+
"start": "node dist/cli.js",
|
|
43
|
+
"notes": "node dist/cli.js notes",
|
|
44
|
+
"transcripts": "node dist/cli.js transcripts",
|
|
45
|
+
"test": "vp test",
|
|
46
|
+
"typecheck": "vp exec tsc --noEmit",
|
|
47
|
+
"prepare": "vp config"
|
|
48
|
+
},
|
|
49
|
+
"devDependencies": {
|
|
50
|
+
"@types/node": "^25.5.2",
|
|
51
|
+
"typescript": "^5.9.3",
|
|
52
|
+
"vite-plus": "0.1.15"
|
|
53
|
+
},
|
|
54
|
+
"overrides": {
|
|
55
|
+
"vite": "npm:@voidzero-dev/vite-plus-core@latest",
|
|
56
|
+
"vitest": "npm:@voidzero-dev/vite-plus-test@latest"
|
|
57
|
+
},
|
|
58
|
+
"engines": {
|
|
59
|
+
"node": ">=20.0.0"
|
|
60
|
+
},
|
|
61
|
+
"packageManager": "npm@11.7.0"
|
|
62
|
+
}
|