granola-toolkit 0.2.0 → 0.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +13 -7
  2. package/dist/cli.js +344 -80
  3. package/package.json +5 -2
package/README.md CHANGED
@@ -58,12 +58,14 @@ Export notes:
58
58
 
59
59
  ```bash
60
60
  node dist/cli.js notes --supabase "$HOME/Library/Application Support/Granola/supabase.json"
61
+ node dist/cli.js notes --format json --output ./notes-json
61
62
  ```
62
63
 
63
64
  Export transcripts:
64
65
 
65
66
  ```bash
66
67
  node dist/cli.js transcripts --cache "$HOME/Library/Application Support/Granola/cache-v3.json"
68
+ node dist/cli.js transcripts --format yaml --output ./transcripts-yaml
67
69
  ```
68
70
 
69
71
  ## How It Works
@@ -77,9 +79,10 @@ The flow is:
77
79
  1. read your local `supabase.json`
78
80
  2. extract the WorkOS access token from it
79
81
  3. call Granola's paginated documents API
80
- 4. choose the best available note content for each document
81
- 5. convert ProseMirror content into Markdown
82
- 6. write one Markdown file per document into the output directory
82
+ 4. normalise each document into a structured note export
83
+ 5. choose the best available note content for each document
84
+ 6. render that export as Markdown, JSON, YAML, or raw JSON
85
+ 7. write one file per document into the output directory
83
86
 
84
87
  Content is chosen in this order:
85
88
 
@@ -88,7 +91,7 @@ Content is chosen in this order:
88
91
  3. `last_viewed_panel.original_content`
89
92
  4. raw `content`
90
93
 
91
- Each note file includes:
94
+ Markdown note files include:
92
95
 
93
96
  - YAML frontmatter with the document id, created timestamp, updated timestamp, and tags
94
97
  - a top-level heading from the note title
@@ -102,15 +105,18 @@ The flow is:
102
105
 
103
106
  1. read Granola's cache JSON from disk
104
107
  2. parse the cache payload, whether it is double-encoded or already an object
105
- 3. match transcript segments to documents by document id
106
- 4. format segments as `[HH:MM:SS] Speaker: Text`
107
- 5. write one `.txt` file per document into the output directory
108
+ 3. normalise transcript data into a structured export per document
109
+ 4. match transcript segments to documents by document id
110
+ 5. render each export as text, JSON, YAML, or raw JSON
111
+ 6. write one file per document into the output directory
108
112
 
109
113
  Speaker labels are currently normalised to:
110
114
 
111
115
  - `You` for `microphone`
112
116
  - `System` for everything else
113
117
 
118
+ Structured output formats are useful when you want to post-process exports in scripts instead of reading the default human-oriented Markdown or text files.
119
+
114
120
  ### Incremental Writes
115
121
 
116
122
  Both commands are incremental. They only rewrite an export file when the source document appears newer than the file already on disk.
package/dist/cli.js CHANGED
@@ -171,10 +171,56 @@ function transcriptSpeakerLabel(segment) {
171
171
  return segment.source === "microphone" ? "You" : "System";
172
172
  }
173
173
  //#endregion
174
- //#region src/api.ts
175
- const USER_AGENT = "Granola/5.354.0";
176
- const CLIENT_VERSION = "5.354.0";
177
- const DOCUMENTS_URL = "https://api.granola.ai/v2/get-documents";
174
+ //#region src/client/auth.ts
175
+ function getAccessTokenFromSupabaseContents(supabaseContents) {
176
+ const wrapper = parseJsonString(supabaseContents);
177
+ if (!wrapper) throw new Error("failed to parse supabase.json");
178
+ const workosTokens = wrapper.workos_tokens;
179
+ let tokenPayload;
180
+ if (typeof workosTokens === "string") tokenPayload = parseJsonString(workosTokens);
181
+ else tokenPayload = asRecord(workosTokens);
182
+ const accessToken = tokenPayload ? stringValue(tokenPayload.access_token) : "";
183
+ if (!accessToken.trim()) throw new Error("access token not found in supabase.json");
184
+ return accessToken;
185
+ }
186
+ var SupabaseFileTokenSource = class {
187
+ constructor(filePath) {
188
+ this.filePath = filePath;
189
+ }
190
+ async loadAccessToken() {
191
+ return getAccessTokenFromSupabaseContents(await readFile(this.filePath, "utf8"));
192
+ }
193
+ };
194
+ var NoopTokenStore = class {
195
+ async clearToken() {}
196
+ async readToken() {}
197
+ async writeToken(_token) {}
198
+ };
199
+ var CachedTokenProvider = class {
200
+ #token;
201
+ constructor(source, store = new NoopTokenStore()) {
202
+ this.source = source;
203
+ this.store = store;
204
+ }
205
+ async getAccessToken() {
206
+ if (this.#token) return this.#token;
207
+ const storedToken = await this.store.readToken();
208
+ if (storedToken?.trim()) {
209
+ this.#token = storedToken;
210
+ return storedToken;
211
+ }
212
+ const token = await this.source.loadAccessToken();
213
+ this.#token = token;
214
+ await this.store.writeToken(token);
215
+ return token;
216
+ }
217
+ async invalidate() {
218
+ this.#token = void 0;
219
+ await this.store.clearToken();
220
+ }
221
+ };
222
+ //#endregion
223
+ //#region src/client/parsers.ts
178
224
  function parseProseMirrorDoc(value, options = {}) {
179
225
  if (value == null) return;
180
226
  if (typeof value === "string") {
@@ -209,17 +255,6 @@ function parseLastViewedPanel(value) {
209
255
  updatedAt: stringValue(panel.updated_at)
210
256
  };
211
257
  }
212
- function getAccessToken(supabaseContents) {
213
- const wrapper = parseJsonString(supabaseContents);
214
- if (!wrapper) throw new Error("failed to parse supabase.json");
215
- const workosTokens = wrapper.workos_tokens;
216
- let tokenPayload;
217
- if (typeof workosTokens === "string") tokenPayload = parseJsonString(workosTokens);
218
- else tokenPayload = asRecord(workosTokens);
219
- const accessToken = tokenPayload ? stringValue(tokenPayload.access_token) : "";
220
- if (!accessToken.trim()) throw new Error("access token not found in supabase.json");
221
- return accessToken;
222
- }
223
258
  function parseDocument(value) {
224
259
  const record = asRecord(value);
225
260
  if (!record) throw new Error("document payload is not an object");
@@ -235,44 +270,93 @@ function parseDocument(value) {
235
270
  updatedAt: stringValue(record.updated_at)
236
271
  };
237
272
  }
238
- async function fetchDocuments(options) {
239
- const fetchImpl = options.fetchImpl ?? fetch;
240
- const accessToken = getAccessToken(options.supabaseContents);
241
- const documents = [];
242
- const url = options.url ?? DOCUMENTS_URL;
243
- const limit = 100;
244
- let offset = 0;
245
- for (;;) {
246
- const signal = AbortSignal.timeout(options.timeoutMs);
247
- const response = await fetchImpl(url, {
248
- body: JSON.stringify({
273
+ //#endregion
274
+ //#region src/client/granola.ts
275
+ const USER_AGENT = "Granola/5.354.0";
276
+ const CLIENT_VERSION = "5.354.0";
277
+ const DOCUMENTS_URL = "https://api.granola.ai/v2/get-documents";
278
+ var GranolaApiClient = class {
279
+ constructor(httpClient, documentsUrl = DOCUMENTS_URL) {
280
+ this.httpClient = httpClient;
281
+ this.documentsUrl = documentsUrl;
282
+ }
283
+ async listDocuments(options) {
284
+ const documents = [];
285
+ const limit = options.limit ?? 100;
286
+ let offset = 0;
287
+ for (;;) {
288
+ const response = await this.httpClient.postJson(this.documentsUrl, {
249
289
  include_last_viewed_panel: true,
250
290
  limit,
251
291
  offset
252
- }),
292
+ }, {
293
+ headers: {
294
+ "User-Agent": USER_AGENT,
295
+ "X-Client-Version": CLIENT_VERSION
296
+ },
297
+ timeoutMs: options.timeoutMs
298
+ });
299
+ if (!response.ok) {
300
+ const body = (await response.text()).slice(0, 500);
301
+ throw new Error(`failed to get documents: ${response.status} ${response.statusText}${body ? `: ${body}` : ""}`);
302
+ }
303
+ const payload = await response.json();
304
+ if (!Array.isArray(payload.docs)) throw new Error("failed to parse documents response");
305
+ const page = payload.docs.map(parseDocument);
306
+ documents.push(...page);
307
+ if (page.length < limit) break;
308
+ offset += limit;
309
+ }
310
+ return documents;
311
+ }
312
+ };
313
+ //#endregion
314
+ //#region src/client/http.ts
315
+ var AuthenticatedHttpClient = class {
316
+ fetchImpl;
317
+ constructor(options) {
318
+ this.fetchImpl = options.fetchImpl ?? fetch;
319
+ this.logger = options.logger;
320
+ this.tokenProvider = options.tokenProvider;
321
+ }
322
+ logger;
323
+ tokenProvider;
324
+ async request(options) {
325
+ const { retryOnUnauthorized = true, timeoutMs, url } = options;
326
+ const accessToken = await this.tokenProvider.getAccessToken();
327
+ const response = await this.fetchImpl(url, {
328
+ body: options.body,
329
+ headers: {
330
+ ...options.headers,
331
+ Authorization: `Bearer ${accessToken}`
332
+ },
333
+ method: options.method ?? "GET",
334
+ signal: AbortSignal.timeout(timeoutMs)
335
+ });
336
+ if (response.status === 401 && retryOnUnauthorized) {
337
+ this.logger?.warn?.("request returned 401; invalidating token provider and retrying once");
338
+ await this.tokenProvider.invalidate();
339
+ return this.request({
340
+ ...options,
341
+ retryOnUnauthorized: false
342
+ });
343
+ }
344
+ return response;
345
+ }
346
+ async postJson(url, body, options = { timeoutMs: 3e4 }) {
347
+ return this.request({
348
+ ...options,
349
+ body: JSON.stringify(body),
253
350
  headers: {
254
351
  Accept: "*/*",
255
- Authorization: `Bearer ${accessToken}`,
256
352
  "Content-Type": "application/json",
257
- "User-Agent": USER_AGENT,
258
- "X-Client-Version": CLIENT_VERSION
353
+ ...options.headers
259
354
  },
260
355
  method: "POST",
261
- signal
356
+ url
262
357
  });
263
- if (!response.ok) {
264
- const body = (await response.text()).slice(0, 500);
265
- throw new Error(`failed to get documents: ${response.status} ${response.statusText}${body ? `: ${body}` : ""}`);
266
- }
267
- const payload = await response.json();
268
- if (!Array.isArray(payload.docs)) throw new Error("failed to parse documents response");
269
- const page = payload.docs.map(parseDocument);
270
- documents.push(...page);
271
- if (page.length < limit) break;
272
- offset += limit;
273
358
  }
274
- return documents;
275
- }
359
+ };
276
360
  //#endregion
277
361
  //#region src/config.ts
278
362
  function pickString(value) {
@@ -346,6 +430,42 @@ async function loadConfig(options) {
346
430
  };
347
431
  }
348
432
  //#endregion
433
+ //#region src/render.ts
434
+ function formatScalar(value) {
435
+ if (value == null) return "null";
436
+ if (typeof value === "string") return JSON.stringify(value);
437
+ if (typeof value === "number" || typeof value === "boolean") return String(value);
438
+ return JSON.stringify(value);
439
+ }
440
+ function renderYaml(value, depth = 0) {
441
+ const indent = " ".repeat(depth);
442
+ if (Array.isArray(value)) {
443
+ if (value.length === 0) return [`${indent}[]`];
444
+ return value.flatMap((item) => {
445
+ if (item && typeof item === "object" && !Array.isArray(item)) {
446
+ const nested = renderYaml(item, depth + 1);
447
+ return [`${indent}- ${(nested[0] ?? `${" ".repeat(depth + 1)}{}`).trimStart()}`, ...nested.slice(1)];
448
+ }
449
+ return [`${indent}- ${formatScalar(item)}`];
450
+ });
451
+ }
452
+ if (value && typeof value === "object") {
453
+ const entries = Object.entries(value);
454
+ if (entries.length === 0) return [`${indent}{}`];
455
+ return entries.flatMap(([key, entryValue]) => {
456
+ if (Array.isArray(entryValue) || entryValue && typeof entryValue === "object") return [`${indent}${key}:`, ...renderYaml(entryValue, depth + 1)];
457
+ return [`${indent}${key}: ${formatScalar(entryValue)}`];
458
+ });
459
+ }
460
+ return [`${indent}${formatScalar(value)}`];
461
+ }
462
+ function toYaml(value) {
463
+ return `${renderYaml(value).join("\n").trimEnd()}\n`;
464
+ }
465
+ function toJson(value) {
466
+ return `${JSON.stringify(value, null, 2)}\n`;
467
+ }
468
+ //#endregion
349
469
  //#region src/prosemirror.ts
350
470
  function repeatIndent(level) {
351
471
  return " ".repeat(level);
@@ -441,35 +561,98 @@ function extractPlainText(doc) {
441
561
  }
442
562
  //#endregion
443
563
  //#region src/notes.ts
444
- function documentToMarkdown(document) {
564
+ function selectNoteContent(document) {
565
+ const notes = convertProseMirrorToMarkdown(document.notes).trim();
566
+ if (notes) return {
567
+ content: notes,
568
+ source: "notes"
569
+ };
570
+ const lastViewedPanel = convertProseMirrorToMarkdown(document.lastViewedPanel?.content).trim();
571
+ if (lastViewedPanel) return {
572
+ content: lastViewedPanel,
573
+ source: "lastViewedPanel.content"
574
+ };
575
+ const originalContent = htmlToMarkdownFallback(document.lastViewedPanel?.originalContent ?? "").trim();
576
+ if (originalContent) return {
577
+ content: originalContent,
578
+ source: "lastViewedPanel.originalContent"
579
+ };
580
+ return {
581
+ content: document.content.trim(),
582
+ source: "content"
583
+ };
584
+ }
585
+ function buildNoteExport(document) {
586
+ const { content, source } = selectNoteContent(document);
587
+ return {
588
+ content,
589
+ contentSource: source,
590
+ createdAt: document.createdAt,
591
+ id: document.id,
592
+ raw: document,
593
+ tags: document.tags,
594
+ title: document.title,
595
+ updatedAt: document.updatedAt
596
+ };
597
+ }
598
+ function renderNoteExport(note, format = "markdown") {
599
+ switch (format) {
600
+ case "json": return toJson({
601
+ content: note.content,
602
+ contentSource: note.contentSource,
603
+ createdAt: note.createdAt,
604
+ id: note.id,
605
+ tags: note.tags,
606
+ title: note.title,
607
+ updatedAt: note.updatedAt
608
+ });
609
+ case "raw": return toJson(note.raw);
610
+ case "yaml": return toYaml({
611
+ content: note.content,
612
+ contentSource: note.contentSource,
613
+ createdAt: note.createdAt,
614
+ id: note.id,
615
+ tags: note.tags,
616
+ title: note.title,
617
+ updatedAt: note.updatedAt
618
+ });
619
+ case "markdown": break;
620
+ }
445
621
  const lines = [
446
622
  "---",
447
- `id: ${quoteYamlString(document.id)}`,
448
- `created: ${quoteYamlString(document.createdAt)}`,
449
- `updated: ${quoteYamlString(document.updatedAt)}`
623
+ `id: ${quoteYamlString(note.id)}`,
624
+ `created: ${quoteYamlString(note.createdAt)}`,
625
+ `updated: ${quoteYamlString(note.updatedAt)}`
450
626
  ];
451
- if (document.tags.length > 0) {
627
+ if (note.tags.length > 0) {
452
628
  lines.push("tags:");
453
- for (const tag of document.tags) lines.push(` - ${quoteYamlString(tag)}`);
629
+ for (const tag of note.tags) lines.push(` - ${quoteYamlString(tag)}`);
454
630
  }
455
631
  lines.push("---", "");
456
- if (document.title.trim()) lines.push(`# ${document.title.trim()}`, "");
457
- const content = convertProseMirrorToMarkdown(document.notes).trim() || convertProseMirrorToMarkdown(document.lastViewedPanel?.content).trim() || htmlToMarkdownFallback(document.lastViewedPanel?.originalContent ?? "").trim() || document.content.trim();
458
- if (content) lines.push(content);
632
+ if (note.title.trim()) lines.push(`# ${note.title.trim()}`, "");
633
+ if (note.content) lines.push(note.content);
459
634
  return `${lines.join("\n").trimEnd()}\n`;
460
635
  }
461
636
  function documentFilename(document) {
462
637
  return sanitiseFilename(document.title || document.id, "untitled");
463
638
  }
464
- async function writeNotes(documents, outputDir) {
639
+ function noteFileExtension(format) {
640
+ switch (format) {
641
+ case "json": return ".json";
642
+ case "raw": return ".raw.json";
643
+ case "yaml": return ".yaml";
644
+ case "markdown": return ".md";
645
+ }
646
+ }
647
+ async function writeNotes(documents, outputDir, format = "markdown") {
465
648
  await ensureDirectory(outputDir);
466
649
  const sorted = [...documents].sort((left, right) => compareStrings(left.title || left.id, right.title || right.id) || compareStrings(left.id, right.id));
467
650
  const used = /* @__PURE__ */ new Map();
468
651
  let written = 0;
469
652
  for (const document of sorted) {
470
- const filePath = join(outputDir, `${makeUniqueFilename(documentFilename(document), used)}.md`);
653
+ const filePath = join(outputDir, `${makeUniqueFilename(documentFilename(document), used)}${noteFileExtension(format)}`);
471
654
  if (!await shouldWriteFile(filePath, latestDocumentTimestamp(document))) continue;
472
- await writeTextFile(filePath, documentToMarkdown(document));
655
+ await writeTextFile(filePath, renderNoteExport(buildNoteExport(document), format));
473
656
  written += 1;
474
657
  }
475
658
  return written;
@@ -488,7 +671,8 @@ Usage:
488
671
  granola notes [options]
489
672
 
490
673
  Options:
491
- --output <path> Output directory for Markdown files (default: ./notes)
674
+ --format <value> Output format: markdown, json, yaml, raw (default: markdown)
675
+ --output <path> Output directory for note files (default: ./notes)
492
676
  --timeout <value> Request timeout, e.g. 2m, 30s, 120000 (default: 2m)
493
677
  --supabase <path> Path to supabase.json
494
678
  --debug Enable debug logging
@@ -497,8 +681,9 @@ Options:
497
681
  `;
498
682
  }
499
683
  const notesCommand = {
500
- description: "Export Granola notes to Markdown",
684
+ description: "Export Granola notes",
501
685
  flags: {
686
+ format: { type: "string" },
502
687
  help: { type: "boolean" },
503
688
  output: { type: "string" },
504
689
  timeout: { type: "string" }
@@ -515,18 +700,31 @@ const notesCommand = {
515
700
  debug(config.debug, "supabase", config.supabase);
516
701
  debug(config.debug, "timeoutMs", config.notes.timeoutMs);
517
702
  debug(config.debug, "output", config.notes.output);
703
+ const format = resolveNoteFormat(commandFlags.format);
704
+ debug(config.debug, "format", format);
518
705
  console.log("Fetching documents from Granola API...");
519
- const documents = await fetchDocuments({
520
- supabaseContents: await readFile(config.supabase, "utf8"),
521
- timeoutMs: config.notes.timeoutMs
522
- });
706
+ const tokenProvider = new CachedTokenProvider(new SupabaseFileTokenSource(config.supabase), new NoopTokenStore());
707
+ const documents = await new GranolaApiClient(new AuthenticatedHttpClient({
708
+ logger: console,
709
+ tokenProvider
710
+ })).listDocuments({ timeoutMs: config.notes.timeoutMs });
523
711
  console.log(`Exporting ${documents.length} notes to ${config.notes.output}...`);
524
- const written = await writeNotes(documents, config.notes.output);
712
+ const written = await writeNotes(documents, config.notes.output, format);
525
713
  console.log("✓ Export completed successfully");
526
714
  debug(config.debug, "notes written", written);
527
715
  return 0;
528
716
  }
529
717
  };
718
+ function resolveNoteFormat(value) {
719
+ switch (value) {
720
+ case void 0: return "markdown";
721
+ case "json":
722
+ case "markdown":
723
+ case "raw":
724
+ case "yaml": return value;
725
+ default: throw new Error("invalid notes format: expected markdown, json, yaml, or raw");
726
+ }
727
+ }
530
728
  //#endregion
531
729
  //#region src/cache.ts
532
730
  function parseCacheDocument(id, value) {
@@ -583,27 +781,78 @@ function parseCacheContents(contents) {
583
781
  }
584
782
  //#endregion
585
783
  //#region src/transcripts.ts
586
- function formatTranscript(document, segments) {
587
- if (segments.length === 0) return "";
784
+ function buildTranscriptExport(document, segments) {
785
+ const renderedSegments = segments.map((segment) => ({
786
+ endTimestamp: segment.endTimestamp,
787
+ id: segment.id,
788
+ isFinal: segment.isFinal,
789
+ source: segment.source,
790
+ speaker: transcriptSpeakerLabel(segment),
791
+ startTimestamp: segment.startTimestamp,
792
+ text: segment.text
793
+ }));
794
+ return {
795
+ createdAt: document.createdAt,
796
+ id: document.id,
797
+ raw: {
798
+ document,
799
+ segments
800
+ },
801
+ segments: renderedSegments,
802
+ title: document.title,
803
+ updatedAt: document.updatedAt
804
+ };
805
+ }
806
+ function renderTranscriptExport(transcript, format = "text") {
807
+ switch (format) {
808
+ case "json": return toJson({
809
+ createdAt: transcript.createdAt,
810
+ id: transcript.id,
811
+ segments: transcript.segments,
812
+ title: transcript.title,
813
+ updatedAt: transcript.updatedAt
814
+ });
815
+ case "raw": return toJson(transcript.raw);
816
+ case "yaml": return toYaml({
817
+ createdAt: transcript.createdAt,
818
+ id: transcript.id,
819
+ segments: transcript.segments,
820
+ title: transcript.title,
821
+ updatedAt: transcript.updatedAt
822
+ });
823
+ case "text": break;
824
+ }
825
+ return formatTranscriptText(transcript);
826
+ }
827
+ function formatTranscriptText(transcript) {
828
+ if (transcript.segments.length === 0) return "";
588
829
  const header = [
589
830
  "=".repeat(80),
590
- document.title || document.id,
591
- `ID: ${document.id}`,
592
- document.createdAt ? `Created: ${document.createdAt}` : "",
593
- document.updatedAt ? `Updated: ${document.updatedAt}` : "",
594
- `Segments: ${segments.length}`,
831
+ transcript.title || transcript.id,
832
+ `ID: ${transcript.id}`,
833
+ transcript.createdAt ? `Created: ${transcript.createdAt}` : "",
834
+ transcript.updatedAt ? `Updated: ${transcript.updatedAt}` : "",
835
+ `Segments: ${transcript.segments.length}`,
595
836
  "=".repeat(80),
596
837
  ""
597
838
  ].filter(Boolean);
598
- const body = segments.map((segment) => {
599
- return `[${formatTimestampForTranscript(segment.startTimestamp)}] ${transcriptSpeakerLabel(segment)}: ${segment.text}`;
839
+ const body = transcript.segments.map((segment) => {
840
+ return `[${formatTimestampForTranscript(segment.startTimestamp)}] ${segment.speaker}: ${segment.text}`;
600
841
  });
601
842
  return `${[...header, ...body].join("\n").trimEnd()}\n`;
602
843
  }
603
844
  function transcriptFilename(document) {
604
845
  return sanitiseFilename(document.title || document.id, "untitled");
605
846
  }
606
- async function writeTranscripts(cacheData, outputDir) {
847
+ function transcriptFileExtension(format) {
848
+ switch (format) {
849
+ case "json": return ".json";
850
+ case "raw": return ".raw.json";
851
+ case "text": return ".txt";
852
+ case "yaml": return ".yaml";
853
+ }
854
+ }
855
+ async function writeTranscripts(cacheData, outputDir, format = "text") {
607
856
  await ensureDirectory(outputDir);
608
857
  const entries = Object.entries(cacheData.transcripts).filter(([, segments]) => segments.length > 0).sort(([leftId], [rightId]) => {
609
858
  const leftDocument = cacheData.documents[leftId];
@@ -619,9 +868,9 @@ async function writeTranscripts(cacheData, outputDir) {
619
868
  title: documentId,
620
869
  updatedAt: ""
621
870
  };
622
- const filePath = join(outputDir, `${makeUniqueFilename(transcriptFilename(document), used)}.txt`);
871
+ const filePath = join(outputDir, `${makeUniqueFilename(transcriptFilename(document), used)}${transcriptFileExtension(format)}`);
623
872
  if (!await shouldWriteFile(filePath, document.updatedAt)) continue;
624
- const content = formatTranscript(document, segments);
873
+ const content = renderTranscriptExport(buildTranscriptExport(document, segments), format);
625
874
  if (!content) continue;
626
875
  await writeTextFile(filePath, content);
627
876
  written += 1;
@@ -638,18 +887,18 @@ Usage:
638
887
 
639
888
  Options:
640
889
  --cache <path> Path to Granola cache JSON
890
+ --format <value> Output format: text, json, yaml, raw (default: text)
641
891
  --output <path> Output directory for transcript files (default: ./transcripts)
642
892
  --debug Enable debug logging
643
893
  --config <path> Path to .granola.toml
644
894
  -h, --help Show help
645
895
  `;
646
896
  }
647
- //#endregion
648
- //#region src/commands/index.ts
649
- const commands = [notesCommand, {
650
- description: "Export Granola transcripts to text files",
897
+ const transcriptsCommand = {
898
+ description: "Export Granola transcripts",
651
899
  flags: {
652
900
  cache: { type: "string" },
901
+ format: { type: "string" },
653
902
  help: { type: "boolean" },
654
903
  output: { type: "string" }
655
904
  },
@@ -664,16 +913,31 @@ const commands = [notesCommand, {
664
913
  debug(config.debug, "using config", config.configFileUsed ?? "(none)");
665
914
  debug(config.debug, "cacheFile", config.transcripts.cacheFile);
666
915
  debug(config.debug, "output", config.transcripts.output);
916
+ const format = resolveTranscriptFormat(commandFlags.format);
917
+ debug(config.debug, "format", format);
667
918
  console.log("Reading Granola cache file...");
668
919
  const cacheData = parseCacheContents(await readFile(config.transcripts.cacheFile, "utf8"));
669
920
  const transcriptCount = Object.values(cacheData.transcripts).filter((segments) => segments.length > 0).length;
670
921
  console.log(`Exporting ${transcriptCount} transcripts to ${config.transcripts.output}...`);
671
- const written = await writeTranscripts(cacheData, config.transcripts.output);
922
+ const written = await writeTranscripts(cacheData, config.transcripts.output, format);
672
923
  console.log("✓ Export completed successfully");
673
924
  debug(config.debug, "transcripts written", written);
674
925
  return 0;
675
926
  }
676
- }];
927
+ };
928
+ function resolveTranscriptFormat(value) {
929
+ switch (value) {
930
+ case void 0: return "text";
931
+ case "json":
932
+ case "raw":
933
+ case "text":
934
+ case "yaml": return value;
935
+ default: throw new Error("invalid transcripts format: expected text, json, yaml, or raw");
936
+ }
937
+ }
938
+ //#endregion
939
+ //#region src/commands/index.ts
940
+ const commands = [notesCommand, transcriptsCommand];
677
941
  const commandMap = new Map(commands.map((command) => [command.name, command]));
678
942
  //#endregion
679
943
  //#region src/flags.ts
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "granola-toolkit",
3
- "version": "0.2.0",
3
+ "version": "0.4.0",
4
4
  "description": "CLI toolkit for exporting and working with Granola notes and transcripts",
5
5
  "keywords": [
6
6
  "cli",
@@ -37,8 +37,11 @@
37
37
  "fmt": "vp fmt",
38
38
  "lint": "vp lint",
39
39
  "pack:dry-run": "npm pack --dry-run",
40
- "prepublishOnly": "vp pack",
40
+ "prepublishOnly": "node scripts/prepublish.mjs",
41
41
  "release": "node scripts/release.mjs",
42
+ "release:major": "node scripts/release.mjs major",
43
+ "release:minor": "node scripts/release.mjs minor",
44
+ "release:patch": "node scripts/release.mjs patch",
42
45
  "start": "node dist/cli.js",
43
46
  "notes": "node dist/cli.js notes",
44
47
  "transcripts": "node dist/cli.js transcripts",