granola-toolkit 0.1.0 → 0.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/README.md +8 -59
  2. package/dist/cli.js +298 -196
  3. package/package.json +6 -3
package/README.md CHANGED
@@ -146,17 +146,9 @@ Supported environment variables:
146
146
  - `CACHE_FILE`
147
147
  - `TRANSCRIPT_OUTPUT`
148
148
 
149
- ## What Changed In The Port
149
+ ## Development Checks
150
150
 
151
- This port deliberately preserves the Go repo's architecture, but it also fixes a few obvious rough edges instead of copying them blindly:
152
-
153
- - deterministic export ordering, so duplicate-title suffixes are stable across runs
154
- - shared filename sanitisation between notes and transcripts
155
- - cross-platform default path discovery for both `supabase.json` and cache files
156
- - HTML fallback for note export is converted into readable Markdown-ish text instead of being dumped raw
157
- - transcript timestamps preserve the original clock time instead of being normalised to UTC
158
-
159
- ## Verify
151
+ Before pushing changes, run:
160
152
 
161
153
  ```bash
162
154
  vp check
@@ -165,54 +157,11 @@ vp pack
165
157
  npm pack --dry-run
166
158
  ```
167
159
 
168
- `vp build` is for web apps. This repo publishes a CLI bundle, so the correct build step here is `vp pack`.
169
-
170
- ## Publishing
171
-
172
- Any push to `main` with a package version that is not already on npm becomes a publish candidate automatically. The workflow verifies the build, checks whether `package.json` contains an unpublished version, and then pauses in the `production` environment until someone approves the deployment review in GitHub.
173
-
174
- That means you can use either flow:
175
-
176
- - merge a PR that already includes the version bump
177
- - run the local release helper on `main`
178
-
179
- Local release helper:
180
-
181
- ```bash
182
- npm run release
183
- ```
184
-
185
- That script:
186
-
187
- 1. verifies the git working tree is clean
188
- 2. verifies you are on `main`
189
- 3. bumps the package version with `npm version --no-git-tag-version`
190
- 4. commits and pushes the release commit
191
- 5. lets the push-to-`main` workflow create a publish candidate automatically
192
-
193
- You can also choose the bump type explicitly:
194
-
195
- ```bash
196
- npm run release patch
197
- npm run release minor
198
- npm run release major
199
- ```
200
-
201
- The GitHub Actions release job then:
202
-
203
- - installs dependencies with Vite+ via `setup-vp`
204
- - runs `vp check`, `vp test`, `vp pack`, and `npm pack --dry-run`
205
- - checks npm first and skips the publish job if that exact version already exists
206
- - waits for approval on the `production` environment before npm credentials are exposed
207
- - publishes to npm using `NPM_TOKEN`
208
- - tags the published version as `v<version>`
209
-
210
- ### GitHub Setup
211
-
212
- To get the review dialog you showed in the screenshots, configure this once in GitHub:
160
+ What those do:
213
161
 
214
- 1. create a `production` environment in repository Settings -> Environments
215
- 2. add required reviewers to that environment
216
- 3. add `NPM_TOKEN` as an environment secret on `production`
162
+ - `vp check`: formatting, linting, and type checks
163
+ - `vp test`: unit tests
164
+ - `vp pack`: builds the CLI bundle into `dist/cli.js`
165
+ - `npm pack --dry-run`: shows the exact npm package contents without publishing
217
166
 
218
- After that, merges to `main` that contain a new unpublished version will stop at "Review deployments". Approving that deployment is what allows the npm publish step to run.
167
+ `vp build` is for web apps. This repo is a CLI package, so the build step here is `vp pack`.
package/dist/cli.js CHANGED
@@ -171,10 +171,56 @@ function transcriptSpeakerLabel(segment) {
171
171
  return segment.source === "microphone" ? "You" : "System";
172
172
  }
173
173
  //#endregion
174
- //#region src/api.ts
175
- const USER_AGENT = "Granola/5.354.0";
176
- const CLIENT_VERSION = "5.354.0";
177
- const DOCUMENTS_URL = "https://api.granola.ai/v2/get-documents";
174
+ //#region src/client/auth.ts
175
+ function getAccessTokenFromSupabaseContents(supabaseContents) {
176
+ const wrapper = parseJsonString(supabaseContents);
177
+ if (!wrapper) throw new Error("failed to parse supabase.json");
178
+ const workosTokens = wrapper.workos_tokens;
179
+ let tokenPayload;
180
+ if (typeof workosTokens === "string") tokenPayload = parseJsonString(workosTokens);
181
+ else tokenPayload = asRecord(workosTokens);
182
+ const accessToken = tokenPayload ? stringValue(tokenPayload.access_token) : "";
183
+ if (!accessToken.trim()) throw new Error("access token not found in supabase.json");
184
+ return accessToken;
185
+ }
186
+ var SupabaseFileTokenSource = class {
187
+ constructor(filePath) {
188
+ this.filePath = filePath;
189
+ }
190
+ async loadAccessToken() {
191
+ return getAccessTokenFromSupabaseContents(await readFile(this.filePath, "utf8"));
192
+ }
193
+ };
194
+ var NoopTokenStore = class {
195
+ async clearToken() {}
196
+ async readToken() {}
197
+ async writeToken(_token) {}
198
+ };
199
+ var CachedTokenProvider = class {
200
+ #token;
201
+ constructor(source, store = new NoopTokenStore()) {
202
+ this.source = source;
203
+ this.store = store;
204
+ }
205
+ async getAccessToken() {
206
+ if (this.#token) return this.#token;
207
+ const storedToken = await this.store.readToken();
208
+ if (storedToken?.trim()) {
209
+ this.#token = storedToken;
210
+ return storedToken;
211
+ }
212
+ const token = await this.source.loadAccessToken();
213
+ this.#token = token;
214
+ await this.store.writeToken(token);
215
+ return token;
216
+ }
217
+ async invalidate() {
218
+ this.#token = void 0;
219
+ await this.store.clearToken();
220
+ }
221
+ };
222
+ //#endregion
223
+ //#region src/client/parsers.ts
178
224
  function parseProseMirrorDoc(value, options = {}) {
179
225
  if (value == null) return;
180
226
  if (typeof value === "string") {
@@ -209,17 +255,6 @@ function parseLastViewedPanel(value) {
209
255
  updatedAt: stringValue(panel.updated_at)
210
256
  };
211
257
  }
212
- function getAccessToken(supabaseContents) {
213
- const wrapper = parseJsonString(supabaseContents);
214
- if (!wrapper) throw new Error("failed to parse supabase.json");
215
- const workosTokens = wrapper.workos_tokens;
216
- let tokenPayload;
217
- if (typeof workosTokens === "string") tokenPayload = parseJsonString(workosTokens);
218
- else tokenPayload = asRecord(workosTokens);
219
- const accessToken = tokenPayload ? stringValue(tokenPayload.access_token) : "";
220
- if (!accessToken.trim()) throw new Error("access token not found in supabase.json");
221
- return accessToken;
222
- }
223
258
  function parseDocument(value) {
224
259
  const record = asRecord(value);
225
260
  if (!record) throw new Error("document payload is not an object");
@@ -235,98 +270,93 @@ function parseDocument(value) {
235
270
  updatedAt: stringValue(record.updated_at)
236
271
  };
237
272
  }
238
- async function fetchDocuments(options) {
239
- const fetchImpl = options.fetchImpl ?? fetch;
240
- const accessToken = getAccessToken(options.supabaseContents);
241
- const documents = [];
242
- const url = options.url ?? DOCUMENTS_URL;
243
- const limit = 100;
244
- let offset = 0;
245
- for (;;) {
246
- const signal = AbortSignal.timeout(options.timeoutMs);
247
- const response = await fetchImpl(url, {
248
- body: JSON.stringify({
273
+ //#endregion
274
+ //#region src/client/granola.ts
275
+ const USER_AGENT = "Granola/5.354.0";
276
+ const CLIENT_VERSION = "5.354.0";
277
+ const DOCUMENTS_URL = "https://api.granola.ai/v2/get-documents";
278
+ var GranolaApiClient = class {
279
+ constructor(httpClient, documentsUrl = DOCUMENTS_URL) {
280
+ this.httpClient = httpClient;
281
+ this.documentsUrl = documentsUrl;
282
+ }
283
+ async listDocuments(options) {
284
+ const documents = [];
285
+ const limit = options.limit ?? 100;
286
+ let offset = 0;
287
+ for (;;) {
288
+ const response = await this.httpClient.postJson(this.documentsUrl, {
249
289
  include_last_viewed_panel: true,
250
290
  limit,
251
291
  offset
252
- }),
292
+ }, {
293
+ headers: {
294
+ "User-Agent": USER_AGENT,
295
+ "X-Client-Version": CLIENT_VERSION
296
+ },
297
+ timeoutMs: options.timeoutMs
298
+ });
299
+ if (!response.ok) {
300
+ const body = (await response.text()).slice(0, 500);
301
+ throw new Error(`failed to get documents: ${response.status} ${response.statusText}${body ? `: ${body}` : ""}`);
302
+ }
303
+ const payload = await response.json();
304
+ if (!Array.isArray(payload.docs)) throw new Error("failed to parse documents response");
305
+ const page = payload.docs.map(parseDocument);
306
+ documents.push(...page);
307
+ if (page.length < limit) break;
308
+ offset += limit;
309
+ }
310
+ return documents;
311
+ }
312
+ };
313
+ //#endregion
314
+ //#region src/client/http.ts
315
+ var AuthenticatedHttpClient = class {
316
+ fetchImpl;
317
+ constructor(options) {
318
+ this.fetchImpl = options.fetchImpl ?? fetch;
319
+ this.logger = options.logger;
320
+ this.tokenProvider = options.tokenProvider;
321
+ }
322
+ logger;
323
+ tokenProvider;
324
+ async request(options) {
325
+ const { retryOnUnauthorized = true, timeoutMs, url } = options;
326
+ const accessToken = await this.tokenProvider.getAccessToken();
327
+ const response = await this.fetchImpl(url, {
328
+ body: options.body,
329
+ headers: {
330
+ ...options.headers,
331
+ Authorization: `Bearer ${accessToken}`
332
+ },
333
+ method: options.method ?? "GET",
334
+ signal: AbortSignal.timeout(timeoutMs)
335
+ });
336
+ if (response.status === 401 && retryOnUnauthorized) {
337
+ this.logger?.warn?.("request returned 401; invalidating token provider and retrying once");
338
+ await this.tokenProvider.invalidate();
339
+ return this.request({
340
+ ...options,
341
+ retryOnUnauthorized: false
342
+ });
343
+ }
344
+ return response;
345
+ }
346
+ async postJson(url, body, options = { timeoutMs: 3e4 }) {
347
+ return this.request({
348
+ ...options,
349
+ body: JSON.stringify(body),
253
350
  headers: {
254
351
  Accept: "*/*",
255
- Authorization: `Bearer ${accessToken}`,
256
352
  "Content-Type": "application/json",
257
- "User-Agent": USER_AGENT,
258
- "X-Client-Version": CLIENT_VERSION
353
+ ...options.headers
259
354
  },
260
355
  method: "POST",
261
- signal
356
+ url
262
357
  });
263
- if (!response.ok) {
264
- const body = (await response.text()).slice(0, 500);
265
- throw new Error(`failed to get documents: ${response.status} ${response.statusText}${body ? `: ${body}` : ""}`);
266
- }
267
- const payload = await response.json();
268
- if (!Array.isArray(payload.docs)) throw new Error("failed to parse documents response");
269
- const page = payload.docs.map(parseDocument);
270
- documents.push(...page);
271
- if (page.length < limit) break;
272
- offset += limit;
273
358
  }
274
- return documents;
275
- }
276
- //#endregion
277
- //#region src/cache.ts
278
- function parseCacheDocument(id, value) {
279
- const record = asRecord(value);
280
- if (!record) return;
281
- return {
282
- createdAt: stringValue(record.created_at),
283
- id,
284
- title: stringValue(record.title),
285
- updatedAt: stringValue(record.updated_at)
286
- };
287
- }
288
- function parseTranscriptSegments(value) {
289
- if (!Array.isArray(value)) return;
290
- return value.flatMap((segment) => {
291
- const record = asRecord(segment);
292
- if (!record) return [];
293
- return [{
294
- documentId: stringValue(record.document_id),
295
- endTimestamp: stringValue(record.end_timestamp),
296
- id: stringValue(record.id),
297
- isFinal: Boolean(record.is_final),
298
- source: stringValue(record.source),
299
- startTimestamp: stringValue(record.start_timestamp),
300
- text: stringValue(record.text)
301
- }];
302
- });
303
- }
304
- function parseCacheContents(contents) {
305
- const outer = parseJsonString(contents);
306
- if (!outer) throw new Error("failed to parse cache JSON");
307
- const rawCache = outer.cache;
308
- let cachePayload;
309
- if (typeof rawCache === "string") cachePayload = parseJsonString(rawCache);
310
- else cachePayload = asRecord(rawCache);
311
- const state = cachePayload ? asRecord(cachePayload.state) : void 0;
312
- if (!state) throw new Error("failed to parse cache state");
313
- const rawDocuments = asRecord(state.documents) ?? {};
314
- const rawTranscripts = asRecord(state.transcripts) ?? {};
315
- const documents = {};
316
- for (const [id, rawDocument] of Object.entries(rawDocuments)) {
317
- const document = parseCacheDocument(id, rawDocument);
318
- if (document) documents[id] = document;
319
- }
320
- const transcripts = {};
321
- for (const [id, rawTranscript] of Object.entries(rawTranscripts)) {
322
- const segments = parseTranscriptSegments(rawTranscript);
323
- if (segments) transcripts[id] = segments;
324
- }
325
- return {
326
- documents,
327
- transcripts
328
- };
329
- }
359
+ };
330
360
  //#endregion
331
361
  //#region src/config.ts
332
362
  function pickString(value) {
@@ -529,6 +559,114 @@ async function writeNotes(documents, outputDir) {
529
559
  return written;
530
560
  }
531
561
  //#endregion
562
+ //#region src/commands/shared.ts
563
+ function debug(enabled, ...values) {
564
+ if (enabled) console.error("[debug]", ...values);
565
+ }
566
+ //#endregion
567
+ //#region src/commands/notes.ts
568
+ function notesHelp() {
569
+ return `Granola notes
570
+
571
+ Usage:
572
+ granola notes [options]
573
+
574
+ Options:
575
+ --output <path> Output directory for Markdown files (default: ./notes)
576
+ --timeout <value> Request timeout, e.g. 2m, 30s, 120000 (default: 2m)
577
+ --supabase <path> Path to supabase.json
578
+ --debug Enable debug logging
579
+ --config <path> Path to .granola.toml
580
+ -h, --help Show help
581
+ `;
582
+ }
583
+ const notesCommand = {
584
+ description: "Export Granola notes to Markdown",
585
+ flags: {
586
+ help: { type: "boolean" },
587
+ output: { type: "string" },
588
+ timeout: { type: "string" }
589
+ },
590
+ help: notesHelp,
591
+ name: "notes",
592
+ async run({ commandFlags, globalFlags }) {
593
+ const config = await loadConfig({
594
+ globalFlags,
595
+ subcommandFlags: commandFlags
596
+ });
597
+ if (!config.supabase) throw new Error(`supabase.json not found. Pass --supabase or create .granola.toml. Expected locations include: ${granolaSupabaseCandidates().join(", ")}`);
598
+ debug(config.debug, "using config", config.configFileUsed ?? "(none)");
599
+ debug(config.debug, "supabase", config.supabase);
600
+ debug(config.debug, "timeoutMs", config.notes.timeoutMs);
601
+ debug(config.debug, "output", config.notes.output);
602
+ console.log("Fetching documents from Granola API...");
603
+ const tokenProvider = new CachedTokenProvider(new SupabaseFileTokenSource(config.supabase), new NoopTokenStore());
604
+ const documents = await new GranolaApiClient(new AuthenticatedHttpClient({
605
+ logger: console,
606
+ tokenProvider
607
+ })).listDocuments({ timeoutMs: config.notes.timeoutMs });
608
+ console.log(`Exporting ${documents.length} notes to ${config.notes.output}...`);
609
+ const written = await writeNotes(documents, config.notes.output);
610
+ console.log("✓ Export completed successfully");
611
+ debug(config.debug, "notes written", written);
612
+ return 0;
613
+ }
614
+ };
615
+ //#endregion
616
+ //#region src/cache.ts
617
+ function parseCacheDocument(id, value) {
618
+ const record = asRecord(value);
619
+ if (!record) return;
620
+ return {
621
+ createdAt: stringValue(record.created_at),
622
+ id,
623
+ title: stringValue(record.title),
624
+ updatedAt: stringValue(record.updated_at)
625
+ };
626
+ }
627
+ function parseTranscriptSegments(value) {
628
+ if (!Array.isArray(value)) return;
629
+ return value.flatMap((segment) => {
630
+ const record = asRecord(segment);
631
+ if (!record) return [];
632
+ return [{
633
+ documentId: stringValue(record.document_id),
634
+ endTimestamp: stringValue(record.end_timestamp),
635
+ id: stringValue(record.id),
636
+ isFinal: Boolean(record.is_final),
637
+ source: stringValue(record.source),
638
+ startTimestamp: stringValue(record.start_timestamp),
639
+ text: stringValue(record.text)
640
+ }];
641
+ });
642
+ }
643
+ function parseCacheContents(contents) {
644
+ const outer = parseJsonString(contents);
645
+ if (!outer) throw new Error("failed to parse cache JSON");
646
+ const rawCache = outer.cache;
647
+ let cachePayload;
648
+ if (typeof rawCache === "string") cachePayload = parseJsonString(rawCache);
649
+ else cachePayload = asRecord(rawCache);
650
+ const state = cachePayload ? asRecord(cachePayload.state) : void 0;
651
+ if (!state) throw new Error("failed to parse cache state");
652
+ const rawDocuments = asRecord(state.documents) ?? {};
653
+ const rawTranscripts = asRecord(state.transcripts) ?? {};
654
+ const documents = {};
655
+ for (const [id, rawDocument] of Object.entries(rawDocuments)) {
656
+ const document = parseCacheDocument(id, rawDocument);
657
+ if (document) documents[id] = document;
658
+ }
659
+ const transcripts = {};
660
+ for (const [id, rawTranscript] of Object.entries(rawTranscripts)) {
661
+ const segments = parseTranscriptSegments(rawTranscript);
662
+ if (segments) transcripts[id] = segments;
663
+ }
664
+ return {
665
+ documents,
666
+ transcripts
667
+ };
668
+ }
669
+ //#endregion
532
670
  //#region src/transcripts.ts
533
671
  function formatTranscript(document, segments) {
534
672
  if (segments.length === 0) return "";
@@ -576,7 +714,54 @@ async function writeTranscripts(cacheData, outputDir) {
576
714
  return written;
577
715
  }
578
716
  //#endregion
579
- //#region src/cli.ts
717
+ //#region src/commands/transcripts.ts
718
+ function transcriptsHelp() {
719
+ return `Granola transcripts
720
+
721
+ Usage:
722
+ granola transcripts [options]
723
+
724
+ Options:
725
+ --cache <path> Path to Granola cache JSON
726
+ --output <path> Output directory for transcript files (default: ./transcripts)
727
+ --debug Enable debug logging
728
+ --config <path> Path to .granola.toml
729
+ -h, --help Show help
730
+ `;
731
+ }
732
+ //#endregion
733
+ //#region src/commands/index.ts
734
+ const commands = [notesCommand, {
735
+ description: "Export Granola transcripts to text files",
736
+ flags: {
737
+ cache: { type: "string" },
738
+ help: { type: "boolean" },
739
+ output: { type: "string" }
740
+ },
741
+ help: transcriptsHelp,
742
+ name: "transcripts",
743
+ async run({ commandFlags, globalFlags }) {
744
+ const config = await loadConfig({
745
+ globalFlags,
746
+ subcommandFlags: commandFlags
747
+ });
748
+ if (!config.transcripts.cacheFile) throw new Error(`Granola cache file not found. Pass --cache or create .granola.toml. Expected locations include: ${granolaCacheCandidates().join(", ")}`);
749
+ debug(config.debug, "using config", config.configFileUsed ?? "(none)");
750
+ debug(config.debug, "cacheFile", config.transcripts.cacheFile);
751
+ debug(config.debug, "output", config.transcripts.output);
752
+ console.log("Reading Granola cache file...");
753
+ const cacheData = parseCacheContents(await readFile(config.transcripts.cacheFile, "utf8"));
754
+ const transcriptCount = Object.values(cacheData.transcripts).filter((segments) => segments.length > 0).length;
755
+ console.log(`Exporting ${transcriptCount} transcripts to ${config.transcripts.output}...`);
756
+ const written = await writeTranscripts(cacheData, config.transcripts.output);
757
+ console.log("✓ Export completed successfully");
758
+ debug(config.debug, "transcripts written", written);
759
+ return 0;
760
+ }
761
+ }];
762
+ const commandMap = new Map(commands.map((command) => [command.name, command]));
763
+ //#endregion
764
+ //#region src/flags.ts
580
765
  function parseBooleanValue(value) {
581
766
  if (/^(true|1|yes|on)$/i.test(value)) return true;
582
767
  if (/^(false|0|no|off)$/i.test(value)) return false;
@@ -624,13 +809,15 @@ function parseFlags(args, spec) {
624
809
  values
625
810
  };
626
811
  }
812
+ //#endregion
813
+ //#region src/cli.ts
627
814
  function splitCommand(argv) {
628
- const commands = new Set(["notes", "transcripts"]);
629
815
  const rest = [];
630
816
  let command;
631
817
  for (const token of argv) {
632
- if (!command && !token.startsWith("-") && commands.has(token)) {
633
- command = token;
818
+ const candidate = !token.startsWith("-") ? commandMap.get(token) : void 0;
819
+ if (!command && candidate) {
820
+ command = candidate;
634
821
  continue;
635
822
  }
636
823
  rest.push(token);
@@ -641,6 +828,7 @@ function splitCommand(argv) {
641
828
  };
642
829
  }
643
830
  function rootHelp() {
831
+ const commandWidth = Math.max(...commands.map((command) => command.name.length));
644
832
  return `Granola CLI
645
833
 
646
834
  Export your Granola notes and transcripts.
@@ -649,8 +837,7 @@ Usage:
649
837
  granola <command> [options]
650
838
 
651
839
  Commands:
652
- notes Export Granola notes to Markdown
653
- transcripts Export Granola transcripts to text files
840
+ ${commands.map((command) => ` ${command.name.padEnd(commandWidth)} ${command.description}`).join("\n")}
654
841
 
655
842
  Global options:
656
843
  --config <path> Path to .granola.toml
@@ -663,38 +850,6 @@ Examples:
663
850
  granola transcripts --cache "${granolaCacheCandidates()[0] ?? "/path/to/cache-v3.json"}"
664
851
  `;
665
852
  }
666
- function notesHelp() {
667
- return `Granola notes
668
-
669
- Usage:
670
- granola notes [options]
671
-
672
- Options:
673
- --output <path> Output directory for Markdown files (default: ./notes)
674
- --timeout <value> Request timeout, e.g. 2m, 30s, 120000 (default: 2m)
675
- --supabase <path> Path to supabase.json
676
- --debug Enable debug logging
677
- --config <path> Path to .granola.toml
678
- -h, --help Show help
679
- `;
680
- }
681
- function transcriptsHelp() {
682
- return `Granola transcripts
683
-
684
- Usage:
685
- granola transcripts [options]
686
-
687
- Options:
688
- --cache <path> Path to Granola cache JSON
689
- --output <path> Output directory for transcript files (default: ./transcripts)
690
- --debug Enable debug logging
691
- --config <path> Path to .granola.toml
692
- -h, --help Show help
693
- `;
694
- }
695
- function debug(enabled, ...values) {
696
- if (enabled) console.error("[debug]", ...values);
697
- }
698
853
  async function runCli(argv) {
699
854
  try {
700
855
  const { command, rest } = splitCommand(argv);
@@ -712,68 +867,15 @@ async function runCli(argv) {
712
867
  console.log(rootHelp());
713
868
  return 1;
714
869
  }
715
- switch (command) {
716
- case "notes": {
717
- const subcommand = parseFlags(global.rest, {
718
- help: { type: "boolean" },
719
- output: { type: "string" },
720
- timeout: { type: "string" }
721
- });
722
- if (subcommand.values.help || global.values.help) {
723
- console.log(notesHelp());
724
- return 0;
725
- }
726
- const config = await loadConfig({
727
- globalFlags: global.values,
728
- subcommandFlags: subcommand.values
729
- });
730
- if (!config.supabase) throw new Error(`supabase.json not found. Pass --supabase or create .granola.toml. Expected locations include: ${granolaSupabaseCandidates().join(", ")}`);
731
- debug(config.debug, "using config", config.configFileUsed ?? "(none)");
732
- debug(config.debug, "supabase", config.supabase);
733
- debug(config.debug, "timeoutMs", config.notes.timeoutMs);
734
- debug(config.debug, "output", config.notes.output);
735
- console.log("Fetching documents from Granola API...");
736
- const documents = await fetchDocuments({
737
- supabaseContents: await readFile(config.supabase, "utf8"),
738
- timeoutMs: config.notes.timeoutMs
739
- });
740
- console.log(`Exporting ${documents.length} notes to ${config.notes.output}...`);
741
- const written = await writeNotes(documents, config.notes.output);
742
- console.log("✓ Export completed successfully");
743
- debug(config.debug, "notes written", written);
744
- return 0;
745
- }
746
- case "transcripts": {
747
- const subcommand = parseFlags(global.rest, {
748
- cache: { type: "string" },
749
- help: { type: "boolean" },
750
- output: { type: "string" }
751
- });
752
- if (subcommand.values.help || global.values.help) {
753
- console.log(transcriptsHelp());
754
- return 0;
755
- }
756
- const config = await loadConfig({
757
- globalFlags: global.values,
758
- subcommandFlags: subcommand.values
759
- });
760
- if (!config.transcripts.cacheFile) throw new Error(`Granola cache file not found. Pass --cache or create .granola.toml. Expected locations include: ${granolaCacheCandidates().join(", ")}`);
761
- debug(config.debug, "using config", config.configFileUsed ?? "(none)");
762
- debug(config.debug, "cacheFile", config.transcripts.cacheFile);
763
- debug(config.debug, "output", config.transcripts.output);
764
- console.log("Reading Granola cache file...");
765
- const cacheData = parseCacheContents(await readFile(config.transcripts.cacheFile, "utf8"));
766
- const transcriptCount = Object.values(cacheData.transcripts).filter((segments) => segments.length > 0).length;
767
- console.log(`Exporting ${transcriptCount} transcripts to ${config.transcripts.output}...`);
768
- const written = await writeTranscripts(cacheData, config.transcripts.output);
769
- console.log("✓ Export completed successfully");
770
- debug(config.debug, "transcripts written", written);
771
- return 0;
772
- }
773
- default:
774
- console.log(rootHelp());
775
- return 1;
870
+ const subcommand = parseFlags(global.rest, command.flags);
871
+ if (subcommand.values.help || global.values.help) {
872
+ console.log(command.help());
873
+ return 0;
776
874
  }
875
+ return await command.run({
876
+ commandFlags: subcommand.values,
877
+ globalFlags: global.values
878
+ });
777
879
  } catch (error) {
778
880
  const message = error instanceof Error ? error.message : String(error);
779
881
  console.error(message);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "granola-toolkit",
3
- "version": "0.1.0",
3
+ "version": "0.3.0",
4
4
  "description": "CLI toolkit for exporting and working with Granola notes and transcripts",
5
5
  "keywords": [
6
6
  "cli",
@@ -15,7 +15,7 @@
15
15
  "author": "Nima Karimi",
16
16
  "repository": {
17
17
  "type": "git",
18
- "url": "git@github.com:kkarimi/granola-toolkit.git"
18
+ "url": "git+https://github.com/kkarimi/granola-toolkit.git"
19
19
  },
20
20
  "bin": {
21
21
  "granola": "dist/cli.js"
@@ -37,8 +37,11 @@
37
37
  "fmt": "vp fmt",
38
38
  "lint": "vp lint",
39
39
  "pack:dry-run": "npm pack --dry-run",
40
- "prepublishOnly": "vp pack",
40
+ "prepublishOnly": "node scripts/prepublish.mjs",
41
41
  "release": "node scripts/release.mjs",
42
+ "release:major": "node scripts/release.mjs major",
43
+ "release:minor": "node scripts/release.mjs minor",
44
+ "release:patch": "node scripts/release.mjs patch",
42
45
  "start": "node dist/cli.js",
43
46
  "notes": "node dist/cli.js notes",
44
47
  "transcripts": "node dist/cli.js transcripts",