granola-toolkit 0.4.0 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3) hide show
  1. package/README.md +14 -2
  2. package/dist/cli.js +177 -49
  3. package/package.json +1 -1
package/README.md CHANGED
@@ -119,9 +119,21 @@ Structured output formats are useful when you want to post-process exports in sc
119
119
 
120
120
  ### Incremental Writes
121
121
 
122
- Both commands are incremental. They only rewrite an export file when the source document appears newer than the file already on disk.
122
+ Both commands keep a small hidden state file in the output directory to track:
123
123
 
124
- That means repeated runs are cheap, and you can safely point the CLI at the same output directory over time.
124
+ - the mapping from document id to filename
125
+ - content hash
126
+ - source timestamp
127
+ - last export time
128
+
129
+ That state is used to:
130
+
131
+ - keep filenames stable even if a meeting title changes later
132
+ - skip rewrites when the rendered content is unchanged
133
+ - migrate old files cleanly when the output format changes
134
+ - delete stale exports when a document disappears from the source data
135
+
136
+ That makes repeated runs cheap and keeps long-lived export directories much cleaner.
125
137
 
126
138
  ## Config
127
139
 
package/dist/cli.js CHANGED
@@ -1,8 +1,9 @@
1
1
  #!/usr/bin/env node
2
- import { mkdir, readFile, stat, writeFile } from "node:fs/promises";
3
2
  import { existsSync } from "node:fs";
3
+ import { mkdir, readFile, rm, stat, writeFile } from "node:fs/promises";
4
4
  import { homedir } from "node:os";
5
5
  import { dirname, join } from "node:path";
6
+ import { createHash } from "node:crypto";
6
7
  //#region src/utils.ts
7
8
  const INVALID_FILENAME_CHARS = /[<>:"/\\|?*]/g;
8
9
  const CONTROL_CHARACTERS = /\p{Cc}/gu;
@@ -36,7 +37,6 @@ function compareStrings(left, right) {
36
37
  }
37
38
  function firstExistingPath(candidates) {
38
39
  for (const candidate of candidates) if (existsSync(candidate)) return candidate;
39
- return candidates[0];
40
40
  }
41
41
  function granolaSupabaseCandidates() {
42
42
  const home = homedir();
@@ -147,16 +147,6 @@ function latestDocumentTimestamp(document) {
147
147
  });
148
148
  return candidates[0] ?? document.updatedAt;
149
149
  }
150
- async function shouldWriteFile(filePath, updatedAt) {
151
- try {
152
- const existing = await stat(filePath);
153
- const updatedTime = new Date(updatedAt);
154
- if (Number.isNaN(updatedTime.getTime())) return true;
155
- return updatedTime.getTime() > existing.mtime.getTime();
156
- } catch {
157
- return true;
158
- }
159
- }
160
150
  async function writeTextFile(filePath, content) {
161
151
  await mkdir(dirname(filePath), { recursive: true });
162
152
  await writeFile(filePath, content, "utf8");
@@ -392,10 +382,13 @@ function parseSimpleToml(contents) {
392
382
  return values;
393
383
  }
394
384
  async function loadTomlConfig(configPath) {
395
- if (configPath) return {
396
- path: configPath,
397
- values: parseSimpleToml(await readUtf8(configPath))
398
- };
385
+ if (configPath) {
386
+ if (!existsSync(configPath)) throw new Error(`config file not found: ${configPath}`);
387
+ return {
388
+ path: configPath,
389
+ values: parseSimpleToml(await readUtf8(configPath))
390
+ };
391
+ }
399
392
  const candidates = [join(process.cwd(), ".granola.toml"), join(homedir(), ".granola.toml")];
400
393
  for (const candidate of candidates) if (existsSync(candidate)) return {
401
394
  path: candidate,
@@ -430,6 +423,134 @@ async function loadConfig(options) {
430
423
  };
431
424
  }
432
425
  //#endregion
426
+ //#region src/export-state.ts
427
+ const EXPORT_STATE_VERSION = 1;
428
+ function exportStatePath(outputDir, kind) {
429
+ return join(outputDir, `.granola-toolkit-${kind}-state.json`);
430
+ }
431
+ function emptyExportState(kind) {
432
+ return {
433
+ entries: {},
434
+ kind,
435
+ version: EXPORT_STATE_VERSION
436
+ };
437
+ }
438
+ function normaliseExportState(parsed, kind) {
439
+ const record = asRecord(parsed);
440
+ if (!record || record.version !== EXPORT_STATE_VERSION || record.kind !== kind) return emptyExportState(kind);
441
+ const rawEntries = asRecord(record.entries) ?? {};
442
+ return {
443
+ entries: Object.fromEntries(Object.entries(rawEntries).map(([id, entry]) => {
444
+ const value = asRecord(entry);
445
+ if (!value) return;
446
+ const fileName = stringValue(value.fileName);
447
+ const fileStem = stringValue(value.fileStem);
448
+ if (!fileName || !fileStem) return;
449
+ return [id, {
450
+ contentHash: stringValue(value.contentHash),
451
+ exportedAt: stringValue(value.exportedAt),
452
+ fileName,
453
+ fileStem,
454
+ sourceUpdatedAt: stringValue(value.sourceUpdatedAt)
455
+ }];
456
+ }).filter((entry) => Boolean(entry))),
457
+ kind,
458
+ version: EXPORT_STATE_VERSION
459
+ };
460
+ }
461
+ async function loadExportState(outputDir, kind) {
462
+ const statePath = exportStatePath(outputDir, kind);
463
+ try {
464
+ return normaliseExportState(parseJsonString(await readUtf8(statePath)), kind);
465
+ } catch {
466
+ return emptyExportState(kind);
467
+ }
468
+ }
469
+ function hashContent(content) {
470
+ return createHash("sha256").update(content).digest("hex");
471
+ }
472
+ function reserveStem(used, preferredStem, existingStem) {
473
+ if (existingStem && (used.get(existingStem) ?? 0) === 0) {
474
+ used.set(existingStem, 1);
475
+ return existingStem;
476
+ }
477
+ return makeUniqueFilename(preferredStem, used);
478
+ }
479
+ async function fileExists(pathname) {
480
+ try {
481
+ await stat(pathname);
482
+ return true;
483
+ } catch {
484
+ return false;
485
+ }
486
+ }
487
+ function entryChanged(left, right) {
488
+ if (!left) return true;
489
+ return left.contentHash !== right.contentHash || left.exportedAt !== right.exportedAt || left.fileName !== right.fileName || left.fileStem !== right.fileStem || left.sourceUpdatedAt !== right.sourceUpdatedAt;
490
+ }
491
+ async function syncManagedExports({ items, kind, outputDir }) {
492
+ await ensureDirectory(outputDir);
493
+ const previousEntries = (await loadExportState(outputDir, kind)).entries;
494
+ const used = /* @__PURE__ */ new Map();
495
+ const plans = items.map((item) => {
496
+ const existing = previousEntries[item.id];
497
+ const fileStem = reserveStem(used, item.preferredStem, existing?.fileStem);
498
+ return {
499
+ content: item.content,
500
+ contentHash: hashContent(item.content),
501
+ existing,
502
+ fileName: `${fileStem}${item.extension}`,
503
+ fileStem,
504
+ id: item.id,
505
+ sourceUpdatedAt: item.sourceUpdatedAt
506
+ };
507
+ });
508
+ const activeIds = new Set(plans.map((plan) => plan.id));
509
+ const activeFileNames = new Set(plans.map((plan) => plan.fileName));
510
+ const exportedAt = (/* @__PURE__ */ new Date()).toISOString();
511
+ const nextEntries = {};
512
+ let written = 0;
513
+ let stateChanged = false;
514
+ for (const plan of plans) {
515
+ const filePath = join(outputDir, plan.fileName);
516
+ const shouldWrite = !plan.existing || plan.existing.contentHash !== plan.contentHash || plan.existing.fileName !== plan.fileName || !await fileExists(filePath);
517
+ if (shouldWrite) {
518
+ await writeTextFile(filePath, plan.content);
519
+ written += 1;
520
+ }
521
+ const nextEntry = {
522
+ contentHash: plan.contentHash,
523
+ exportedAt: shouldWrite ? exportedAt : plan.existing?.exportedAt ?? exportedAt,
524
+ fileName: plan.fileName,
525
+ fileStem: plan.fileStem,
526
+ sourceUpdatedAt: plan.sourceUpdatedAt
527
+ };
528
+ nextEntries[plan.id] = nextEntry;
529
+ stateChanged = stateChanged || entryChanged(plan.existing, nextEntry);
530
+ }
531
+ for (const plan of plans) {
532
+ const previousFileName = plan.existing?.fileName;
533
+ if (previousFileName && previousFileName !== plan.fileName && !activeFileNames.has(previousFileName)) {
534
+ await rm(join(outputDir, previousFileName), { force: true });
535
+ stateChanged = true;
536
+ }
537
+ }
538
+ for (const [id, entry] of Object.entries(previousEntries)) {
539
+ if (activeIds.has(id)) continue;
540
+ if (!activeFileNames.has(entry.fileName)) await rm(join(outputDir, entry.fileName), { force: true });
541
+ stateChanged = true;
542
+ }
543
+ const serialisedState = `${JSON.stringify({
544
+ entries: nextEntries,
545
+ kind,
546
+ version: EXPORT_STATE_VERSION
547
+ }, null, 2)}\n`;
548
+ const statePath = exportStatePath(outputDir, kind);
549
+ const existingState = await fileExists(statePath) ? await readUtf8(statePath) : void 0;
550
+ if (stateChanged || existingState !== serialisedState) await writeTextFile(statePath, serialisedState);
551
+ return written;
552
+ }
553
+ //#endregion
433
554
  //#region src/render.ts
434
555
  function formatScalar(value) {
435
556
  if (value == null) return "null";
@@ -645,17 +766,20 @@ function noteFileExtension(format) {
645
766
  }
646
767
  }
647
768
  async function writeNotes(documents, outputDir, format = "markdown") {
648
- await ensureDirectory(outputDir);
649
- const sorted = [...documents].sort((left, right) => compareStrings(left.title || left.id, right.title || right.id) || compareStrings(left.id, right.id));
650
- const used = /* @__PURE__ */ new Map();
651
- let written = 0;
652
- for (const document of sorted) {
653
- const filePath = join(outputDir, `${makeUniqueFilename(documentFilename(document), used)}${noteFileExtension(format)}`);
654
- if (!await shouldWriteFile(filePath, latestDocumentTimestamp(document))) continue;
655
- await writeTextFile(filePath, renderNoteExport(buildNoteExport(document), format));
656
- written += 1;
657
- }
658
- return written;
769
+ return await syncManagedExports({
770
+ items: [...documents].sort((left, right) => compareStrings(left.title || left.id, right.title || right.id) || compareStrings(left.id, right.id)).map((document) => {
771
+ const note = buildNoteExport(document);
772
+ return {
773
+ content: renderNoteExport(note, format),
774
+ extension: noteFileExtension(format),
775
+ id: note.id,
776
+ preferredStem: documentFilename(document),
777
+ sourceUpdatedAt: latestDocumentTimestamp(document)
778
+ };
779
+ }),
780
+ kind: "notes",
781
+ outputDir
782
+ });
659
783
  }
660
784
  //#endregion
661
785
  //#region src/commands/shared.ts
@@ -696,6 +820,7 @@ const notesCommand = {
696
820
  subcommandFlags: commandFlags
697
821
  });
698
822
  if (!config.supabase) throw new Error(`supabase.json not found. Pass --supabase or create .granola.toml. Expected locations include: ${granolaSupabaseCandidates().join(", ")}`);
823
+ if (!existsSync(config.supabase)) throw new Error(`supabase.json not found: ${config.supabase}`);
699
824
  debug(config.debug, "using config", config.configFileUsed ?? "(none)");
700
825
  debug(config.debug, "supabase", config.supabase);
701
826
  debug(config.debug, "timeoutMs", config.notes.timeoutMs);
@@ -853,29 +978,31 @@ function transcriptFileExtension(format) {
853
978
  }
854
979
  }
855
980
  async function writeTranscripts(cacheData, outputDir, format = "text") {
856
- await ensureDirectory(outputDir);
857
- const entries = Object.entries(cacheData.transcripts).filter(([, segments]) => segments.length > 0).sort(([leftId], [rightId]) => {
858
- const leftDocument = cacheData.documents[leftId];
859
- const rightDocument = cacheData.documents[rightId];
860
- return compareStrings(leftDocument?.title || leftId, rightDocument?.title || rightId) || compareStrings(leftId, rightId);
981
+ return await syncManagedExports({
982
+ items: Object.entries(cacheData.transcripts).filter(([, segments]) => segments.length > 0).sort(([leftId], [rightId]) => {
983
+ const leftDocument = cacheData.documents[leftId];
984
+ const rightDocument = cacheData.documents[rightId];
985
+ return compareStrings(leftDocument?.title || leftId, rightDocument?.title || rightId) || compareStrings(leftId, rightId);
986
+ }).flatMap(([documentId, segments]) => {
987
+ const document = cacheData.documents[documentId] ?? {
988
+ createdAt: "",
989
+ id: documentId,
990
+ title: documentId,
991
+ updatedAt: ""
992
+ };
993
+ const content = renderTranscriptExport(buildTranscriptExport(document, segments), format);
994
+ if (!content) return [];
995
+ return [{
996
+ content,
997
+ extension: transcriptFileExtension(format),
998
+ id: document.id,
999
+ preferredStem: transcriptFilename(document),
1000
+ sourceUpdatedAt: document.updatedAt
1001
+ }];
1002
+ }),
1003
+ kind: "transcripts",
1004
+ outputDir
861
1005
  });
862
- const used = /* @__PURE__ */ new Map();
863
- let written = 0;
864
- for (const [documentId, segments] of entries) {
865
- const document = cacheData.documents[documentId] ?? {
866
- createdAt: "",
867
- id: documentId,
868
- title: documentId,
869
- updatedAt: ""
870
- };
871
- const filePath = join(outputDir, `${makeUniqueFilename(transcriptFilename(document), used)}${transcriptFileExtension(format)}`);
872
- if (!await shouldWriteFile(filePath, document.updatedAt)) continue;
873
- const content = renderTranscriptExport(buildTranscriptExport(document, segments), format);
874
- if (!content) continue;
875
- await writeTextFile(filePath, content);
876
- written += 1;
877
- }
878
- return written;
879
1006
  }
880
1007
  //#endregion
881
1008
  //#region src/commands/transcripts.ts
@@ -910,6 +1037,7 @@ const transcriptsCommand = {
910
1037
  subcommandFlags: commandFlags
911
1038
  });
912
1039
  if (!config.transcripts.cacheFile) throw new Error(`Granola cache file not found. Pass --cache or create .granola.toml. Expected locations include: ${granolaCacheCandidates().join(", ")}`);
1040
+ if (!existsSync(config.transcripts.cacheFile)) throw new Error(`Granola cache file not found: ${config.transcripts.cacheFile}`);
913
1041
  debug(config.debug, "using config", config.configFileUsed ?? "(none)");
914
1042
  debug(config.debug, "cacheFile", config.transcripts.cacheFile);
915
1043
  debug(config.debug, "output", config.transcripts.output);
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "granola-toolkit",
3
- "version": "0.4.0",
3
+ "version": "0.6.0",
4
4
  "description": "CLI toolkit for exporting and working with Granola notes and transcripts",
5
5
  "keywords": [
6
6
  "cli",