@helloao/cli 0.0.13 → 0.0.14

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cjs/cli.cjs CHANGED
@@ -5959,14 +5959,16 @@ function getBookId(book) {
  return null;
  }
  var KNOWN_SKIPPED_VERSES = /* @__PURE__ */ new Set([
+ "MAT 12:47",
  "MAT 17:21",
  "MAT 18:11",
  "MAT 23:14",
- "MAR 7:16",
- "MAR 9:44",
- "MAR 9:46",
- "MAR 11:26",
+ "MRK 7:16",
+ "MRK 9:44",
+ "MRK 9:46",
+ "MRK 11:26",
  "LUK 17:36",
+ "LUK 23:17",
  "JHN 5:4",
  "ACT 8:37",
  "ACT 15:34",
@@ -6029,19 +6031,23 @@ function generateApiForDataset(dataset, options = {}) {
  };
  let translationChapters = [];
  for (let { chapters, ...book } of books) {
+ const firstChapterNumber = chapters[0].chapter.number;
+ const lastChapterNumber = chapters[chapters.length - 1].chapter.number;
  const apiBook = {
  ...book,
+ firstChapterNumber,
  firstChapterApiLink: bookChapterApiLink(
  translation.id,
  getBookLink(book),
- 1,
+ firstChapterNumber,
  "json",
  apiPathPrefix
  ),
+ lastChapterNumber,
  lastChapterApiLink: bookChapterApiLink(
  translation.id,
  getBookLink(book),
- chapters.length,
+ lastChapterNumber,
  "json",
  apiPathPrefix
  ),
@@ -6166,22 +6172,26 @@ function generateApiForDataset(dataset, options = {}) {
  };
  let commentaryChapters = [];
  for (let { chapters, ...book } of books) {
+ const firstChapterNumber = chapters[0]?.chapter.number ?? null;
+ const lastChapterNumber = chapters[chapters.length - 1]?.chapter.number ?? null;
  const apiBook = {
  ...book,
- firstChapterApiLink: bookCommentaryChapterApiLink(
+ firstChapterNumber,
+ firstChapterApiLink: firstChapterNumber ? bookCommentaryChapterApiLink(
  commentary.id,
  getBookLink(book),
- 1,
+ firstChapterNumber,
  "json",
  apiPathPrefix
- ),
- lastChapterApiLink: bookCommentaryChapterApiLink(
+ ) : null,
+ lastChapterNumber,
+ lastChapterApiLink: lastChapterNumber ? bookCommentaryChapterApiLink(
  commentary.id,
  getBookLink(book),
- chapters.length,
+ lastChapterNumber,
  "json",
  apiPathPrefix
- ),
+ ) : null,
  numberOfChapters: chapters.length,
  totalNumberOfVerses: 0
  };
@@ -13961,8 +13971,9 @@ function getDbPath(p) {
  }
  return getDbPathFromDir(process.cwd());
  }
- function getPrismaDbFromDir(dir) {
- const dbPath = getDbPathFromDir(dir);
+ function getPrismaDb(path6) {
+ const dbPath = getDbPath(path6);
+ console.log("Opening database at", dbPath);
  const prisma = new import_prisma_gen.PrismaClient({
  datasources: {
  db: {
@@ -13972,12 +13983,13 @@ function getPrismaDbFromDir(dir) {
  });
  return prisma;
  }
- async function getDbFromDir(dir) {
- const dbPath = getDbPathFromDir(dir);
- const db = await getDb(dbPath);
+ async function getDb(path6) {
+ const dbPath = getDbPath(path6);
+ const db = await getDbFromPath(dbPath);
  return db;
  }
- async function getDb(dbPath) {
+ async function getDbFromPath(dbPath) {
+ console.log("Opening database at", dbPath);
  const logger2 = log_exports.getLogger();
  const migrationsPath = await getMigrationsPath();
  if (!migrationsPath) {
@@ -14922,7 +14934,7 @@ async function initDb(dbPath, options) {
  }
  }
  } else {
- const db = await getDb(getDbPath(dbPath));
+ const db = await getDb(dbPath);
  db.close();
  }
  }
@@ -14931,7 +14943,7 @@ async function importTranslation(dir, dirs, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  await importTranslations(
  db,
@@ -14949,7 +14961,7 @@ async function importTranslations2(dir, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  const files = await (0, import_promises5.readdir)(dir);
  const translationDirs = files.map((f) => import_node_path2.default.resolve(dir, f));
@@ -14969,7 +14981,7 @@ async function importCommentary(dir, dirs, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  await importCommentaries(
  db,
@@ -14987,7 +14999,7 @@ async function importCommentaries2(dir, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  const files = await (0, import_promises5.readdir)(dir);
  const commentaryDirs = files.map((f) => import_node_path2.default.resolve(dir, f));
@@ -15158,7 +15170,7 @@ Found ${sources.length} sources for '${translationQuery}':`);
  }
  async function sourceTranslations(outputDir, translations, options = {}) {
  const logger2 = log_exports.getLogger();
- const {
+ let {
  convertToUsx3 = false,
  useDatabase = true,
  // Default to true
@@ -15193,20 +15205,27 @@ async function sourceTranslations(outputDir, translations, options = {}) {
  let db = null;
  let sourceExists = null;
  let sourceUpsert = null;
- let skippedByDatabase = 0;
  if (useDatabase) {
+ if (!overwrite) {
+ overwrite = true;
+ console.warn(
+ "Overwriting files is enabled due to database tracking."
+ );
+ }
  logger2.log("Connecting to database for download tracking...");
- db = await getDbFromDir(process.cwd());
+ db = await getDb(options.db);
  sourceExists = db.prepare(
  "SELECT usfmZipEtag, usfmDownloadDate FROM EBibleSource WHERE id = @id AND sha256 = @sha256;"
  );
+ let skippedByDatabase = 0;
+ let sourcesChecked = 0;
+ let notDownloaded = 0;
  filteredSources = filteredSources.filter((source) => {
- if (overwrite) {
- return true;
- }
  const existingSource = sourceExists.get(source);
+ sourcesChecked++;
  if (existingSource) {
  if (!existingSource.usfmDownloadDate) {
+ notDownloaded++;
  return true;
  }
  source.usfmZipEtag = existingSource.usfmZipEtag;
@@ -15221,13 +15240,18 @@ async function sourceTranslations(outputDir, translations, options = {}) {
  });
  if (skippedByDatabase > 0) {
  logger2.log(
- `Database filtering: Skipped ${skippedByDatabase} already downloaded sources`
+ `Database filtering: Skipped ${skippedByDatabase} already downloaded sources.`
  );
  } else {
- logger2.log(
- "Database filtering: No sources were skipped (none previously downloaded)"
- );
+ logger2.log(`Database filtering: No sources were skipped.`);
  }
+ logger2.log(`Database filtering:`);
+ logger2.log(` \u2022 Checked: ${sourcesChecked}`);
+ logger2.log(` \u2022 Skipped: ${skippedByDatabase}`);
+ logger2.log(` \u2022 New: ${notDownloaded}`);
+ logger2.log(
+ ` \u2022 Needs Update: ${filteredSources.length - notDownloaded}`
+ );
  sourceUpsert = db.prepare(`INSERT INTO EBibleSource(
  id, translationId, title, shortTitle, languageCode, textDirection, copyright, description,
  oldTestamentBooks, oldTestamentChapters, oldTestamentVerses,
@@ -15434,9 +15458,9 @@ Total selected: ${selectedSources.length} sources from ${sourceGroups.size} tran
  if (!convertToUsx3) {
  source.usfmDownloadDate = import_luxon2.DateTime.utc().toISO();
  source.usfmDownloadPath = outputDir;
- }
- if (sourceUpsert) {
- sourceUpsert.run(source);
+ if (sourceUpsert) {
+ sourceUpsert.run(source);
+ }
  }
  } catch (error) {
  numErrored++;
@@ -15704,7 +15728,7 @@ async function start() {
  globalThis.Element = import_linkedom2.Element;
  globalThis.Node = import_linkedom2.Node;
  const program = new import_commander.Command();
- program.name("helloao").description("A CLI for managing a Free Use Bible API.").version("0.0.1");
+ program.name("helloao").description("A CLI for managing a Free Use Bible API.").option("--db <path>", "Path to the database file.").version("0.0.1");
  program.command("init [path]").description("Initialize a new Bible API DB.").option(
  "--source <path>",
  "The source database to copy from. If given a HTTPS URL, then the database will be downloaded from the given URL."
@@ -15712,7 +15736,10 @@ async function start() {
  "--language <languages...>",
  "The language(s) that the database should be initialized with."
  ).action(async (dbPath, options) => {
- await initDb(dbPath, options);
+ await initDb(dbPath, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("generate-translation-metadata").description("Generates a metadata file for a translation.").action(async () => {
  const meta = await askForMetadata();
@@ -15741,22 +15768,34 @@ async function start() {
  program.command("import-translation <dir> [dirs...]").description(
  "Imports a translation from the given directory into the database."
  ).option("--overwrite", "Whether to overwrite existing files.").action(async (dir, dirs, options) => {
- await importTranslation(dir, dirs, options);
+ await importTranslation(dir, dirs, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("import-translations <dir>").description(
  "Imports all translations from the given directory into the database."
  ).option("--overwrite", "Whether to overwrite existing files.").action(async (dir, options) => {
- await importTranslations2(dir, options);
+ await importTranslations2(dir, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("import-commentary <dir> [dirs...]").description(
  "Imports a commentary from the given directory into the database."
  ).option("--overwrite", "Whether to overwrite existing files.").action(async (dir, dirs, options) => {
- await importCommentary(dir, dirs, options);
+ await importCommentary(dir, dirs, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("import-commentaries <dir>").description(
  "Imports all commentaries from the given directory into the database."
  ).option("--overwrite", "Whether to overwrite existing files.").action(async (dir, options) => {
- await importCommentaries2(dir, options);
+ await importCommentaries2(dir, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("upload-test-translation <input>").description(
  `Uploads a translation to the HelloAO Free Bible API test S3 bucket.
@@ -15805,7 +15844,10 @@ For inquiries, please contact hello@helloao.org.`
  if (!good) {
  return;
  }
- const result = await uploadTestTranslation(input5, options);
+ const result = await uploadTestTranslation(input5, {
+ ...program.opts(),
+ ...options
+ });
  if (result) {
  const logger2 = log_exports.getLogger();
  logger2.log("\n");
@@ -15865,7 +15907,10 @@ For inquiries, please contact hello@helloao.org.`
  if (!good) {
  return;
  }
- const result = await uploadTestTranslations(input5, options);
+ const result = await uploadTestTranslations(input5, {
+ ...program.opts(),
+ ...options
+ });
  if (result) {
  const logger2 = log_exports.getLogger();
  logger2.log("\nVersion: ", result.version);
@@ -15909,7 +15954,10 @@ For inquiries, please contact hello@helloao.org.`
  "--s3-region <region>",
  "The AWS region to use for uploading to S3."
  ).option("--pretty", "Whether to generate pretty-printed JSON files.").action(async (input5, dest, options) => {
- await generateTranslationFiles(input5, dest, options);
+ await generateTranslationFiles(input5, dest, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("generate-translations-files <input> <dir>").description("Generates API files from the given input translations.").option(
  "--batch-size <size>",
@@ -15943,7 +15991,10 @@ For inquiries, please contact hello@helloao.org.`
  "--s3-region <region>",
  "The AWS region to use for uploading to S3."
  ).option("--pretty", "Whether to generate pretty-printed JSON files.").action(async (input5, dest, options) => {
- await generateTranslationsFiles(input5, dest, options);
+ await generateTranslationsFiles(input5, dest, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("upload-api-files").argument("<dest>", "The destination to upload the API files to.").description(
  "Uploads API files to the specified destination. For S3, use the format s3://bucket-name/path/to/folder."
@@ -15985,7 +16036,7 @@ For inquiries, please contact hello@helloao.org.`
  "--verbose",
  "Whether to output verbose information during the upload."
  ).action(async (dest, options) => {
- const db = getPrismaDbFromDir(process.cwd());
+ const db = getPrismaDb(program.opts().db);
  try {
  await uploadApiFilesFromDatabase(db, dest, options);
  } finally {
@@ -16010,7 +16061,10 @@ For inquiries, please contact hello@helloao.org.`
  overwrite: options.overwrite
  }
  };
- await sourceTranslations(dir, translations, sourceOptions);
+ await sourceTranslations(dir, translations, {
+ ...program.opts(),
+ ...sourceOptions
+ });
  });
  program.command("list-ebible-translations [search]").description(
  "List available eBible translations. Optionally filter by search term."
@@ -16023,7 +16077,10 @@ For inquiries, please contact hello@helloao.org.`
  "-a, --all",
  "Fetch all translations. If omitted, only undownloaded translations will be fetched."
  ).action(async (dir, translations, options) => {
- await fetchAudio(dir, translations, options);
+ await fetchAudio(dir, translations, {
+ ...program.opts(),
+ ...options
+ });
  });
  program.command("fetch-bible-metadata <dir>").description(
  "Fetches the Theographic bible metadata and places it in the given directory."
@@ -5746,11 +5746,10 @@ var db_exports = {};
  __export(db_exports, {
  getChangedOrNewInputFiles: () => getChangedOrNewInputFiles,
  getDb: () => getDb,
- getDbFromDir: () => getDbFromDir,
  getDbPath: () => getDbPath,
  getDbPathFromDir: () => getDbPathFromDir,
  getMigrationsPath: () => getMigrationsPath,
- getPrismaDbFromDir: () => getPrismaDbFromDir,
+ getPrismaDb: () => getPrismaDb,
  importCommentaries: () => importCommentaries,
  importFileBatch: () => importFileBatch,
  importFiles: () => importFiles,
@@ -6886,14 +6885,16 @@ function getBookId(book) {
  return null;
  }
  var KNOWN_SKIPPED_VERSES = /* @__PURE__ */ new Set([
+ "MAT 12:47",
  "MAT 17:21",
  "MAT 18:11",
  "MAT 23:14",
- "MAR 7:16",
- "MAR 9:44",
- "MAR 9:46",
- "MAR 11:26",
+ "MRK 7:16",
+ "MRK 9:44",
+ "MRK 9:46",
+ "MRK 11:26",
  "LUK 17:36",
+ "LUK 23:17",
  "JHN 5:4",
  "ACT 8:37",
  "ACT 15:34",
@@ -12432,19 +12433,23 @@ function generateApiForDataset(dataset, options = {}) {
  };
  let translationChapters = [];
  for (let { chapters, ...book } of books) {
+ const firstChapterNumber = chapters[0].chapter.number;
+ const lastChapterNumber = chapters[chapters.length - 1].chapter.number;
  const apiBook = {
  ...book,
+ firstChapterNumber,
  firstChapterApiLink: bookChapterApiLink(
  translation.id,
  getBookLink(book),
- 1,
+ firstChapterNumber,
  "json",
  apiPathPrefix
  ),
+ lastChapterNumber,
  lastChapterApiLink: bookChapterApiLink(
  translation.id,
  getBookLink(book),
- chapters.length,
+ lastChapterNumber,
  "json",
  apiPathPrefix
  ),
@@ -12569,22 +12574,26 @@ function generateApiForDataset(dataset, options = {}) {
  };
  let commentaryChapters = [];
  for (let { chapters, ...book } of books) {
+ const firstChapterNumber = chapters[0]?.chapter.number ?? null;
+ const lastChapterNumber = chapters[chapters.length - 1]?.chapter.number ?? null;
  const apiBook = {
  ...book,
- firstChapterApiLink: bookCommentaryChapterApiLink(
+ firstChapterNumber,
+ firstChapterApiLink: firstChapterNumber ? bookCommentaryChapterApiLink(
  commentary.id,
  getBookLink(book),
- 1,
+ firstChapterNumber,
  "json",
  apiPathPrefix
- ),
- lastChapterApiLink: bookCommentaryChapterApiLink(
+ ) : null,
+ lastChapterNumber,
+ lastChapterApiLink: lastChapterNumber ? bookCommentaryChapterApiLink(
  commentary.id,
  getBookLink(book),
- chapters.length,
+ lastChapterNumber,
  "json",
  apiPathPrefix
- ),
+ ) : null,
  numberOfChapters: chapters.length,
  totalNumberOfVerses: 0
  };
@@ -13956,8 +13965,9 @@ function getDbPath(p) {
  }
  return getDbPathFromDir(process.cwd());
  }
- function getPrismaDbFromDir(dir) {
- const dbPath = getDbPathFromDir(dir);
+ function getPrismaDb(path5) {
+ const dbPath = getDbPath(path5);
+ console.log("Opening database at", dbPath);
  const prisma = new import_prisma_gen.PrismaClient({
  datasources: {
  db: {
@@ -13967,12 +13977,13 @@ function getPrismaDbFromDir(dir) {
  });
  return prisma;
  }
- async function getDbFromDir(dir) {
- const dbPath = getDbPathFromDir(dir);
- const db = await getDb(dbPath);
+ async function getDb(path5) {
+ const dbPath = getDbPath(path5);
+ const db = await getDbFromPath(dbPath);
  return db;
  }
- async function getDb(dbPath) {
+ async function getDbFromPath(dbPath) {
+ console.log("Opening database at", dbPath);
  const logger2 = log_exports.getLogger();
  const migrationsPath = await getMigrationsPath();
  if (!migrationsPath) {
@@ -15029,7 +15040,7 @@ async function initDb(dbPath, options) {
  }
  }
  } else {
- const db = await getDb(getDbPath(dbPath));
+ const db = await getDb(dbPath);
  db.close();
  }
  }
@@ -15038,7 +15049,7 @@ async function importTranslation(dir, dirs, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  await importTranslations(
  db,
@@ -15056,7 +15067,7 @@ async function importTranslations2(dir, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  const files = await (0, import_promises5.readdir)(dir);
  const translationDirs = files.map((f) => import_node_path2.default.resolve(dir, f));
@@ -15076,7 +15087,7 @@ async function importCommentary(dir, dirs, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  await importCommentaries(
  db,
@@ -15094,7 +15105,7 @@ async function importCommentaries2(dir, options) {
  globalThis.DOMParser = import_linkedom.DOMParser;
  globalThis.Element = import_linkedom.Element;
  globalThis.Node = import_linkedom.Node;
- const db = await getDbFromDir(process.cwd());
+ const db = await getDb(options.db);
  try {
  const files = await (0, import_promises5.readdir)(dir);
  const commentaryDirs = files.map((f) => import_node_path2.default.resolve(dir, f));
@@ -15265,7 +15276,7 @@ Found ${sources.length} sources for '${translationQuery}':`);
  }
  async function sourceTranslations(outputDir, translations, options = {}) {
  const logger2 = log_exports.getLogger();
- const {
+ let {
  convertToUsx3 = false,
  useDatabase = true,
  // Default to true
@@ -15300,20 +15311,27 @@ async function sourceTranslations(outputDir, translations, options = {}) {
  let db = null;
  let sourceExists = null;
  let sourceUpsert = null;
- let skippedByDatabase = 0;
  if (useDatabase) {
+ if (!overwrite) {
+ overwrite = true;
+ console.warn(
+ "Overwriting files is enabled due to database tracking."
+ );
+ }
  logger2.log("Connecting to database for download tracking...");
- db = await getDbFromDir(process.cwd());
+ db = await getDb(options.db);
  sourceExists = db.prepare(
  "SELECT usfmZipEtag, usfmDownloadDate FROM EBibleSource WHERE id = @id AND sha256 = @sha256;"
  );
+ let skippedByDatabase = 0;
+ let sourcesChecked = 0;
+ let notDownloaded = 0;
  filteredSources = filteredSources.filter((source) => {
- if (overwrite) {
- return true;
- }
  const existingSource = sourceExists.get(source);
+ sourcesChecked++;
  if (existingSource) {
  if (!existingSource.usfmDownloadDate) {
+ notDownloaded++;
  return true;
  }
  source.usfmZipEtag = existingSource.usfmZipEtag;
@@ -15328,13 +15346,18 @@ async function sourceTranslations(outputDir, translations, options = {}) {
  });
  if (skippedByDatabase > 0) {
  logger2.log(
- `Database filtering: Skipped ${skippedByDatabase} already downloaded sources`
+ `Database filtering: Skipped ${skippedByDatabase} already downloaded sources.`
  );
  } else {
- logger2.log(
- "Database filtering: No sources were skipped (none previously downloaded)"
- );
+ logger2.log(`Database filtering: No sources were skipped.`);
  }
+ logger2.log(`Database filtering:`);
+ logger2.log(` \u2022 Checked: ${sourcesChecked}`);
+ logger2.log(` \u2022 Skipped: ${skippedByDatabase}`);
+ logger2.log(` \u2022 New: ${notDownloaded}`);
+ logger2.log(
+ ` \u2022 Needs Update: ${filteredSources.length - notDownloaded}`
+ );
  sourceUpsert = db.prepare(`INSERT INTO EBibleSource(
  id, translationId, title, shortTitle, languageCode, textDirection, copyright, description,
  oldTestamentBooks, oldTestamentChapters, oldTestamentVerses,
@@ -15541,9 +15564,9 @@ Total selected: ${selectedSources.length} sources from ${sourceGroups.size} tran
  if (!convertToUsx3) {
  source.usfmDownloadDate = import_luxon2.DateTime.utc().toISO();
  source.usfmDownloadPath = outputDir;
- }
- if (sourceUpsert) {
- sourceUpsert.run(source);
+ if (sourceUpsert) {
+ sourceUpsert.run(source);
+ }
  }
  } catch (error) {
  numErrored++;