@helloao/cli 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -34,4 +34,92 @@ Commands:
  Translations should be in the format "translationId/audioId". e.g. "BSB/gilbert"
  fetch-bible-metadata <dir> Fetches the Theographic bible metadata and places it in the given directory.
  help [command] display help for command
- ```
+ ```
+
+ The `@helloao/cli` package can also be used as a library.
+
+ The library exports a variety of actions, utilities, and supporting classes designed to assist with generating and managing a Free Use Bible API.
+
+ There are 6 main exports:
+
+ - `actions` - This export contains function versions of the CLI commands. They make it easy to call a CLI command from a script.
+ - `db` - This export contains functions that make working with a database easier. It supports operations like importing translations into a database, inserting chapters, verses, etc. and getting an updated database instance from a path.
+ - `downloads` - This export contains functions that make downloading files easier.
+ - `files` - This export contains functions that make working with files easier. It has functions to load files from a translation, discover translation metadata from the filesystem, and classes that support uploading API files to the local file system or to a zip archive.
+ - `uploads` - This export contains functions that make it easy to upload an API to a destination like S3, the local filesystem, or a zip archive.
+ - `s3` - This export contains a class that can upload files to S3.
+
+ Here are some common operations that you might want to perform:
+
+ #### Get a SQL Database
+
+ ```typescript
+ import { db } from '@helloao/cli';
+
+ const pathToDb = './bible-database.db';
+ const database = await db.getDb(pathToDb);
+
+ // do work on the database
+
+ // Close it when you are done.
+ database.close();
+ ```
+
+ #### Import a translation into a database from a directory
+
+ ```typescript
+ import { db } from '@helloao/cli';
+
+ const pathToDb = './bible-database.db';
+ const database = await db.getDb(pathToDb);
+
+ // Get a DOMParser for parsing USX.
+ // On Node.js, you may have to import jsdom or linkedom.
+ const parser = new DOMParser();
+
+ const pathToTranslation = './path/to/translation';
+
+ // Whether to overwrite files that already exist in the database.
+ // The system automatically hashes the input files and overwrites changed files as needed, so this is only needed
+ // when you know that the files must be overwritten.
+ const overwrite = false;
+ await db.importTranslations(database, [pathToTranslation], parser, overwrite);
+ ```
+
+ #### Generate an API from a translation
+
+ ```typescript
+ import { files, uploads } from '@helloao/cli';
+ import { generation } from '@helloao/tools';
+ import { toAsyncIterable } from '@helloao/tools/parser/iterators';
+
+ const translationPath = './path/to/translation';
+ const translationFiles = await files.loadTranslationFiles(translationPath);
+
+ // Used to parse XML
+ const domParser = new DOMParser();
+
+ // Generate a dataset from the files.
+ // Datasets organize all the files and their content
+ // by translation, book, chapter, and verse.
+ const dataset = generation.dataset.generateDataset(translationFiles, domParser);
+
+ // You can optionally specify a prefix that should be added to all API
+ // links.
+ const pathPrefix = '';
+
+ // Generate an API representation from the dataset.
+ // This adds links between chapters and additional metadata.
+ const api = generation.api.generateApiForDataset(dataset, {
+     pathPrefix,
+ });
+
+ // Generate output files from the API representation.
+ // This will give us a list of files and file paths that represent
+ // the entire API.
+ const outputFiles = generation.api.generateFilesForApi(api);
+
+ // Optionally, serialize and upload the dataset by using:
+ // const dest = 's3://my-bucket';
+ // await uploads.serializeAndUploadDatasets(dest, toAsyncIterable([dataset]));
+ ```
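
The README section above describes the `actions` export but does not show it in use. As a rough sketch (not part of the package contents), a script can drive the same workflow as the CLI commands through `actions`, assuming the package re-exports the actions module under that name (as the list of exports above implies) and using the signatures declared in `actions.d.ts` below:

```typescript
import { actions } from '@helloao/cli';

async function main() {
    const dir = './translations';

    // Fetch the BSB translation from fetch.bible into ./translations.
    // The translations list and options are optional per the declaration.
    await actions.fetchTranslations(dir, ['BSB']);

    // Fetch the "gilbert" audio rendition of the BSB.
    // Audio is written under ./translations/audio/<translationId>/<bookId>/.
    await actions.fetchAudio(dir, ['BSB/gilbert']);
}

main();
```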
package/actions.d.ts CHANGED
@@ -1,3 +1,4 @@
+ import { UploadApiFromDatabaseOptions, UploadApiOptions } from './uploads';
  export interface InitDbOptions {
  /**
  * The path to the source database to copy the schema from.
@@ -54,4 +55,67 @@ export declare function fetchTranslations(dir: string, translations?: string[],
  * @param options The options.
  */
  export declare function fetchAudio(dir: string, translations: string[], options?: FetchTranslationsOptions): Promise<void>;
+ /**
+ * Generates the translation files directly from the translations stored in the given input directory.
+ * @param input The input directory that the translations are stored in.
+ * @param dest The destination to upload the API files to.
+ * @param options The options for the generation.
+ */
+ export declare function generateTranslationsFiles(input: string, dest: string, options: UploadApiFromDatabaseOptions): Promise<void>;
+ /**
+ * Generates the translation files directly from the translation stored in the given input directory.
+ * @param input The input directory that the translation is stored in.
+ * @param dest The destination to upload the API files to.
+ * @param options The options for the generation.
+ */
+ export declare function generateTranslationFiles(input: string, dest: string, options: UploadApiOptions): Promise<void>;
+ /**
+ * The options for uploading the test translations.
+ */
+ export interface UploadTestTranslationOptions extends UploadApiOptions {
+ /**
+ * The s3 URL to upload the translations to.
+ * Defaults to "s3://ao-bible-api-public-uploads"
+ */
+ s3Url?: string;
+ }
+ export interface UploadTestTranslationResult {
+ /**
+ * The S3 URL where the translations were uploaded to.
+ */
+ uploadS3Url: string;
+ /**
+ * The HTTP URL that the version can be accessed at.
+ */
+ url: string;
+ /**
+ * The URL that the available translations can be accessed at.
+ */
+ availableTranslationsUrl: string;
+ /**
+ * The version that was uploaded.
+ * This is a SHA-256 hash of the input files.
+ */
+ version: string;
+ }
+ /**
+ * Generates the API files directly from the translations stored in the given input directory and
+ * uploads them to the HelloAO test s3 bucket.
+ *
+ * Requires access to the HelloAO test s3 bucket. Email hello@helloao.org for access.
+ *
+ * @param input The input directory that the translations are stored in.
+ * @param options The options to use for the upload.
+ */
+ export declare function uploadTestTranslations(input: string, options: UploadTestTranslationOptions): Promise<UploadTestTranslationResult>;
+ /**
+ * Generates the API files directly from the given translation and
+ * uploads them to the HelloAO test s3 bucket.
+ *
+ * Requires access to the HelloAO test s3 bucket. Email hello@helloao.org for access.
+ *
+ * @param input The input directory that the translations are stored in.
+ * @param options The options to use for the upload.
+ */
+ export declare function uploadTestTranslation(input: string, options: UploadTestTranslationOptions): Promise<UploadTestTranslationResult>;
  //# sourceMappingURL=actions.d.ts.map
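
For orientation, here is a rough sketch (not part of the package contents) of calling the new `uploadTestTranslation` action with the types declared above. The full set of inherited `UploadApiOptions` fields is not shown in this diff, so only the `s3Url` option is used here:

```typescript
import { actions } from '@helloao/cli';

async function main() {
    // Requires access to the HelloAO test S3 bucket (see the doc comment above).
    const result = await actions.uploadTestTranslation('./path/to/translation', {
        // Optional; defaults to "s3://ao-bible-api-public-uploads".
        s3Url: 's3://ao-bible-api-public-uploads',
    });

    // Fields come from UploadTestTranslationResult above.
    console.log('version (SHA-256 of the input files):', result.version);
    console.log('uploaded to:', result.uploadS3Url);
    console.log('available translations:', result.availableTranslationsUrl);
}

main();
```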
package/actions.js CHANGED
@@ -31,6 +31,10 @@ exports.importTranslation = importTranslation;
  exports.importTranslations = importTranslations;
  exports.fetchTranslations = fetchTranslations;
  exports.fetchAudio = fetchAudio;
+ exports.generateTranslationsFiles = generateTranslationsFiles;
+ exports.generateTranslationFiles = generateTranslationFiles;
+ exports.uploadTestTranslations = uploadTestTranslations;
+ exports.uploadTestTranslation = uploadTestTranslation;
  const node_path_1 = __importStar(require("node:path"));
  const database = __importStar(require("./db"));
  const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
@@ -42,6 +46,11 @@ const fs_extra_1 = require("fs-extra");
  const audio_1 = require("@helloao/tools/generation/audio");
  const book_order_1 = require("@helloao/tools/generation/book-order");
  const downloads_1 = require("./downloads");
+ const iterators_1 = require("@helloao/tools/parser/iterators");
+ const files_1 = require("./files");
+ const dataset_1 = require("@helloao/tools/generation/dataset");
+ const uploads_1 = require("./uploads");
+ const s3_1 = require("./s3");
  /**
  * Initializes a new Bible API DB.
  * @param dbPath The path to the database. If null or empty, then the "bible-api.db" will be used from the current working directory.
@@ -56,7 +65,9 @@ async function initDb(dbPath, options) {
  console.log('Copying schema from source DB...');
  if (options.language) {
  console.log('Copying only the following languages:', options.language);
- const languages = `(${options.language.map((l) => `'${l}'`).join(', ')})`;
+ const languages = `(${options.language
+ .map((l) => `'${l}'`)
+ .join(', ')})`;
  db.exec(`
  ATTACH DATABASE "${sourcePath}" AS source;

@@ -142,7 +153,7 @@ async function importTranslations(dir, options) {
  const db = await database.getDbFromDir(process.cwd());
  try {
  const files = await (0, promises_1.readdir)(dir);
- const translationDirs = files.map(f => node_path_1.default.resolve(dir, f));
+ const translationDirs = files.map((f) => node_path_1.default.resolve(dir, f));
  console.log(`Importing ${translationDirs.length} translations`);
  await database.importTranslations(db, translationDirs, parser, !!options.overwrite);
  }
@@ -164,7 +175,9 @@ async function fetchTranslations(dir, translations, options = {}) {
  const collection = await client.fetch_collection();
  const collectionTranslations = collection.get_translations();
  console.log(`Discovered ${collectionTranslations.length} translations`);
- const filtered = translations && translations.length <= 0 ? collectionTranslations : collectionTranslations.filter(t => translationsSet.has(t.id));
+ const filtered = translations && translations.length <= 0
+ ? collectionTranslations
+ : collectionTranslations.filter((t) => translationsSet.has(t.id));
  let batches = [];
  while (filtered.length > 0) {
  batches.push(filtered.splice(0, 10));
@@ -187,7 +200,8 @@ async function fetchTranslations(dir, translations, options = {}) {
  };
  const books = await Promise.all(collection.get_books(t.id).map(async (b) => {
  const name = `${b.id}.usx`;
- if (!options.all && await (0, fs_extra_1.exists)(node_path_1.default.resolve(dir, translation.id, name))) {
+ if (!options.all &&
+ (await (0, fs_extra_1.exists)(node_path_1.default.resolve(dir, translation.id, name)))) {
  return null;
  }
  const content = await collection.fetch_book(t.id, b.id, 'usx');
@@ -196,7 +210,7 @@ async function fetchTranslations(dir, translations, options = {}) {
  fileType: 'usx',
  content: contentString,
  metadata: {
- translation
+ translation,
  },
  name,
  };
@@ -252,7 +266,7 @@ async function fetchAudio(dir, translations, options = {}) {
  const [translationId, reader] = translation.split('/');
  const name = `${chapter}.${reader}${ext}`;
  const fullPath = node_path_1.default.resolve(dir, 'audio', translationId, bookId, name);
- if (!options.all && await (0, fs_extra_1.exists)(fullPath)) {
+ if (!options.all && (await (0, fs_extra_1.exists)(fullPath))) {
  continue;
  }
  await (0, downloads_1.downloadFile)(url, fullPath);
@@ -260,3 +274,105 @@ async function fetchAudio(dir, translations, options = {}) {
  }
  }
  }
+ /**
+ * Generates the translation files directly from the translations stored in the given input directory.
+ * @param input The input directory that the translations are stored in.
+ * @param dest The destination to upload the API files to.
+ * @param options The options for the generation.
+ */
+ async function generateTranslationsFiles(input, dest, options) {
+ const parser = new linkedom_1.DOMParser();
+ globalThis.DOMParser = linkedom_1.DOMParser;
+ globalThis.Element = linkedom_1.Element;
+ globalThis.Node = linkedom_1.Node;
+ const dirs = await (0, promises_1.readdir)(node_path_1.default.resolve(input));
+ const batchSize = typeof options.batchSize === 'number'
+ ? options.batchSize
+ : parseInt(options.batchSize);
+ for (let b of (0, iterators_1.batch)(dirs, batchSize)) {
+ const files = await (0, files_1.loadTranslationsFiles)(b);
+ const dataset = (0, dataset_1.generateDataset)(files, parser);
+ await (0, uploads_1.serializeAndUploadDatasets)(dest, (0, iterators_1.toAsyncIterable)([dataset]), options);
+ }
+ }
+ /**
+ * Generates the translation files directly from the translation stored in the given input directory.
+ * @param input The input directory that the translation is stored in.
+ * @param dest The destination to upload the API files to.
+ * @param options The options for the generation.
+ */
+ async function generateTranslationFiles(input, dest, options) {
+ const parser = new linkedom_1.DOMParser();
+ globalThis.DOMParser = linkedom_1.DOMParser;
+ globalThis.Element = linkedom_1.Element;
+ globalThis.Node = linkedom_1.Node;
+ const files = await (0, files_1.loadTranslationFiles)(node_path_1.default.resolve(input));
+ const dataset = (0, dataset_1.generateDataset)(files, parser);
+ await (0, uploads_1.serializeAndUploadDatasets)(dest, (0, iterators_1.toAsyncIterable)([dataset]), options);
+ }
+ /**
+ * Generates the API files directly from the translations stored in the given input directory and
+ * uploads them to the HelloAO test s3 bucket.
+ *
+ * Requires access to the HelloAO test s3 bucket. Email hello@helloao.org for access.
+ *
+ * @param input The input directory that the translations are stored in.
+ * @param options The options to use for the upload.
+ */
+ async function uploadTestTranslations(input, options) {
+ const parser = new linkedom_1.DOMParser();
+ globalThis.DOMParser = linkedom_1.DOMParser;
+ globalThis.Element = linkedom_1.Element;
+ globalThis.Node = linkedom_1.Node;
+ const dirs = await (0, promises_1.readdir)(node_path_1.default.resolve(input));
+ const files = await (0, files_1.loadTranslationsFiles)(dirs);
+ const hash = (0, files_1.hashInputFiles)(files);
+ const dataset = (0, dataset_1.generateDataset)(files, parser);
+ const url = options.s3Url || 's3://ao-bible-api-public-uploads';
+ await (0, uploads_1.serializeAndUploadDatasets)(url, (0, iterators_1.toAsyncIterable)([dataset]), {
+ ...options,
+ pathPrefix: `/${hash}`,
+ });
+ const urls = getUrls(url);
+ return {
+ ...urls,
+ version: hash,
+ availableTranslationsUrl: `${urls.url}/${hash}/api/available_translations.json`,
+ };
+ }
+ /**
+ * Generates the API files directly from the given translation and
+ * uploads them to the HelloAO test s3 bucket.
+ *
+ * Requires access to the HelloAO test s3 bucket. Email hello@helloao.org for access.
+ *
+ * @param input The input directory that the translations are stored in.
+ * @param options The options to use for the upload.
+ */
+ async function uploadTestTranslation(input, options) {
+ const parser = new linkedom_1.DOMParser();
+ globalThis.DOMParser = linkedom_1.DOMParser;
+ globalThis.Element = linkedom_1.Element;
+ globalThis.Node = linkedom_1.Node;
+ const files = await (0, files_1.loadTranslationFiles)(node_path_1.default.resolve(input));
+ const hash = (0, files_1.hashInputFiles)(files);
+ const dataset = (0, dataset_1.generateDataset)(files, parser);
+ const url = options.s3Url || 's3://ao-bible-api-public-uploads';
+ await (0, uploads_1.serializeAndUploadDatasets)(url, (0, iterators_1.toAsyncIterable)([dataset]), {
+ ...options,
+ pathPrefix: `/${hash}`,
+ });
+ const urls = getUrls(url);
+ return {
+ ...urls,
+ version: hash,
+ availableTranslationsUrl: `${urls.url}/${hash}/api/available_translations.json`,
+ };
+ }
+ function getUrls(dest) {
+ const url = (0, s3_1.getHttpUrl)(dest);
+ return {
+ uploadS3Url: dest,
+ url: url,
+ };
+ }
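
The new `generateTranslationsFiles` and `uploadTestTranslations` implementations above share the same lower-level flow: read the input directories, load the translation files, generate a dataset, and serialize-and-upload it in batches. A rough library-level sketch of that flow using the package's public exports (not part of the package contents; directory paths are resolved to absolute paths here, and the options argument is omitted as in the README example):

```typescript
import { readdir } from 'node:fs/promises';
import path from 'node:path';
import { DOMParser } from 'linkedom';
import { files, uploads } from '@helloao/cli';
import { batch, toAsyncIterable } from '@helloao/tools/parser/iterators';
import { generateDataset } from '@helloao/tools/generation/dataset';

async function run(input: string, dest: string) {
    // The CLI also assigns linkedom's DOMParser/Element/Node to globalThis
    // before parsing; that step is elided here.
    const parser = new DOMParser();

    const dirs = await readdir(path.resolve(input));
    const fullPaths = dirs.map((dir) => path.resolve(input, dir));

    // Process 50 translation directories per batch, mirroring the default above.
    for (let b of batch(fullPaths, 50)) {
        const translationFiles = await files.loadTranslationsFiles(b);
        const dataset = generateDataset(translationFiles, parser);
        await uploads.serializeAndUploadDatasets(dest, toAsyncIterable([dataset]));
    }
}

run('./translations', 's3://my-bucket');
```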
package/cli.js CHANGED
@@ -11,38 +11,101 @@ const linkedom_1 = require("linkedom");
  const downloads_1 = require("./downloads");
  const uploads_1 = require("./uploads");
  const actions_1 = require("./actions");
- const files_1 = require("./files");
- const dataset_1 = require("@helloao/tools/generation/dataset");
- const iterators_1 = require("@helloao/tools/parser/iterators");
+ const db_1 = require("./db");
+ const prompts_1 = require("@inquirer/prompts");
  async function start() {
  const parser = new linkedom_1.DOMParser();
  globalThis.DOMParser = linkedom_1.DOMParser;
  globalThis.Element = linkedom_1.Element;
  globalThis.Node = linkedom_1.Node;
  const program = new commander_1.Command();
- program.name('helloao')
+ program
+ .name('helloao')
  .description('A CLI for managing a Free Use Bible API.')
  .version('0.0.1');
- program.command('init [path]')
+ program
+ .command('init [path]')
  .description('Initialize a new Bible API DB.')
  .option('--source <path>', 'The source database to copy from.')
  .option('--language <languages...>', 'The language(s) that the database should be initialized with.')
  .action(async (dbPath, options) => {
  await (0, actions_1.initDb)(dbPath, options);
  });
- program.command('import-translation <dir> [dirs...]')
+ program
+ .command('import-translation <dir> [dirs...]')
  .description('Imports a translation from the given directory into the database.')
  .option('--overwrite', 'Whether to overwrite existing files.')
  .action(async (dir, dirs, options) => {
  await (0, actions_1.importTranslation)(dir, dirs, options);
  });
- program.command('import-translations <dir>')
+ program
+ .command('import-translations <dir>')
  .description('Imports all translations from the given directory into the database.')
  .option('--overwrite', 'Whether to overwrite existing files.')
  .action(async (dir, options) => {
  await (0, actions_1.importTranslations)(dir, options);
  });
- program.command('generate-translation-files <input> <dir>')
+ program
+ .command('upload-test-translation <input>')
+ .description(`Uploads a translation to the HelloAO Free Bible API test S3 bucket.\nRequires access to the HelloAO Free Bible API test S3 bucket.\nFor inquiries, please contact hello@helloao.org.`)
+ .option('--batch-size <size>', 'The number of translations to generate API files for in each batch.', '50')
+ .option('--translations <translations...>', 'The translations to generate API files for.')
+ .option('--overwrite', 'Whether to overwrite existing files.')
+ .option('--overwrite-common-files', 'Whether to overwrite only common files.')
+ .option('--file-pattern <pattern>', 'The file pattern regex that should be used to filter the files that are generated.')
+ .option('--use-common-name', 'Whether to use the common name for the book chapter API link. If false, then book IDs are used.')
+ .option('--generate-audio-files', 'Whether to replace the audio URLs in the dataset with ones that are hosted locally.')
+ .option('--profile <profile>', 'The AWS profile to use for uploading to S3.')
+ .option('--access-key-id <accessKeyId>', 'The AWS access key ID to use for uploading to S3.')
+ .option('--secret-access-key <secretAccessKey>', 'The AWS Secret Access Key to use for uploading to S3.')
+ .option('--pretty', 'Whether to generate pretty-printed JSON files.')
+ .option('--s3-url <s3Url>', 'The S3 bucket URL to upload the files to.', 's3://ao-bible-api-public-uploads')
+ .action(async (input, options) => {
+ const good = await (0, prompts_1.confirm)({
+ message: 'Uploaded files will be publicly accessible. Continue?',
+ default: false,
+ });
+ if (!good) {
+ return;
+ }
+ const result = await (0, actions_1.uploadTestTranslation)(input, options);
+ console.log('\n');
+ console.log('Version: ', result.version);
+ console.log('Uploaded to: ', result.uploadS3Url);
+ console.log('URL: ', result.url);
+ console.log('Available Translations:', result.availableTranslationsUrl);
+ });
+ program
+ .command('upload-test-translations <input>')
+ .description(`Uploads all the translations in the given input directory to the HelloAO Free Bible API test S3 bucket.\nRequires access to the HelloAO Free Bible API test S3 bucket.\nFor inquiries, please contact hello@helloao.org.`)
+ .option('--batch-size <size>', 'The number of translations to generate API files for in each batch.', '50')
+ .option('--translations <translations...>', 'The translations to generate API files for.')
+ .option('--overwrite', 'Whether to overwrite existing files.')
+ .option('--overwrite-common-files', 'Whether to overwrite only common files.')
+ .option('--file-pattern <pattern>', 'The file pattern regex that should be used to filter the files that are generated.')
+ .option('--use-common-name', 'Whether to use the common name for the book chapter API link. If false, then book IDs are used.')
+ .option('--generate-audio-files', 'Whether to replace the audio URLs in the dataset with ones that are hosted locally.')
+ .option('--profile <profile>', 'The AWS profile to use for uploading to S3.')
+ .option('--access-key-id <accessKeyId>', 'The AWS access key ID to use for uploading to S3.')
+ .option('--secret-access-key <secretAccessKey>', 'The AWS Secret Access Key to use for uploading to S3.')
+ .option('--pretty', 'Whether to generate pretty-printed JSON files.')
+ .option('--s3-url <s3Url>', 'The S3 bucket URL to upload the files to.', 's3://ao-bible-api-public-uploads')
+ .action(async (input, options) => {
+ const good = await (0, prompts_1.confirm)({
+ message: 'Uploaded files will be publicly accessible. Continue?',
+ default: false,
+ });
+ if (!good) {
+ return;
+ }
+ const result = await (0, actions_1.uploadTestTranslations)(input, options);
+ console.log('\nVersion: ', result.version);
+ console.log('Uploaded to: ', result.uploadS3Url);
+ console.log('URL: ', result.url);
+ console.log('Available Translations:', result.availableTranslationsUrl);
+ });
+ program
+ .command('generate-translation-files <input> <dir>')
  .description('Generates API files from the given input translation.')
  .option('--batch-size <size>', 'The number of translations to generate API files for in each batch.', '50')
  .option('--translations <translations...>', 'The translations to generate API files for.')
@@ -52,17 +115,14 @@ async function start() {
  .option('--use-common-name', 'Whether to use the common name for the book chapter API link. If false, then book IDs are used.')
  .option('--generate-audio-files', 'Whether to replace the audio URLs in the dataset with ones that are hosted locally.')
  .option('--profile <profile>', 'The AWS profile to use for uploading to S3.')
+ .option('--access-key-id <accessKeyId>', 'The AWS access key ID to use for uploading to S3.')
+ .option('--secret-access-key <secretAccessKey>', 'The AWS Secret Access Key to use for uploading to S3.')
  .option('--pretty', 'Whether to generate pretty-printed JSON files.')
  .action(async (input, dest, options) => {
- const parser = new linkedom_1.DOMParser();
- globalThis.DOMParser = linkedom_1.DOMParser;
- globalThis.Element = linkedom_1.Element;
- globalThis.Node = linkedom_1.Node;
- const files = await (0, files_1.loadTranslationFiles)(path_1.default.resolve(input));
- const dataset = (0, dataset_1.generateDataset)(files, parser);
- await (0, uploads_1.uploadApiFiles)(path_1.default.resolve(dest), options, (0, iterators_1.toAsyncIterable)([dataset]));
+ await (0, actions_1.generateTranslationFiles)(input, dest, options);
  });
- program.command('generate-translations-files <input> <dir>')
+ program
+ .command('generate-translations-files <input> <dir>')
  .description('Generates API files from the given input translations.')
  .option('--batch-size <size>', 'The number of translations to generate API files for in each batch.', '50')
  .option('--translations <translations...>', 'The translations to generate API files for.')
@@ -72,21 +132,14 @@ async function start() {
  .option('--use-common-name', 'Whether to use the common name for the book chapter API link. If false, then book IDs are used.')
  .option('--generate-audio-files', 'Whether to replace the audio URLs in the dataset with ones that are hosted locally.')
  .option('--profile <profile>', 'The AWS profile to use for uploading to S3.')
+ .option('--access-key-id <accessKeyId>', 'The AWS access key ID to use for uploading to S3.')
+ .option('--secret-access-key <secretAccessKey>', 'The AWS Secret Access Key to use for uploading to S3.')
  .option('--pretty', 'Whether to generate pretty-printed JSON files.')
  .action(async (input, dest, options) => {
- const parser = new linkedom_1.DOMParser();
- globalThis.DOMParser = linkedom_1.DOMParser;
- globalThis.Element = linkedom_1.Element;
- globalThis.Node = linkedom_1.Node;
- const dirs = await (0, promises_1.readdir)(path_1.default.resolve(input));
- const batchSize = parseInt(options.batchSize);
- for (let b of (0, iterators_1.batch)(dirs, batchSize)) {
- const files = await (0, files_1.loadTranslationsFiles)(b);
- const dataset = (0, dataset_1.generateDataset)(files, parser);
- await (0, uploads_1.uploadApiFiles)(dest, options, (0, iterators_1.toAsyncIterable)([dataset]));
- }
+ await (0, actions_1.generateTranslationsFiles)(input, dest, options);
  });
- program.command('upload-api-files')
+ program
+ .command('upload-api-files')
  .argument('<dest>', 'The destination to upload the API files to.')
  .description('Uploads API files to the specified destination. For S3, use the format s3://bucket-name/path/to/folder.')
  .option('--batch-size <size>', 'The number of translations to generate API files for in each batch.', '50')
@@ -97,23 +150,34 @@ async function start() {
  .option('--use-common-name', 'Whether to use the common name for the book chapter API link. If false, then book IDs are used.')
  .option('--generate-audio-files', 'Whether to replace the audio URLs in the dataset with ones that are hosted locally.')
  .option('--profile <profile>', 'The AWS profile to use for uploading to S3.')
+ .option('--access-key-id <accessKeyId>', 'The AWS access key ID to use for uploading to S3.')
+ .option('--secret-access-key <secretAccessKey>', 'The AWS Secret Access Key to use for uploading to S3.')
  .option('--pretty', 'Whether to generate pretty-printed JSON files.')
  .action(async (dest, options) => {
- await (0, uploads_1.uploadApiFilesFromDatabase)(dest, options);
+ const db = (0, db_1.getPrismaDbFromDir)(process.cwd());
+ try {
+ await (0, uploads_1.uploadApiFilesFromDatabase)(db, dest, options);
+ }
+ finally {
+ db.$disconnect();
+ }
  });
- program.command('fetch-translations <dir> [translations...]')
+ program
+ .command('fetch-translations <dir> [translations...]')
  .description('Fetches the specified translations from fetch.bible and places them in the given directory.')
  .option('-a, --all', 'Fetch all translations. If omitted, only undownloaded translations will be fetched.')
  .action(async (dir, translations, options) => {
  await (0, actions_1.fetchTranslations)(dir, translations, options);
  });
- program.command('fetch-audio <dir> [translations...]')
+ program
+ .command('fetch-audio <dir> [translations...]')
  .description('Fetches the specified audio translations and places them in the given directory.\nTranslations should be in the format "translationId/audioId". e.g. "BSB/gilbert"')
  .option('-a, --all', 'Fetch all translations. If omitted, only undownloaded translations will be fetched.')
  .action(async (dir, translations, options) => {
  await (0, actions_1.fetchAudio)(dir, translations, options);
  });
- program.command('fetch-bible-metadata <dir>')
+ program
+ .command('fetch-bible-metadata <dir>')
  .description('Fetches the Theographic bible metadata and places it in the given directory.')
  .action(async (dir) => {
  let files = [
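
Note that `upload-api-files` now obtains a Prisma client via `getPrismaDbFromDir` and passes it to `uploadApiFilesFromDatabase`, disconnecting when finished. A rough sketch of the same calling convention from a script (not part of the package contents; the `batchSize` string mirrors the CLI default, and the remaining option fields are assumed to be optional):

```typescript
import { db, uploads } from '@helloao/cli';

async function main() {
    // Open the Prisma client for the database in the current working directory.
    const prisma = db.getPrismaDbFromDir(process.cwd());
    try {
        // Upload the generated API files to a local directory
        // (an s3://bucket/path destination also works, per the command help).
        await uploads.uploadApiFilesFromDatabase(prisma, './out-api', {
            batchSize: '50',
        });
    } finally {
        await prisma.$disconnect();
    }
}

main();
```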
package/db.d.ts CHANGED
@@ -1,10 +1,10 @@
- import { PrismaClient } from "./prisma-gen";
+ import { PrismaClient } from './prisma-gen';
  import { Database } from 'better-sqlite3';
- import { DatasetOutput, DatasetTranslation, DatasetTranslationBook } from "@helloao/tools/generation/dataset";
- import { InputFile, TranslationBookChapter } from "@helloao/tools/generation";
- import { GenerateApiOptions } from "@helloao/tools/generation/api";
- import { DOMParser } from "linkedom";
- import { Readable } from "stream";
+ import { DatasetOutput, DatasetTranslation, DatasetTranslationBook } from '@helloao/tools/generation/dataset';
+ import { InputFile, TranslationBookChapter } from '@helloao/tools/generation';
+ import { GenerateApiOptions } from '@helloao/tools/generation/api';
+ import { DOMParser } from 'linkedom';
+ import { Readable } from 'stream';
  /**
  * Imports the translations from the given directories into the database.
  * @param db The database to import the translations into.
@@ -72,39 +72,21 @@ export interface SerializeApiOptions extends GenerateApiOptions {
  pretty?: boolean;
  }
  /**
- * Generates and serializes the API files for the dataset that is stored in the database.
+ * Generates and serializes the API files for the datasets that are stored in the database.
  * Yields each batch of serialized files.
  * @param db The database that the dataset should be loaded from.
- * @param options The options to use for generating the API.
+ * @param options The options to use for serializing the files.
+ * @param apiOptions The options to use for generating the API files.
  * @param translationsPerBatch The number of translations that should be loaded and written per batch.
  * @param translations The list of translations that should be loaded. If not provided, all translations will be loaded.
  */
- export declare function serializeFilesForDataset(db: PrismaClient, options: SerializeApiOptions, translationsPerBatch?: number, translations?: string[]): AsyncGenerator<SerializedFile[]>;
- /**
- * Serializes the API files for the given datasets.
- * @param datasets The dataasets to serialize.
- * @param options The options to use for serializing the files.
- */
- export declare function serializeFiles(datasets: AsyncIterable<DatasetOutput>, options: SerializeApiOptions): AsyncGenerator<SerializedFile[]>;
+ export declare function serializeFilesFromDatabase(db: PrismaClient, options?: SerializeApiOptions, translationsPerBatch?: number, translations?: string[]): AsyncGenerator<SerializedFile[]>;
  /**
- * Defines an interface that contains information about a serialized file.
+ * Generates and serializes the API files for the given datasets.
+ * Yields each batch of serialized files.
+ *
+ * @param datasets The datasets to serialize.
+ * @param options The options to use for generating and serializing the files.
  */
- export interface Uploader {
- /**
- * Gets the ideal batch size for the uploader.
- * Null if the uploader does not need batching.
- */
- idealBatchSize: number | null;
- /**
- * Uploads the given file.
- * @param file The file to upload.
- * @param overwrite Whether the file should be overwritten if it already exists.
- * @returns True if the file was uploaded. False if the file was skipped due to already existing.
- */
- upload(file: SerializedFile, overwrite: boolean): Promise<boolean>;
- /**
- * Disposes resources that the uploader uses.
- */
- dispose?(): Promise<void>;
- }
+ export declare function serializeDatasets(datasets: AsyncIterable<DatasetOutput>, options?: SerializeApiOptions): AsyncGenerator<SerializedFile[]>;
  //# sourceMappingURL=db.d.ts.map
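
The renamed generators above are async generators and can be consumed with `for await`. A rough sketch (not part of the package contents) that serializes a single dataset with the `pretty` option from `SerializeApiOptions`; the shape of `SerializedFile` is not shown in this diff, so only the batch size is inspected here:

```typescript
import { DOMParser } from 'linkedom';
import { db, files } from '@helloao/cli';
import { generateDataset } from '@helloao/tools/generation/dataset';
import { toAsyncIterable } from '@helloao/tools/parser/iterators';

async function main() {
    const parser = new DOMParser();

    // Build a dataset from a single translation directory.
    const translationFiles = await files.loadTranslationFiles('./path/to/translation');
    const dataset = generateDataset(translationFiles, parser);

    // serializeDatasets yields batches of SerializedFile objects.
    for await (const serializedBatch of db.serializeDatasets(
        toAsyncIterable([dataset]),
        { pretty: true }
    )) {
        console.log(`serialized ${serializedBatch.length} files in this batch`);
    }
}

main();
```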