@helloao/cli 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (110)
  1. package/actions.d.ts +57 -0
  2. package/actions.js +262 -0
  3. package/cli.d.ts +2 -0
  4. package/cli.js +139 -0
  5. package/db.d.ts +110 -0
  6. package/db.js +754 -0
  7. package/downloads.d.ts +2 -0
  8. package/downloads.js +12 -0
  9. package/files.d.ts +56 -0
  10. package/files.js +232 -0
  11. package/index.d.ts +8 -0
  12. package/index.js +38 -0
  13. package/migrations/20240420231455_initial/migration.sql +66 -0
  14. package/migrations/20240623183848_add_book_order/migration.sql +26 -0
  15. package/migrations/20240629194121_add_chapter_links/migration.sql +45 -0
  16. package/migrations/20240629194513_add_chapter_content/migration.sql +30 -0
  17. package/migrations/20240705221833_remove_unused_columns/migration.sql +27 -0
  18. package/migrations/20240711173108_add_chapter_audio/migration.sql +13 -0
  19. package/migrations/20240724212651_add_hashing/migration.sql +25 -0
  20. package/node_modules/@zip.js/zip.js/LICENSE +28 -0
  21. package/node_modules/@zip.js/zip.js/README.md +173 -0
  22. package/node_modules/@zip.js/zip.js/deno.json +8 -0
  23. package/node_modules/@zip.js/zip.js/dist/README.md +28 -0
  24. package/node_modules/@zip.js/zip.js/dist/z-worker-fflate.js +1 -0
  25. package/node_modules/@zip.js/zip.js/dist/z-worker-pako.js +1 -0
  26. package/node_modules/@zip.js/zip.js/dist/z-worker.js +1 -0
  27. package/node_modules/@zip.js/zip.js/dist/zip-fs-full.js +11935 -0
  28. package/node_modules/@zip.js/zip.js/dist/zip-fs-full.min.js +1 -0
  29. package/node_modules/@zip.js/zip.js/dist/zip-fs.js +6079 -0
  30. package/node_modules/@zip.js/zip.js/dist/zip-fs.min.js +1 -0
  31. package/node_modules/@zip.js/zip.js/dist/zip-full.js +9463 -0
  32. package/node_modules/@zip.js/zip.js/dist/zip-full.min.js +1 -0
  33. package/node_modules/@zip.js/zip.js/dist/zip-no-worker-deflate.min.js +1 -0
  34. package/node_modules/@zip.js/zip.js/dist/zip-no-worker-inflate.min.js +1 -0
  35. package/node_modules/@zip.js/zip.js/dist/zip-no-worker.min.js +1 -0
  36. package/node_modules/@zip.js/zip.js/dist/zip.js +5240 -0
  37. package/node_modules/@zip.js/zip.js/dist/zip.min.js +1 -0
  38. package/node_modules/@zip.js/zip.js/index-fflate.js +82 -0
  39. package/node_modules/@zip.js/zip.js/index.cjs +11927 -0
  40. package/node_modules/@zip.js/zip.js/index.d.ts +2048 -0
  41. package/node_modules/@zip.js/zip.js/index.js +87 -0
  42. package/node_modules/@zip.js/zip.js/index.min.js +1 -0
  43. package/node_modules/@zip.js/zip.js/lib/core/codec-pool.js +127 -0
  44. package/node_modules/@zip.js/zip.js/lib/core/codec-worker.js +348 -0
  45. package/node_modules/@zip.js/zip.js/lib/core/configuration.js +127 -0
  46. package/node_modules/@zip.js/zip.js/lib/core/constants.js +114 -0
  47. package/node_modules/@zip.js/zip.js/lib/core/io.js +749 -0
  48. package/node_modules/@zip.js/zip.js/lib/core/streams/aes-crypto-stream.js +326 -0
  49. package/node_modules/@zip.js/zip.js/lib/core/streams/codec-stream.js +154 -0
  50. package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/crc32.js +63 -0
  51. package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/deflate.js +2063 -0
  52. package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/inflate.js +2167 -0
  53. package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/sjcl.js +827 -0
  54. package/node_modules/@zip.js/zip.js/lib/core/streams/common-crypto.js +55 -0
  55. package/node_modules/@zip.js/zip.js/lib/core/streams/crc32-stream.js +56 -0
  56. package/node_modules/@zip.js/zip.js/lib/core/streams/stream-adapter.js +55 -0
  57. package/node_modules/@zip.js/zip.js/lib/core/streams/zip-crypto-stream.js +162 -0
  58. package/node_modules/@zip.js/zip.js/lib/core/streams/zip-entry-stream.js +165 -0
  59. package/node_modules/@zip.js/zip.js/lib/core/util/cp437-decode.js +48 -0
  60. package/node_modules/@zip.js/zip.js/lib/core/util/decode-text.js +43 -0
  61. package/node_modules/@zip.js/zip.js/lib/core/util/default-mime-type.js +38 -0
  62. package/node_modules/@zip.js/zip.js/lib/core/util/encode-text.js +48 -0
  63. package/node_modules/@zip.js/zip.js/lib/core/util/mime-type.js +1639 -0
  64. package/node_modules/@zip.js/zip.js/lib/core/util/stream-codec-shim.js +91 -0
  65. package/node_modules/@zip.js/zip.js/lib/core/z-worker-core.js +176 -0
  66. package/node_modules/@zip.js/zip.js/lib/core/zip-entry.js +86 -0
  67. package/node_modules/@zip.js/zip.js/lib/core/zip-fs-core.js +865 -0
  68. package/node_modules/@zip.js/zip.js/lib/core/zip-reader.js +757 -0
  69. package/node_modules/@zip.js/zip.js/lib/core/zip-writer.js +1186 -0
  70. package/node_modules/@zip.js/zip.js/lib/z-worker-bootstrap-fflate.js +40 -0
  71. package/node_modules/@zip.js/zip.js/lib/z-worker-bootstrap-pako.js +39 -0
  72. package/node_modules/@zip.js/zip.js/lib/z-worker-fflate.js +40 -0
  73. package/node_modules/@zip.js/zip.js/lib/z-worker-inline-template.js +42 -0
  74. package/node_modules/@zip.js/zip.js/lib/z-worker-inline.js +1 -0
  75. package/node_modules/@zip.js/zip.js/lib/z-worker.js +38 -0
  76. package/node_modules/@zip.js/zip.js/lib/zip-data-uri.js +53 -0
  77. package/node_modules/@zip.js/zip.js/lib/zip-fflate-shim.js +37 -0
  78. package/node_modules/@zip.js/zip.js/lib/zip-fs.js +53 -0
  79. package/node_modules/@zip.js/zip.js/lib/zip-full-fflate.js +53 -0
  80. package/node_modules/@zip.js/zip.js/lib/zip-full.js +54 -0
  81. package/node_modules/@zip.js/zip.js/lib/zip-no-worker-deflate.js +42 -0
  82. package/node_modules/@zip.js/zip.js/lib/zip-no-worker-fflate-deflate.js +42 -0
  83. package/node_modules/@zip.js/zip.js/lib/zip-no-worker-fflate-inflate.js +42 -0
  84. package/node_modules/@zip.js/zip.js/lib/zip-no-worker-fflate.js +43 -0
  85. package/node_modules/@zip.js/zip.js/lib/zip-no-worker-inflate.js +42 -0
  86. package/node_modules/@zip.js/zip.js/lib/zip-no-worker.js +44 -0
  87. package/node_modules/@zip.js/zip.js/lib/zip.js +52 -0
  88. package/node_modules/@zip.js/zip.js/package.json +86 -0
  89. package/package.json +43 -0
  90. package/prisma-gen/default.d.ts +1 -0
  91. package/prisma-gen/default.js +1 -0
  92. package/prisma-gen/edge.d.ts +1 -0
  93. package/prisma-gen/edge.js +242 -0
  94. package/prisma-gen/index-browser.js +236 -0
  95. package/prisma-gen/index.d.ts +13248 -0
  96. package/prisma-gen/index.js +265 -0
  97. package/prisma-gen/runtime/edge-esm.js +28 -0
  98. package/prisma-gen/runtime/edge.js +28 -0
  99. package/prisma-gen/runtime/index-browser.d.ts +365 -0
  100. package/prisma-gen/runtime/index-browser.js +13 -0
  101. package/prisma-gen/runtime/library.d.ts +3168 -0
  102. package/prisma-gen/runtime/library.js +140 -0
  103. package/prisma-gen/runtime/wasm.js +29 -0
  104. package/prisma-gen/wasm.d.ts +1 -0
  105. package/prisma-gen/wasm.js +236 -0
  106. package/s3.d.ts +14 -0
  107. package/s3.js +76 -0
  108. package/schema.prisma +154 -0
  109. package/uploads.d.ts +54 -0
  110. package/uploads.js +141 -0
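The listing above shows the overall layout of the package: a command-line entry point (package/cli.js), a library entry point (package/index.js), database helpers (package/db.js), file, S3, and upload utilities (package/files.js, package/s3.js, package/uploads.js), Prisma-generated client code under package/prisma-gen/, SQL migrations under package/migrations/, and a bundled copy of @zip.js/zip.js under node_modules. As a rough illustration only (package.json is listed but its contents are not shown in this diff, so the main entry and any export map are assumptions), the library portion would typically be loaded like this:

const cli = require('@helloao/cli');      // assumed to resolve to package/index.js
const db = require('@helloao/cli/db');    // assumed subpath import for package/db.js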
package/db.js ADDED
@@ -0,0 +1,754 @@
1
+ "use strict";
2
+ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
+ if (k2 === undefined) k2 = k;
4
+ var desc = Object.getOwnPropertyDescriptor(m, k);
5
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
+ desc = { enumerable: true, get: function() { return m[k]; } };
7
+ }
8
+ Object.defineProperty(o, k2, desc);
9
+ }) : (function(o, m, k, k2) {
10
+ if (k2 === undefined) k2 = k;
11
+ o[k2] = m[k];
12
+ }));
13
+ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
15
+ }) : function(o, v) {
16
+ o["default"] = v;
17
+ });
18
+ var __importStar = (this && this.__importStar) || function (mod) {
19
+ if (mod && mod.__esModule) return mod;
20
+ var result = {};
21
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
22
+ __setModuleDefault(result, mod);
23
+ return result;
24
+ };
25
+ var __importDefault = (this && this.__importDefault) || function (mod) {
26
+ return (mod && mod.__esModule) ? mod : { "default": mod };
27
+ };
28
+ Object.defineProperty(exports, "__esModule", { value: true });
29
+ exports.importTranslations = importTranslations;
30
+ exports.importTranslationBatch = importTranslationBatch;
31
+ exports.importTranslationFileBatch = importTranslationFileBatch;
32
+ exports.getChangedOrNewInputFiles = getChangedOrNewInputFiles;
33
+ exports.insertFileMetadata = insertFileMetadata;
34
+ exports.insertTranslations = insertTranslations;
35
+ exports.insertTranslationBooks = insertTranslationBooks;
36
+ exports.insertTranslationContent = insertTranslationContent;
37
+ exports.getDbPathFromDir = getDbPathFromDir;
38
+ exports.getDbPath = getDbPath;
39
+ exports.getPrismaDbFromDir = getPrismaDbFromDir;
40
+ exports.getDbFromDir = getDbFromDir;
41
+ exports.getDb = getDb;
42
+ exports.loadDatasets = loadDatasets;
43
+ exports.serializeFilesForDataset = serializeFilesForDataset;
44
+ exports.serializeFiles = serializeFiles;
45
+ const prisma_gen_1 = require("./prisma-gen");
46
+ const path_1 = __importStar(require("path"));
47
+ const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
48
+ const fs_extra_1 = require("fs-extra");
49
+ const node_crypto_1 = require("node:crypto");
50
+ const dataset_1 = require("@helloao/tools/generation/dataset");
51
+ const api_1 = require("@helloao/tools/generation/api");
52
+ const files_1 = require("./files");
53
+ const hash_js_1 = require("hash.js");
54
+ const lodash_1 = require("lodash");
55
+ const path_2 = require("path");
56
+ const stream_1 = require("stream");
57
+ const base64_js_1 = require("base64-js");
58
+ const cliPath = require.resolve('./index');
59
+ const migrationsPath = path_1.default.resolve((0, path_1.dirname)(cliPath), 'migrations');
60
+ /**
61
+ * Imports the translations from the given directories into the database.
62
+ * @param db The database to import the translations into.
63
+ * @param dirs The directories to import the translations from.
64
+ * @param parser The DOM parser that should be used for USX files.
65
+ * @param overwrite Whether to force a reload of the translations.
66
+ */
67
+ async function importTranslations(db, dirs, parser, overwrite) {
68
+ let batches = [];
69
+ while (dirs.length > 0) {
70
+ batches.push(dirs.splice(0, 10));
71
+ }
72
+ console.log('Processing', batches.length, 'batches of translations');
73
+ for (let i = 0; i < batches.length; i++) {
74
+ const batch = batches[i];
75
+ console.log(`Processing batch ${i + 1} of ${batches.length}`);
76
+ await importTranslationBatch(db, batch, parser, overwrite);
77
+ }
78
+ }
79
+ /**
80
+ * Imports a batch of translations from the given directories into the database.
81
+ * @param db The database to import the translations into.
82
+ * @param dirs The directories that contain the translations.
83
+ * @param parser The DOM parser that should be used for USX files.
84
+ * @param overwrite Whether to force a reload of the translations.
85
+ */
86
+ async function importTranslationBatch(db, dirs, parser, overwrite) {
87
+ const promises = [];
88
+ for (let dir of dirs) {
89
+ const fullPath = path_1.default.resolve(dir);
90
+ promises.push((0, files_1.loadTranslationFiles)(fullPath));
91
+ }
92
+ const allFiles = await Promise.all(promises);
93
+ const files = allFiles.flat();
94
+ await importTranslationFileBatch(db, files, parser, overwrite);
95
+ }
96
+ /**
97
+ * Parses and imports the given files into the database.
98
+ * @param db The database to import the files into.
99
+ * @param files The files that should be parsed.
100
+ * @param parser The DOM parser that should be used for USX files.
101
+ * @param overwrite Whether to force a reload of the translations.
102
+ */
103
+ async function importTranslationFileBatch(db, files, parser, overwrite) {
104
+ console.log('Importing', files.length, 'files');
105
+ if (overwrite) {
106
+ console.log('Overwriting existing translations.');
107
+ }
108
+ const changedFiles = overwrite ? files : getChangedOrNewInputFiles(db, files);
109
+ console.log('Processing', changedFiles.length, 'changed files');
110
+ console.log('Skipping', files.length - changedFiles.length, 'unchanged files');
111
+ const output = (0, dataset_1.generateDataset)(changedFiles, parser);
112
+ console.log('Generated', output.translations.length, 'translations');
113
+ insertTranslations(db, output.translations);
114
+ updateTranslationHashes(db, output.translations);
115
+ insertFileMetadata(db, changedFiles);
116
+ console.log(`Inserted ${output.translations.length} translations into DB`);
117
+ }
118
+ /**
119
+ * Filters the given input files to only include those that have changed.
120
+ * @param db The database to check for changes.
121
+ * @param files The files to filter.
122
+ */
123
+ function getChangedOrNewInputFiles(db, files) {
124
+ const fileExists = db.prepare('SELECT COUNT(*) as c FROM InputFile WHERE translationId = @translationId AND name = @name AND sha256 = @sha256;');
125
+ return files.filter(f => {
126
+ const count = fileExists.get({
127
+ translationId: f.metadata.translation.id,
128
+ name: path_1.default.basename(f.name),
129
+ sha256: f.sha256,
130
+ });
131
+ return count.c <= 0;
132
+ });
133
+ }
134
+ function insertFileMetadata(db, files) {
135
+ const fileUpsert = db.prepare(`INSERT INTO InputFile(
136
+ translationId,
137
+ name,
138
+ format,
139
+ sha256,
140
+ sizeInBytes
141
+ ) VALUES (
142
+ @translationId,
143
+ @name,
144
+ @format,
145
+ @sha256,
146
+ @sizeInBytes
147
+ ) ON CONFLICT(translationId, name) DO
148
+ UPDATE SET
149
+ format=excluded.format,
150
+ sha256=excluded.sha256,
151
+ sizeInBytes=excluded.sizeInBytes;`);
152
+ const insertManyFiles = db.transaction((files) => {
153
+ for (let file of files) {
154
+ fileUpsert.run({
155
+ translationId: file.metadata.translation.id,
156
+ name: path_1.default.basename(file.name),
157
+ format: file.fileType,
158
+ sha256: file.sha256,
159
+ sizeInBytes: file.content.length,
160
+ });
161
+ }
162
+ });
163
+ insertManyFiles(files);
164
+ }
165
+ function insertTranslations(db, translations) {
166
+ const translationUpsert = db.prepare(`INSERT INTO Translation(
167
+ id,
168
+ name,
169
+ language,
170
+ shortName,
171
+ textDirection,
172
+ licenseUrl,
173
+ website,
174
+ englishName
175
+ ) VALUES (
176
+ @id,
177
+ @name,
178
+ @language,
179
+ @shortName,
180
+ @textDirection,
181
+ @licenseUrl,
182
+ @website,
183
+ @englishName
184
+ ) ON CONFLICT(id) DO
185
+ UPDATE SET
186
+ name=excluded.name,
187
+ language=excluded.language,
188
+ shortName=excluded.shortName,
189
+ textDirection=excluded.textDirection,
190
+ licenseUrl=excluded.licenseUrl,
191
+ website=excluded.website,
192
+ englishName=excluded.englishName;`);
193
+ const insertManyTranslations = db.transaction((translations) => {
194
+ for (let translation of translations) {
195
+ translationUpsert.run({
196
+ id: translation.id,
197
+ name: translation.name,
198
+ language: translation.language,
199
+ shortName: translation.shortName,
200
+ textDirection: translation.textDirection,
201
+ licenseUrl: translation.licenseUrl,
202
+ website: translation.website,
203
+ englishName: translation.englishName,
204
+ });
205
+ }
206
+ });
207
+ insertManyTranslations(translations);
208
+ for (let translation of translations) {
209
+ insertTranslationBooks(db, translation, translation.books);
210
+ }
211
+ }
212
+ function insertTranslationBooks(db, translation, translationBooks) {
213
+ const bookUpsert = db.prepare(`INSERT INTO Book(
214
+ id,
215
+ translationId,
216
+ title,
217
+ name,
218
+ commonName,
219
+ numberOfChapters,
220
+ \`order\`
221
+ ) VALUES (
222
+ @id,
223
+ @translationId,
224
+ @title,
225
+ @name,
226
+ @commonName,
227
+ @numberOfChapters,
228
+ @bookOrder
229
+ ) ON CONFLICT(id,translationId) DO
230
+ UPDATE SET
231
+ title=excluded.title,
232
+ name=excluded.name,
233
+ commonName=excluded.commonName,
234
+ numberOfChapters=excluded.numberOfChapters;`);
235
+ const insertMany = db.transaction((books) => {
236
+ for (let book of books) {
237
+ if (!book) {
238
+ continue;
239
+ }
240
+ bookUpsert.run({
241
+ id: book.id,
242
+ translationId: translation.id,
243
+ title: book.title,
244
+ name: book.name,
245
+ commonName: book.commonName,
246
+ numberOfChapters: book.chapters.length,
247
+ bookOrder: book.order ?? 9999
248
+ });
249
+ }
250
+ });
251
+ insertMany(translationBooks);
252
+ for (let book of translationBooks) {
253
+ insertTranslationContent(db, translation, book, book.chapters);
254
+ }
255
+ }
256
+ function insertTranslationContent(db, translation, book, chapters) {
257
+ const chapterUpsert = db.prepare(`INSERT INTO Chapter(
258
+ translationId,
259
+ bookId,
260
+ number,
261
+ json
262
+ ) VALUES (
263
+ @translationId,
264
+ @bookId,
265
+ @number,
266
+ @json
267
+ ) ON CONFLICT(translationId,bookId,number) DO
268
+ UPDATE SET
269
+ json=excluded.json;`);
270
+ const verseUpsert = db.prepare(`INSERT INTO ChapterVerse(
271
+ translationId,
272
+ bookId,
273
+ chapterNumber,
274
+ number,
275
+ text,
276
+ contentJson
277
+ ) VALUES (
278
+ @translationId,
279
+ @bookId,
280
+ @chapterNumber,
281
+ @number,
282
+ @text,
283
+ @contentJson
284
+ ) ON CONFLICT(translationId,bookId,chapterNumber,number) DO
285
+ UPDATE SET
286
+ text=excluded.text,
287
+ contentJson=excluded.contentJson;`);
288
+ const footnoteUpsert = db.prepare(`INSERT INTO ChapterFootnote(
289
+ translationId,
290
+ bookId,
291
+ chapterNumber,
292
+ id,
293
+ verseNumber,
294
+ text
295
+ ) VALUES (
296
+ @translationId,
297
+ @bookId,
298
+ @chapterNumber,
299
+ @id,
300
+ @verseNumber,
301
+ @text
302
+ ) ON CONFLICT(translationId,bookId,chapterNumber,id) DO
303
+ UPDATE SET
304
+ verseNumber=excluded.verseNumber,
305
+ text=excluded.text;`);
306
+ const chapterAudioUpsert = db.prepare(`INSERT INTO ChapterAudioUrl(
307
+ translationId,
308
+ bookId,
309
+ number,
310
+ reader,
311
+ url
312
+ ) VALUES (
313
+ @translationId,
314
+ @bookId,
315
+ @number,
316
+ @reader,
317
+ @url
318
+ ) ON CONFLICT(translationId,bookId,number,reader) DO
319
+ UPDATE SET
320
+ url=excluded.url;`);
321
+ const insertChaptersAndVerses = db.transaction(() => {
322
+ for (let chapter of chapters) {
323
+ let verses = [];
324
+ let footnotes = new Map();
325
+ for (let c of chapter.chapter.footnotes) {
326
+ footnotes.set(c.noteId, {
327
+ id: c.noteId,
328
+ text: c.text,
329
+ });
330
+ }
331
+ for (let c of chapter.chapter.content) {
332
+ if (c.type === 'verse') {
333
+ const verse = c;
334
+ if (!verse.number) {
335
+ console.error('Verse missing number', translation.id, book.id, chapter.chapter.number, verse.number);
336
+ continue;
337
+ }
338
+ let text = '';
339
+ for (let c of verse.content) {
340
+ if (typeof c === 'string') {
341
+ text += c + ' ';
342
+ }
343
+ else if (typeof c === 'object') {
344
+ if ('lineBreak' in c) {
345
+ text += '\n';
346
+ }
347
+ else if ('text' in c) {
348
+ text += c.text + ' ';
349
+ }
350
+ else if ('noteId' in c) {
351
+ const note = footnotes.get(c.noteId);
352
+ if (note) {
353
+ note.verseNumber = verse.number;
354
+ }
355
+ }
356
+ }
357
+ }
358
+ let contentJson = JSON.stringify(verse.content);
359
+ verses.push({
360
+ number: verse.number,
361
+ text: text.trimEnd(),
362
+ contentJson,
363
+ });
364
+ }
365
+ }
366
+ chapterUpsert.run({
367
+ translationId: translation.id,
368
+ bookId: book.id,
369
+ number: chapter.chapter.number,
370
+ json: JSON.stringify(chapter.chapter),
371
+ });
372
+ for (let verse of verses) {
373
+ verseUpsert.run({
374
+ translationId: translation.id,
375
+ bookId: book.id,
376
+ chapterNumber: chapter.chapter.number,
377
+ number: verse.number,
378
+ text: verse.text,
379
+ contentJson: verse.contentJson,
380
+ });
381
+ }
382
+ for (let footnote of footnotes.values()) {
383
+ footnoteUpsert.run({
384
+ translationId: translation.id,
385
+ bookId: book.id,
386
+ chapterNumber: chapter.chapter.number,
387
+ id: footnote.id,
388
+ verseNumber: footnote.verseNumber,
389
+ text: footnote.text,
390
+ });
391
+ }
392
+ for (let reader in chapter.thisChapterAudioLinks) {
393
+ const url = chapter.thisChapterAudioLinks[reader];
394
+ if (url) {
395
+ chapterAudioUpsert.run({
396
+ translationId: translation.id,
397
+ bookId: book.id,
398
+ number: chapter.chapter.number,
399
+ reader: reader,
400
+ url,
401
+ });
402
+ }
403
+ }
404
+ }
405
+ });
406
+ insertChaptersAndVerses();
407
+ }
408
+ /**
409
+ * Updates the hashes for the translations in the database.
410
+ * @param db The database to update the hashes in.
411
+ * @param translations The translations to update the hashes for.
412
+ */
413
+ function updateTranslationHashes(db, translations) {
414
+ console.log(`Updating hashes for ${translations.length} translations.`);
415
+ const updateTranslationHash = db.prepare(`UPDATE Translation SET sha256 = @sha256 WHERE id = @translationId;`);
416
+ const updateBookHash = db.prepare(`UPDATE Book SET sha256 = @sha256 WHERE translationId = @translationId AND id = @bookId;`);
417
+ const updateChapterHash = db.prepare(`UPDATE Chapter SET sha256 = @sha256 WHERE translationId = @translationId AND bookId = @bookId AND number = @chapterNumber;`);
418
+ const getBooks = db.prepare('SELECT * FROM Book WHERE translationId = ?;');
419
+ const getChapters = db.prepare('SELECT * FROM Chapter WHERE translationId = @translationId AND bookId = @bookId;');
420
+ for (let translation of translations) {
421
+ const translationSha = (0, hash_js_1.sha256)()
422
+ .update(translation.id)
423
+ .update(translation.name)
424
+ .update(translation.language)
425
+ .update(translation.licenseUrl)
426
+ .update(translation.textDirection)
427
+ .update(translation.website)
428
+ .update(translation.englishName)
429
+ .update(translation.shortName);
430
+ const books = getBooks.all(translation.id);
431
+ for (let book of books) {
432
+ const chapters = getChapters.all({
433
+ translationId: translation.id,
434
+ bookId: book.id
435
+ });
436
+ const bookSha = (0, hash_js_1.sha256)()
437
+ .update(book.translationId)
438
+ .update(book.id)
439
+ .update(book.numberOfChapters)
440
+ .update(book.order)
441
+ .update(book.name)
442
+ .update(book.title)
443
+ .update(book.commonName);
444
+ for (let chapter of chapters) {
445
+ const hash = (0, hash_js_1.sha256)()
446
+ .update(chapter.translationId)
447
+ .update(chapter.bookId)
448
+ .update(chapter.number)
449
+ .update(chapter.json)
450
+ .digest('hex');
451
+ chapter.sha256 = hash;
452
+ bookSha.update(hash);
453
+ }
454
+ const updateChapters = db.transaction(() => {
455
+ for (let chapter of chapters) {
456
+ updateChapterHash.run({
457
+ sha256: chapter.sha256,
458
+ translationId: chapter.translationId,
459
+ bookId: chapter.bookId,
460
+ chapterNumber: chapter.number
461
+ });
462
+ }
463
+ });
464
+ updateChapters();
465
+ const bookHash = bookSha.digest('hex');
466
+ book.sha256 = bookHash;
467
+ translationSha.update(bookHash);
468
+ }
469
+ const updateBooks = db.transaction(() => {
470
+ for (let book of books) {
471
+ updateBookHash.run({
472
+ sha256: book.sha256,
473
+ translationId: book.translationId,
474
+ bookId: book.id
475
+ });
476
+ }
477
+ });
478
+ updateBooks();
479
+ const hash = translationSha.digest('hex');
480
+ translation.sha256 = hash;
481
+ }
482
+ const updateTranslations = db.transaction(() => {
483
+ for (let translation of translations) {
484
+ updateTranslationHash.run({
485
+ sha256: translation.sha256,
486
+ translationId: translation.id
487
+ });
488
+ }
489
+ });
490
+ updateTranslations();
491
+ console.log(`Updated.`);
492
+ }
493
+ function getDbPathFromDir(dir) {
494
+ dir = dir || process.cwd();
495
+ return path_1.default.resolve(dir, 'bible-api.db');
496
+ }
497
+ function getDbPath(p) {
498
+ if (p) {
499
+ return path_1.default.resolve(p);
500
+ }
501
+ return getDbPathFromDir(process.cwd());
502
+ }
503
+ function getPrismaDbFromDir(dir) {
504
+ const dbPath = getDbPathFromDir(dir);
505
+ const prisma = new prisma_gen_1.PrismaClient({
506
+ datasources: {
507
+ db: {
508
+ url: `file:${dbPath}`,
509
+ }
510
+ }
511
+ });
512
+ return prisma;
513
+ }
514
+ async function getDbFromDir(dir) {
515
+ const dbPath = getDbPathFromDir(dir);
516
+ const db = await getDb(dbPath);
517
+ return db;
518
+ }
519
+ async function getDb(dbPath) {
520
+ const db = new better_sqlite3_1.default(dbPath, {});
521
+ db.exec(`CREATE TABLE IF NOT EXISTS "_prisma_migrations" (
522
+ "id" TEXT PRIMARY KEY NOT NULL,
523
+ "checksum" TEXT NOT NULL,
524
+ "finished_at" DATETIME,
525
+ "migration_name" TEXT NOT NULL,
526
+ "logs" TEXT,
527
+ "rolled_back_at" DATETIME,
528
+ "started_at" DATETIME NOT NULL DEFAULT current_timestamp,
529
+ "applied_steps_count" INTEGER UNSIGNED NOT NULL DEFAULT 0
530
+ );`);
531
+ const migrations = await (0, fs_extra_1.readdir)(migrationsPath);
532
+ const appliedMigrations = db.prepare('SELECT * FROM _prisma_migrations;').all();
533
+ let missingMigrations = [];
534
+ for (let migration of migrations) {
535
+ if (appliedMigrations.some(m => m.migration_name === migration)) {
536
+ continue;
537
+ }
538
+ if (path_1.default.extname(migration) !== '') {
539
+ continue;
540
+ }
541
+ missingMigrations.push(migration);
542
+ }
543
+ const insertMigrationStatement = db.prepare('INSERT INTO _prisma_migrations (id, checksum, started_at, finished_at, migration_name, applied_steps_count, logs, rolled_back_at) VALUES (?, ?, ?, ?, ?, ?, NULL, NULL);');
544
+ for (let missingMigration of missingMigrations) {
545
+ console.log(`Applying migration ${missingMigration}...`);
546
+ const migration = path_1.default.resolve(migrationsPath, missingMigration, 'migration.sql');
547
+ const migrationFile = await (0, fs_extra_1.readFile)(migration, 'utf8');
548
+ db.exec(migrationFile);
549
+ insertMigrationStatement.run((0, node_crypto_1.randomUUID)(), '', new Date().toISOString(), new Date().toISOString(), missingMigration, 1);
550
+ }
551
+ return db;
552
+ }
553
+ /**
554
+ * Loads the datasets from the database in a series of batches.
555
+ * @param db The database.
556
+ * @param translationsPerBatch The number of translations to load per batch.
557
+ * @param translationsToLoad The list of translations to load. If not provided, all translations will be loaded.
558
+ */
559
+ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad) {
560
+ let offset = 0;
561
+ let pageSize = translationsPerBatch;
562
+ console.log('Generating API files in batches of', pageSize);
563
+ const totalTranslations = await db.translation.count();
564
+ const totalBatches = Math.ceil(totalTranslations / pageSize);
565
+ let batchNumber = 1;
566
+ while (true) {
567
+ console.log('Generating API batch', batchNumber, 'of', totalBatches);
568
+ batchNumber++;
569
+ const query = {
570
+ skip: offset,
571
+ take: pageSize,
572
+ };
573
+ if (translationsToLoad && translationsToLoad.length > 0) {
574
+ query.where = {
575
+ id: {
576
+ in: translationsToLoad,
577
+ }
578
+ };
579
+ }
580
+ const translations = await db.translation.findMany(query);
581
+ if (translations.length <= 0) {
582
+ break;
583
+ }
584
+ const dataset = {
585
+ translations: []
586
+ };
587
+ for (let translation of translations) {
588
+ const datasetTranslation = {
589
+ ...translation,
590
+ shortName: translation.shortName,
591
+ textDirection: translation.textDirection,
592
+ books: [],
593
+ };
594
+ dataset.translations.push(datasetTranslation);
595
+ const books = await db.book.findMany({
596
+ where: {
597
+ translationId: translation.id,
598
+ },
599
+ orderBy: {
600
+ order: 'asc',
601
+ },
602
+ });
603
+ for (let book of books) {
604
+ const chapters = await db.chapter.findMany({
605
+ where: {
606
+ translationId: translation.id,
607
+ bookId: book.id,
608
+ },
609
+ orderBy: {
610
+ number: 'asc',
611
+ },
612
+ });
613
+ const audioLinks = await db.chapterAudioUrl.findMany({
614
+ where: {
615
+ translationId: translation.id,
616
+ bookId: book.id
617
+ },
618
+ orderBy: [
619
+ { number: 'asc' },
620
+ { reader: 'asc' }
621
+ ]
622
+ });
623
+ const bookChapters = chapters.map(chapter => {
624
+ return {
625
+ chapter: JSON.parse(chapter.json),
626
+ thisChapterAudioLinks: audioLinks
627
+ .filter(link => link.number === chapter.number)
628
+ .reduce((acc, link) => {
629
+ acc[link.reader] = link.url;
630
+ return acc;
631
+ }, {})
632
+ };
633
+ });
634
+ const datasetBook = {
635
+ ...book,
636
+ chapters: bookChapters,
637
+ };
638
+ datasetTranslation.books.push(datasetBook);
639
+ }
640
+ }
641
+ yield dataset;
642
+ offset += pageSize;
643
+ }
644
+ }
645
+ /**
646
+ * Generates and serializes the API files for the dataset that is stored in the database.
647
+ * Yields each batch of serialized files.
648
+ * @param db The database that the dataset should be loaded from.
649
+ * @param options The options to use for generating the API.
650
+ * @param translationsPerBatch The number of translations that should be loaded and written per batch.
651
+ * @param translations The list of translations that should be loaded. If not provided, all translations will be loaded.
652
+ */
653
+ async function* serializeFilesForDataset(db, options, translationsPerBatch = 50, translations) {
654
+ yield* serializeFiles(loadDatasets(db, translationsPerBatch, translations), options);
655
+ }
656
+ /**
657
+ * Serializes the API files for the given datasets.
658
+ * @param datasets The datasets to serialize.
659
+ * @param options The options to use for serializing the files.
660
+ */
661
+ async function* serializeFiles(datasets, options) {
662
+ const mergableFiles = new Map();
663
+ for await (let dataset of datasets) {
664
+ const api = (0, api_1.generateApiForDataset)(dataset, options);
665
+ const files = (0, api_1.generateFilesForApi)(api);
666
+ console.log('Generated', files.length, 'files');
667
+ let serializedFiles = [];
668
+ for (let file of files) {
669
+ if (file.mergable) {
670
+ let arr = mergableFiles.get(file.path);
671
+ if (!arr) {
672
+ arr = [];
673
+ mergableFiles.set(file.path, arr);
674
+ }
675
+ arr.push(file);
676
+ continue;
677
+ }
678
+ const serialized = await transformFile(file.path, file.content);
679
+ if (serialized) {
680
+ serializedFiles.push(serialized);
681
+ }
682
+ }
683
+ yield serializedFiles;
684
+ }
685
+ let serializedFiles = [];
686
+ for (let [path, files] of mergableFiles) {
687
+ let content = {};
688
+ for (let file of files) {
689
+ if (!content) {
690
+ content = file.content;
691
+ }
692
+ else {
693
+ content = (0, lodash_1.mergeWith)(content, file.content, (objValue, srcValue) => {
694
+ if (Array.isArray(objValue)) {
695
+ return objValue.concat(srcValue);
696
+ }
697
+ return undefined;
698
+ });
699
+ }
700
+ }
701
+ if (content) {
702
+ const serialized = await transformFile(path, content);
703
+ if (serialized) {
704
+ serializedFiles.push(serialized);
705
+ }
706
+ }
707
+ }
708
+ yield serializedFiles;
709
+ async function transformFile(path, content) {
710
+ let fileContent;
711
+ if (typeof content === 'function') {
712
+ fileContent = await content();
713
+ }
714
+ else {
715
+ fileContent = content;
716
+ }
717
+ const ext = (0, path_2.extname)(path);
718
+ if (ext === '.json') {
719
+ let json;
720
+ if (fileContent instanceof ReadableStream) {
721
+ json = '';
722
+ for await (const chunk of stream_1.Readable.fromWeb(fileContent, {
723
+ encoding: 'utf-8'
724
+ })) {
725
+ json += chunk;
726
+ }
727
+ }
728
+ else {
729
+ json = JSON.stringify(fileContent, undefined, options.pretty ? 2 : undefined);
730
+ }
731
+ return {
732
+ path,
733
+ content: json,
734
+ sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest()))
735
+ };
736
+ }
737
+ else if (ext === '.mp3') {
738
+ if (fileContent instanceof ReadableStream) {
739
+ return {
740
+ path,
741
+ content: stream_1.Readable.fromWeb(fileContent),
742
+ };
743
+ }
744
+ else {
745
+ console.warn('Expected content to be a readable stream for', path);
746
+ console.warn('Skipping file');
747
+ return null;
748
+ }
749
+ }
750
+ console.warn('Unknown file type', path);
751
+ console.warn('Skipping file');
752
+ return null;
753
+ }
754
+ }
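For readers who want to see how the functions exported from package/db.js fit together, the sketch below strings several of them into a minimal import-and-generate flow. It is illustrative only and is not part of the published package: the subpath import, the choice of linkedom as the DOM parser for USX files, and the shape of the options object beyond the pretty flag used in transformFile are assumptions, not something this diff confirms.

// Minimal sketch (not shipped with @helloao/cli). Assumes the db module is
// importable as a subpath and that any standards-compliant DOMParser works
// for USX parsing; linkedom is used here purely as an example.
const path = require('node:path');
const { DOMParser } = require('linkedom');
const {
    getDbFromDir,
    importTranslations,
    getPrismaDbFromDir,
    serializeFilesForDataset,
} = require('@helloao/cli/db');

async function run() {
    const dir = process.cwd();

    // Opens (or creates) ./bible-api.db with better-sqlite3 and applies any
    // migrations that are not yet recorded in _prisma_migrations.
    const db = await getDbFromDir(dir);

    // Parses and upserts the translation files found in the given directories.
    // The last argument is the overwrite flag: false skips files whose SHA-256
    // already matches a previously imported file, true forces a full re-import.
    await importTranslations(db, [path.resolve('translations')], new DOMParser(), false);
    db.close();

    // loadDatasets/serializeFilesForDataset read the data back through the
    // generated Prisma client and yield batches of serialized API files.
    const prisma = getPrismaDbFromDir(dir);
    for await (const batch of serializeFilesForDataset(prisma, { pretty: true })) {
        for (const file of batch) {
            console.log(file.path, typeof file.sha256 === 'function' ? file.sha256() : '');
        }
    }
    await prisma.$disconnect();
}

run().catch((err) => {
    console.error(err);
    process.exit(1);
});

One detail worth noting from the code above: importTranslations drains the dirs array it is given (it batches with dirs.splice(0, 10) until the array is empty), so pass a copy if the caller still needs the original list afterwards.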