@helloao/cli 0.0.3 → 0.0.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +125 -36
- package/actions.js +37 -37
- package/cli.d.ts +1 -0
- package/cli.js +13 -5
- package/db.d.ts +13 -44
- package/db.js +147 -239
- package/files.d.ts +59 -2
- package/files.js +109 -0
- package/migrations/20240623183848_add_book_order/migration.sql +26 -26
- package/migrations/20240629194121_add_chapter_links/migration.sql +45 -45
- package/migrations/20240629194513_add_chapter_content/migration.sql +30 -30
- package/migrations/20240705221833_remove_unused_columns/migration.sql +27 -27
- package/package.json +3 -5
- package/prisma-gen/edge.js +5 -5
- package/prisma-gen/index.js +10 -10
- package/s3.d.ts +1 -1
- package/schema.prisma +154 -154
- package/uploads.d.ts +23 -4
- package/uploads.js +130 -11
package/db.js
CHANGED
@@ -40,21 +40,17 @@ exports.getPrismaDbFromDir = getPrismaDbFromDir;
 exports.getDbFromDir = getDbFromDir;
 exports.getDb = getDb;
 exports.loadDatasets = loadDatasets;
-exports.
-exports.
+exports.serializeFilesFromDatabase = serializeFilesFromDatabase;
+exports.serializeDatasets = serializeDatasets;
 const prisma_gen_1 = require("./prisma-gen");
 const path_1 = __importStar(require("path"));
 const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
 const fs_extra_1 = require("fs-extra");
 const node_crypto_1 = require("node:crypto");
 const dataset_1 = require("@helloao/tools/generation/dataset");
-const api_1 = require("@helloao/tools/generation/api");
 const files_1 = require("./files");
 const hash_js_1 = require("hash.js");
-const
-const path_2 = require("path");
-const stream_1 = require("stream");
-const base64_js_1 = require("base64-js");
+const api_1 = require("@helloao/tools/generation/api");
 const cliPath = require.resolve('./index');
 const migrationsPath = path_1.default.resolve((0, path_1.dirname)(cliPath), 'migrations');
 /**
@@ -132,22 +128,22 @@ function getChangedOrNewInputFiles(db, files) {
     });
 }
 function insertFileMetadata(db, files) {
-    const fileUpsert = db.prepare(`INSERT INTO InputFile(
-        translationId,
-        name,
-        format,
-        sha256,
-        sizeInBytes
-    ) VALUES (
-        @translationId,
-        @name,
-        @format,
-        @sha256,
-        @sizeInBytes
-    ) ON CONFLICT(translationId, name) DO
-        UPDATE SET
-            format=excluded.format,
-            sha256=excluded.sha256,
+    const fileUpsert = db.prepare(`INSERT INTO InputFile(
+        translationId,
+        name,
+        format,
+        sha256,
+        sizeInBytes
+    ) VALUES (
+        @translationId,
+        @name,
+        @format,
+        @sha256,
+        @sizeInBytes
+    ) ON CONFLICT(translationId, name) DO
+        UPDATE SET
+            format=excluded.format,
+            sha256=excluded.sha256,
             sizeInBytes=excluded.sizeInBytes;`);
     const insertManyFiles = db.transaction((files) => {
         for (let file of files) {
@@ -163,32 +159,32 @@ function insertFileMetadata(db, files) {
     insertManyFiles(files);
 }
 function insertTranslations(db, translations) {
-    const translationUpsert = db.prepare(`INSERT INTO Translation(
-        id,
-        name,
-        language,
-        shortName,
-        textDirection,
-        licenseUrl,
-        website,
-        englishName
-    ) VALUES (
-        @id,
-        @name,
-        @language,
-        @shortName,
-        @textDirection,
-        @licenseUrl,
-        @website,
-        @englishName
-    ) ON CONFLICT(id) DO
-        UPDATE SET
-            name=excluded.name,
-            language=excluded.language,
-            shortName=excluded.shortName,
-            textDirection=excluded.textDirection,
-            licenseUrl=excluded.licenseUrl,
-            website=excluded.website,
+    const translationUpsert = db.prepare(`INSERT INTO Translation(
+        id,
+        name,
+        language,
+        shortName,
+        textDirection,
+        licenseUrl,
+        website,
+        englishName
+    ) VALUES (
+        @id,
+        @name,
+        @language,
+        @shortName,
+        @textDirection,
+        @licenseUrl,
+        @website,
+        @englishName
+    ) ON CONFLICT(id) DO
+        UPDATE SET
+            name=excluded.name,
+            language=excluded.language,
+            shortName=excluded.shortName,
+            textDirection=excluded.textDirection,
+            licenseUrl=excluded.licenseUrl,
+            website=excluded.website,
             englishName=excluded.englishName;`);
     const insertManyTranslations = db.transaction((translations) => {
         for (let translation of translations) {
@@ -210,27 +206,27 @@ function insertTranslations(db, translations) {
     }
 }
 function insertTranslationBooks(db, translation, translationBooks) {
-    const bookUpsert = db.prepare(`INSERT INTO Book(
-        id,
-        translationId,
-        title,
-        name,
-        commonName,
-        numberOfChapters,
-        \`order\`
-    ) VALUES (
-        @id,
-        @translationId,
-        @title,
-        @name,
-        @commonName,
-        @numberOfChapters,
-        @bookOrder
-    ) ON CONFLICT(id,translationId) DO
-        UPDATE SET
-            title=excluded.title,
-            name=excluded.name,
-            commonName=excluded.commonName,
+    const bookUpsert = db.prepare(`INSERT INTO Book(
+        id,
+        translationId,
+        title,
+        name,
+        commonName,
+        numberOfChapters,
+        \`order\`
+    ) VALUES (
+        @id,
+        @translationId,
+        @title,
+        @name,
+        @commonName,
+        @numberOfChapters,
+        @bookOrder
+    ) ON CONFLICT(id,translationId) DO
+        UPDATE SET
+            title=excluded.title,
+            name=excluded.name,
+            commonName=excluded.commonName,
             numberOfChapters=excluded.numberOfChapters;`);
     const insertMany = db.transaction((books) => {
         for (let book of books) {
@@ -254,69 +250,69 @@ function insertTranslationBooks(db, translation, translationBooks) {
     }
 }
 function insertTranslationContent(db, translation, book, chapters) {
-    const chapterUpsert = db.prepare(`INSERT INTO Chapter(
-        translationId,
-        bookId,
-        number,
-        json
-    ) VALUES (
-        @translationId,
-        @bookId,
-        @number,
-        @json
-    ) ON CONFLICT(translationId,bookId,number) DO
-        UPDATE SET
+    const chapterUpsert = db.prepare(`INSERT INTO Chapter(
+        translationId,
+        bookId,
+        number,
+        json
+    ) VALUES (
+        @translationId,
+        @bookId,
+        @number,
+        @json
+    ) ON CONFLICT(translationId,bookId,number) DO
+        UPDATE SET
             json=excluded.json;`);
-    const verseUpsert = db.prepare(`INSERT INTO ChapterVerse(
-        translationId,
-        bookId,
-        chapterNumber,
-        number,
-        text,
-        contentJson
-    ) VALUES (
-        @translationId,
-        @bookId,
-        @chapterNumber,
-        @number,
-        @text,
-        @contentJson
-    ) ON CONFLICT(translationId,bookId,chapterNumber,number) DO
-        UPDATE SET
-            text=excluded.text,
+    const verseUpsert = db.prepare(`INSERT INTO ChapterVerse(
+        translationId,
+        bookId,
+        chapterNumber,
+        number,
+        text,
+        contentJson
+    ) VALUES (
+        @translationId,
+        @bookId,
+        @chapterNumber,
+        @number,
+        @text,
+        @contentJson
+    ) ON CONFLICT(translationId,bookId,chapterNumber,number) DO
+        UPDATE SET
+            text=excluded.text,
             contentJson=excluded.contentJson;`);
-    const footnoteUpsert = db.prepare(`INSERT INTO ChapterFootnote(
-        translationId,
-        bookId,
-        chapterNumber,
-        id,
-        verseNumber,
-        text
-    ) VALUES (
-        @translationId,
-        @bookId,
-        @chapterNumber,
-        @id,
-        @verseNumber,
-        @text
-    ) ON CONFLICT(translationId,bookId,chapterNumber,id) DO
-        UPDATE SET
-            verseNumber=excluded.verseNumber,
+    const footnoteUpsert = db.prepare(`INSERT INTO ChapterFootnote(
+        translationId,
+        bookId,
+        chapterNumber,
+        id,
+        verseNumber,
+        text
+    ) VALUES (
+        @translationId,
+        @bookId,
+        @chapterNumber,
+        @id,
+        @verseNumber,
+        @text
+    ) ON CONFLICT(translationId,bookId,chapterNumber,id) DO
+        UPDATE SET
+            verseNumber=excluded.verseNumber,
             text=excluded.text;`);
-    const chapterAudioUpsert = db.prepare(`INSERT INTO ChapterAudioUrl(
-        translationId,
-        bookId,
-        number,
-        reader,
-        url
-    ) VALUES (
-        @translationId,
-        @bookId,
-        @number,
-        @reader,
-        @url
-    ) ON CONFLICT(translationId,bookId,number,reader) DO
-        UPDATE SET
+    const chapterAudioUpsert = db.prepare(`INSERT INTO ChapterAudioUrl(
+        translationId,
+        bookId,
+        number,
+        reader,
+        url
+    ) VALUES (
+        @translationId,
+        @bookId,
+        @number,
+        @reader,
+        @url
+    ) ON CONFLICT(translationId,bookId,number,reader) DO
+        UPDATE SET
             url=excluded.url;`);
     const insertChaptersAndVerses = db.transaction(() => {
         for (let chapter of chapters) {
@@ -518,15 +514,15 @@ async function getDbFromDir(dir) {
 }
 async function getDb(dbPath) {
     const db = new better_sqlite3_1.default(dbPath, {});
-    db.exec(`CREATE TABLE IF NOT EXISTS "_prisma_migrations" (
-        "id" TEXT PRIMARY KEY NOT NULL,
-        "checksum" TEXT NOT NULL,
-        "finished_at" DATETIME,
-        "migration_name" TEXT NOT NULL,
-        "logs" TEXT,
-        "rolled_back_at" DATETIME,
-        "started_at" DATETIME NOT NULL DEFAULT current_timestamp,
-        "applied_steps_count" INTEGER UNSIGNED NOT NULL DEFAULT 0
+    db.exec(`CREATE TABLE IF NOT EXISTS "_prisma_migrations" (
+        "id" TEXT PRIMARY KEY NOT NULL,
+        "checksum" TEXT NOT NULL,
+        "finished_at" DATETIME,
+        "migration_name" TEXT NOT NULL,
+        "logs" TEXT,
+        "rolled_back_at" DATETIME,
+        "started_at" DATETIME NOT NULL DEFAULT current_timestamp,
+        "applied_steps_count" INTEGER UNSIGNED NOT NULL DEFAULT 0
     );`);
     const migrations = await (0, fs_extra_1.readdir)(migrationsPath);
     const appliedMigrations = db.prepare('SELECT * FROM _prisma_migrations;').all();
@@ -643,112 +639,24 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
     }
 }
 /**
- * Generates and serializes the API files for the
+ * Generates and serializes the API files for the datasets that are stored in the database.
  * Yields each batch of serialized files.
  * @param db The database that the dataset should be loaded from.
- * @param options The options to use for
+ * @param options The options to use for serializing the files.
+ * @param apiOptions The options to use for generating the API files.
  * @param translationsPerBatch The number of translations that should be loaded and written per batch.
  * @param translations The list of translations that should be loaded. If not provided, all translations will be loaded.
  */
-
-
+function serializeFilesFromDatabase(db, options = {}, translationsPerBatch = 50, translations) {
+    return serializeDatasets(loadDatasets(db, translationsPerBatch, translations), options);
 }
 /**
- *
- *
- *
+ * Generates and serializes the API files for the given datasets.
+ * Yields each batch of serialized files.
+ *
+ * @param datasets The datasets to serialize.
+ * @param options The options to use for generating and serializing the files.
  */
-
-
-    for await (let dataset of datasets) {
-        const api = (0, api_1.generateApiForDataset)(dataset, options);
-        const files = (0, api_1.generateFilesForApi)(api);
-        console.log('Generated', files.length, 'files');
-        let serializedFiles = [];
-        for (let file of files) {
-            if (file.mergable) {
-                let arr = mergableFiles.get(file.path);
-                if (!arr) {
-                    arr = [];
-                    mergableFiles.set(file.path, arr);
-                }
-                arr.push(file);
-                continue;
-            }
-            const serialized = await transformFile(file.path, file.content);
-            if (serialized) {
-                serializedFiles.push(serialized);
-            }
-        }
-        yield serializedFiles;
-    }
-    let serializedFiles = [];
-    for (let [path, files] of mergableFiles) {
-        let content = {};
-        for (let file of files) {
-            if (!content) {
-                content = file.content;
-            }
-            else {
-                content = (0, lodash_1.mergeWith)(content, file.content, (objValue, srcValue) => {
-                    if (Array.isArray(objValue)) {
-                        return objValue.concat(srcValue);
-                    }
-                    return undefined;
-                });
-            }
-        }
-        if (content) {
-            const serialized = await transformFile(path, content);
-            if (serialized) {
-                serializedFiles.push(serialized);
-            }
-        }
-    }
-    yield serializedFiles;
-    async function transformFile(path, content) {
-        let fileContent;
-        if (typeof content === 'function') {
-            fileContent = await content();
-        }
-        else {
-            fileContent = content;
-        }
-        const ext = (0, path_2.extname)(path);
-        if (ext === '.json') {
-            let json;
-            if (fileContent instanceof ReadableStream) {
-                json = '';
-                for await (const chunk of stream_1.Readable.fromWeb(fileContent, {
-                    encoding: 'utf-8'
-                })) {
-                    json += chunk;
-                }
-            }
-            else {
-                json = JSON.stringify(content, undefined, options.pretty ? 2 : undefined);
-            }
-            return {
-                path,
-                content: json,
-                sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest()))
-            };
-        }
-        else if (ext === '.mp3') {
-            if (fileContent instanceof ReadableStream) {
-                return {
-                    path,
-                    content: stream_1.Readable.fromWeb(fileContent),
-                };
-            }
-            else {
-                console.warn('Expected content to be a readable stream for', path);
-                console.warn('Skipping file');
-                return null;
-            }
-        }
-        console.warn('Unknown file type', path);
-        console.warn('Skipping file');
-        return null;
-    }
+function serializeDatasets(datasets, options = {}) {
+    return (0, files_1.serializeOutputFiles)((0, api_1.generateOutputFilesFromDatasets)(datasets, options), options);
 }
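
Taken together, the db.js changes replace the old inlined generator (with its nested transformFile helper) with two small exported wrappers: serializeFilesFromDatabase composes loadDatasets with serializeDatasets, which in turn delegates to files_1.serializeOutputFiles. A minimal TypeScript usage sketch follows; the import path and the db.close() call on the better-sqlite3 handle are assumptions for illustration, not shown in this diff:

import { getDb, serializeFilesFromDatabase } from '@helloao/cli/db';

async function exportApi(dbPath: string): Promise<void> {
    const db = await getDb(dbPath);
    try {
        // Default batching: 50 translations per yielded batch of files.
        for await (const batch of serializeFilesFromDatabase(db, { pretty: true })) {
            for (const file of batch) {
                // Each file is a SerializedFile: { path, content, sha256?() }.
                // sha256 is only present for string (JSON) content, hence ?.()
                console.log(file.path, file.sha256?.());
            }
        }
    } finally {
        db.close();
    }
}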
package/files.d.ts
CHANGED
@@ -1,5 +1,62 @@
-import { InputFile } from "@helloao/tools/generation/common-types";
-import {
+import { InputFile, OutputFile } from "@helloao/tools/generation/common-types";
+import { Readable } from "stream";
+/**
+ * Defines an interface that contains information about a serialized file.
+ */
+export interface Uploader {
+    /**
+     * Gets the ideal batch size for the uploader.
+     * Null if the uploader does not need batching.
+     */
+    idealBatchSize: number | null;
+    /**
+     * Uploads the given file.
+     * @param file The file to upload.
+     * @param overwrite Whether the file should be overwritten if it already exists.
+     * @returns True if the file was uploaded. False if the file was skipped due to already existing.
+     */
+    upload(file: SerializedFile, overwrite: boolean): Promise<boolean>;
+    /**
+     * Disposes resources that the uploader uses.
+     */
+    dispose?(): Promise<void>;
+}
+/**
+ * Defines an interface for a file that has been serialized.
+ */
+export interface SerializedFile {
+    path: string;
+    content: string | Readable;
+    /**
+     * Gets the base64-encoded SHA256 hash of the content of the file.
+     */
+    sha256?(): string;
+}
+/**
+ * The options for serializing API files.
+ */
+export interface SerializeApiOptions {
+    /**
+     * Whether the output should be pretty-printed.
+     */
+    pretty?: boolean;
+}
+/**
+ * Serializes the given output files into serialized files using the given options.
+ *
+ * Each iteration of the given files will be processed as a batch, and any mergable files will automatically be merged together and serialized in the final batch.
+ *
+ * @param files The files that should be serialized.
+ * @param options The options for serialization.
+ */
+export declare function serializeOutputFiles(files: AsyncIterable<OutputFile[]>, options: SerializeApiOptions): AsyncGenerator<SerializedFile[]>;
+/**
+ * Serializes the given output file content into a serialized file.
+ * @param path The path that the file should be saved to.
+ * @param content The content of the file.
+ * @param options The options for serialization.
+ */
+export declare function serializeFile(path: string, content: OutputFile['content'], options: SerializeApiOptions): Promise<SerializedFile | null>;
 /**
  * Loads the files for the given translations.
  * @param dir The directory that the translations exist in.
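
These declarations make Uploader and SerializedFile the seam between serialization and storage. A hedged sketch of a custom uploader that writes each serialized file under a local directory; only the interface shapes come from the declarations above, while DirectoryUploader itself, the import path, and the error handling are illustrative:

import { mkdir, writeFile } from 'fs/promises';
import { createWriteStream } from 'fs';
import { dirname, join } from 'path';
import { pipeline } from 'stream/promises';
import type { Uploader, SerializedFile } from '@helloao/cli/files';

export class DirectoryUploader implements Uploader {
    // Plain filesystem writes do not benefit from batching.
    idealBatchSize: number | null = null;

    constructor(private readonly root: string) {}

    async upload(file: SerializedFile, overwrite: boolean): Promise<boolean> {
        const target = join(this.root, file.path);
        await mkdir(dirname(target), { recursive: true });
        // The 'wx' flag fails with EEXIST instead of clobbering,
        // which maps onto the overwrite=false contract.
        const flag = overwrite ? 'w' : 'wx';
        try {
            if (typeof file.content === 'string') {
                await writeFile(target, file.content, { flag });
            } else {
                await pipeline(file.content, createWriteStream(target, { flags: flag }));
            }
            return true;
        } catch (err) {
            if ((err as NodeJS.ErrnoException).code === 'EEXIST') {
                return false; // skipped: file already exists
            }
            throw err;
        }
    }
}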
package/files.js
CHANGED
@@ -24,6 +24,8 @@ var __importStar = (this && this.__importStar) || function (mod) {
 };
 Object.defineProperty(exports, "__esModule", { value: true });
 exports.ZipUploader = exports.FilesUploader = void 0;
+exports.serializeOutputFiles = serializeOutputFiles;
+exports.serializeFile = serializeFile;
 exports.loadTranslationsFiles = loadTranslationsFiles;
 exports.loadTranslationFiles = loadTranslationFiles;
 const promises_1 = require("fs/promises");
@@ -34,6 +36,113 @@ const zip_js_1 = require("@zip.js/zip.js");
 const stream_1 = require("stream");
 const hash_js_1 = require("hash.js");
 const usx_parser_1 = require("@helloao/tools/parser/usx-parser");
+const lodash_1 = require("lodash");
+const base64_js_1 = require("base64-js");
+/**
+ * Serializes the given output files into serialized files using the given options.
+ *
+ * Each iteration of the given files will be processed as a batch, and any mergable files will automatically be merged together and serialized in the final batch.
+ *
+ * @param files The files that should be serialized.
+ * @param options The options for serialization.
+ */
+async function* serializeOutputFiles(files, options) {
+    const mergableFiles = new Map();
+    for await (let batch of files) {
+        let serializedFiles = [];
+        for (let file of batch) {
+            if (file.mergable) {
+                let arr = mergableFiles.get(file.path);
+                if (!arr) {
+                    arr = [];
+                    mergableFiles.set(file.path, arr);
+                }
+                arr.push(file);
+                continue;
+            }
+            const serialized = await serializeFile(file.path, file.content, options);
+            if (serialized) {
+                serializedFiles.push(serialized);
+            }
+        }
+        yield serializedFiles;
+    }
+    let serializedFiles = [];
+    for (let [path, files] of mergableFiles) {
+        let content = {};
+        for (let file of files) {
+            if (!content) {
+                content = file.content;
+            }
+            else {
+                content = (0, lodash_1.mergeWith)(content, file.content, (objValue, srcValue) => {
+                    if (Array.isArray(objValue)) {
+                        return objValue.concat(srcValue);
+                    }
+                    return undefined;
+                });
+            }
+        }
+        if (content) {
+            const serialized = await serializeFile(path, content, options);
+            if (serialized) {
+                serializedFiles.push(serialized);
+            }
+        }
+    }
+    yield serializedFiles;
+}
+/**
+ * Serializes the given output file content into a serialized file.
+ * @param path The path that the file should be saved to.
+ * @param content The content of the file.
+ * @param options The options for serialization.
+ */
+async function serializeFile(path, content, options) {
+    let fileContent;
+    if (typeof content === 'function') {
+        fileContent = await content();
+    }
+    else {
+        fileContent = content;
+    }
+    const ext = (0, path_1.extname)(path);
+    if (ext === '.json') {
+        let json;
+        if (fileContent instanceof ReadableStream) {
+            json = '';
+            for await (const chunk of stream_1.Readable.fromWeb(fileContent, {
+                encoding: 'utf-8'
+            })) {
+                json += chunk;
+            }
+        }
+        else {
+            json = JSON.stringify(content, undefined, options.pretty ? 2 : undefined);
+        }
+        return {
+            path,
+            content: json,
+            sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest()))
+        };
+    }
+    else if (ext === '.mp3') {
+        if (fileContent instanceof ReadableStream) {
+            return {
+                path,
+                content: stream_1.Readable.fromWeb(fileContent),
+            };
+        }
+        else {
+            console.warn('Expected content to be a readable stream for', path);
+            console.warn('Skipping file');
+            return null;
+        }
+    }
+    console.warn('Unknown file type', path);
+    console.warn('Skipping file');
+    return null;
+}
 /**
  * Loads the files for the given translations.
  * @param dir The directory that the translations exist in.
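
Since serializeOutputFiles holds every mergable file back and only merges and yields them in the final batch, callers must drain the generator completely to see those files. A small TypeScript sketch of that behavior; the OutputFile literals here (paths, mergable flags, content shapes) and the import path are invented for illustration, and only the function signature and the pretty option come from this diff:

import { serializeOutputFiles } from '@helloao/cli/files';

// Two input batches; the mergable file appears in both.
async function* batches() {
    yield [
        { path: '/api/t/books.json', content: { books: ['GEN'] }, mergable: false },
        { path: '/api/available_translations.json', content: { translations: ['BSB'] }, mergable: true },
    ];
    yield [
        { path: '/api/available_translations.json', content: { translations: ['ENGWEB'] }, mergable: true },
    ];
}

async function main(): Promise<void> {
    let batchNumber = 0;
    for await (const batch of serializeOutputFiles(batches() as any, { pretty: false })) {
        batchNumber++;
        for (const file of batch) {
            console.log(batchNumber, file.path);
        }
    }
    // Batch 1: /api/t/books.json
    // Batch 2: empty (it contained only mergable files)
    // Batch 3: /api/available_translations.json, with the translations
    // arrays concatenated to ["BSB","ENGWEB"] by the mergeWith customizer.
}

main();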