@helloao/cli 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/actions.d.ts +57 -0
- package/actions.js +262 -0
- package/cli.d.ts +2 -0
- package/cli.js +139 -0
- package/db.d.ts +110 -0
- package/db.js +754 -0
- package/downloads.d.ts +2 -0
- package/downloads.js +12 -0
- package/files.d.ts +56 -0
- package/files.js +232 -0
- package/index.d.ts +8 -0
- package/index.js +38 -0
- package/migrations/20240420231455_initial/migration.sql +66 -0
- package/migrations/20240623183848_add_book_order/migration.sql +26 -0
- package/migrations/20240629194121_add_chapter_links/migration.sql +45 -0
- package/migrations/20240629194513_add_chapter_content/migration.sql +30 -0
- package/migrations/20240705221833_remove_unused_columns/migration.sql +27 -0
- package/migrations/20240711173108_add_chapter_audio/migration.sql +13 -0
- package/migrations/20240724212651_add_hashing/migration.sql +25 -0
- package/node_modules/@zip.js/zip.js/LICENSE +28 -0
- package/node_modules/@zip.js/zip.js/README.md +173 -0
- package/node_modules/@zip.js/zip.js/deno.json +8 -0
- package/node_modules/@zip.js/zip.js/dist/README.md +28 -0
- package/node_modules/@zip.js/zip.js/dist/z-worker-fflate.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/z-worker-pako.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/z-worker.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip-fs-full.js +11935 -0
- package/node_modules/@zip.js/zip.js/dist/zip-fs-full.min.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip-fs.js +6079 -0
- package/node_modules/@zip.js/zip.js/dist/zip-fs.min.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip-full.js +9463 -0
- package/node_modules/@zip.js/zip.js/dist/zip-full.min.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip-no-worker-deflate.min.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip-no-worker-inflate.min.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip-no-worker.min.js +1 -0
- package/node_modules/@zip.js/zip.js/dist/zip.js +5240 -0
- package/node_modules/@zip.js/zip.js/dist/zip.min.js +1 -0
- package/node_modules/@zip.js/zip.js/index-fflate.js +82 -0
- package/node_modules/@zip.js/zip.js/index.cjs +11927 -0
- package/node_modules/@zip.js/zip.js/index.d.ts +2048 -0
- package/node_modules/@zip.js/zip.js/index.js +87 -0
- package/node_modules/@zip.js/zip.js/index.min.js +1 -0
- package/node_modules/@zip.js/zip.js/lib/core/codec-pool.js +127 -0
- package/node_modules/@zip.js/zip.js/lib/core/codec-worker.js +348 -0
- package/node_modules/@zip.js/zip.js/lib/core/configuration.js +127 -0
- package/node_modules/@zip.js/zip.js/lib/core/constants.js +114 -0
- package/node_modules/@zip.js/zip.js/lib/core/io.js +749 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/aes-crypto-stream.js +326 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/codec-stream.js +154 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/crc32.js +63 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/deflate.js +2063 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/inflate.js +2167 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/codecs/sjcl.js +827 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/common-crypto.js +55 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/crc32-stream.js +56 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/stream-adapter.js +55 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/zip-crypto-stream.js +162 -0
- package/node_modules/@zip.js/zip.js/lib/core/streams/zip-entry-stream.js +165 -0
- package/node_modules/@zip.js/zip.js/lib/core/util/cp437-decode.js +48 -0
- package/node_modules/@zip.js/zip.js/lib/core/util/decode-text.js +43 -0
- package/node_modules/@zip.js/zip.js/lib/core/util/default-mime-type.js +38 -0
- package/node_modules/@zip.js/zip.js/lib/core/util/encode-text.js +48 -0
- package/node_modules/@zip.js/zip.js/lib/core/util/mime-type.js +1639 -0
- package/node_modules/@zip.js/zip.js/lib/core/util/stream-codec-shim.js +91 -0
- package/node_modules/@zip.js/zip.js/lib/core/z-worker-core.js +176 -0
- package/node_modules/@zip.js/zip.js/lib/core/zip-entry.js +86 -0
- package/node_modules/@zip.js/zip.js/lib/core/zip-fs-core.js +865 -0
- package/node_modules/@zip.js/zip.js/lib/core/zip-reader.js +757 -0
- package/node_modules/@zip.js/zip.js/lib/core/zip-writer.js +1186 -0
- package/node_modules/@zip.js/zip.js/lib/z-worker-bootstrap-fflate.js +40 -0
- package/node_modules/@zip.js/zip.js/lib/z-worker-bootstrap-pako.js +39 -0
- package/node_modules/@zip.js/zip.js/lib/z-worker-fflate.js +40 -0
- package/node_modules/@zip.js/zip.js/lib/z-worker-inline-template.js +42 -0
- package/node_modules/@zip.js/zip.js/lib/z-worker-inline.js +1 -0
- package/node_modules/@zip.js/zip.js/lib/z-worker.js +38 -0
- package/node_modules/@zip.js/zip.js/lib/zip-data-uri.js +53 -0
- package/node_modules/@zip.js/zip.js/lib/zip-fflate-shim.js +37 -0
- package/node_modules/@zip.js/zip.js/lib/zip-fs.js +53 -0
- package/node_modules/@zip.js/zip.js/lib/zip-full-fflate.js +53 -0
- package/node_modules/@zip.js/zip.js/lib/zip-full.js +54 -0
- package/node_modules/@zip.js/zip.js/lib/zip-no-worker-deflate.js +42 -0
- package/node_modules/@zip.js/zip.js/lib/zip-no-worker-fflate-deflate.js +42 -0
- package/node_modules/@zip.js/zip.js/lib/zip-no-worker-fflate-inflate.js +42 -0
- package/node_modules/@zip.js/zip.js/lib/zip-no-worker-fflate.js +43 -0
- package/node_modules/@zip.js/zip.js/lib/zip-no-worker-inflate.js +42 -0
- package/node_modules/@zip.js/zip.js/lib/zip-no-worker.js +44 -0
- package/node_modules/@zip.js/zip.js/lib/zip.js +52 -0
- package/node_modules/@zip.js/zip.js/package.json +86 -0
- package/package.json +43 -0
- package/prisma-gen/default.d.ts +1 -0
- package/prisma-gen/default.js +1 -0
- package/prisma-gen/edge.d.ts +1 -0
- package/prisma-gen/edge.js +242 -0
- package/prisma-gen/index-browser.js +236 -0
- package/prisma-gen/index.d.ts +13248 -0
- package/prisma-gen/index.js +265 -0
- package/prisma-gen/runtime/edge-esm.js +28 -0
- package/prisma-gen/runtime/edge.js +28 -0
- package/prisma-gen/runtime/index-browser.d.ts +365 -0
- package/prisma-gen/runtime/index-browser.js +13 -0
- package/prisma-gen/runtime/library.d.ts +3168 -0
- package/prisma-gen/runtime/library.js +140 -0
- package/prisma-gen/runtime/wasm.js +29 -0
- package/prisma-gen/wasm.d.ts +1 -0
- package/prisma-gen/wasm.js +236 -0
- package/s3.d.ts +14 -0
- package/s3.js +76 -0
- package/schema.prisma +154 -0
- package/uploads.d.ts +54 -0
- package/uploads.js +141 -0
package/s3.js
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
1
|
+
"use strict";
// Compiled CommonJS module for the S3 upload destination.
// Exposes the S3Uploader class and the parseS3Url helper.
Object.defineProperty(exports, "__esModule", { value: true });
exports.S3Uploader = void 0;
exports.parseS3Url = parseS3Url;
const client_s3_1 = require("@aws-sdk/client-s3");
// CommonJS require of the AWS credential provider chain.
// (The original "// ES6 import" note was inaccurate for this compiled output.)
const credential_providers_1 = require("@aws-sdk/credential-providers");
/**
 * Uploads generated API files to an S3 bucket, skipping files whose
 * SHA-256 checksum already matches the object stored in S3.
 */
class S3Uploader {
    _client;
    _bucketName;
    _keyPrefix;
    /** Preferred number of files to upload per batch. */
    get idealBatchSize() {
        return 50;
    }
    /**
     * @param bucketName The S3 bucket to upload into.
     * @param keyPrefix Optional key prefix prepended to every uploaded path.
     * @param profile Optional AWS profile name used by the credential chain.
     */
    constructor(bucketName, keyPrefix, profile) {
        this._bucketName = bucketName;
        this._keyPrefix = keyPrefix;
        const credentials = (0, credential_providers_1.fromNodeProviderChain)({
            profile: profile ?? undefined,
        });
        this._client = new client_s3_1.S3Client({ credentials });
    }
    /**
     * Uploads a single file.
     * @param file The file to upload (expects .path, .content and an optional .sha256()).
     * @param overwrite Whether to replace an existing object with a different checksum.
     * @returns true when the object was written; false when the upload was skipped.
     */
    async upload(file, overwrite) {
        // S3 keys must not start with a slash.
        const relativePath = file.path.startsWith('/') ? file.path.substring(1) : file.path;
        const key = this._keyPrefix ? `${this._keyPrefix}/${relativePath}` : relativePath;
        const hash = file.sha256?.();
        // Only HEAD the object when the result could cause a skip:
        // either we can compare checksums, or we must not overwrite.
        if (hash || !overwrite) {
            try {
                const existing = await this._client.send(new client_s3_1.HeadObjectCommand({
                    Bucket: this._bucketName,
                    Key: key,
                }));
                // Case-insensitive checksum comparison against the stored object.
                // NOTE(review): assumes file.sha256() and S3's ChecksumSHA256 use
                // the same encoding — TODO confirm against the file producer.
                const remoteChecksum = existing?.ChecksumSHA256 ?? "";
                const matchesChecksum = hash &&
                    hash.localeCompare(remoteChecksum, undefined, { sensitivity: 'base' }) === 0;
                if (matchesChecksum) {
                    // File is already uploaded and matches the checksum.
                    console.log(`[s3] Matches checksum: ${key}`);
                    return false;
                }
                if (!overwrite) {
                    return false;
                }
            }
            catch (err) {
                if (!(err instanceof client_s3_1.NotFound)) {
                    throw err;
                }
                // NotFound: nothing is stored yet, fall through and write the file.
            }
        }
        await this._client.send(new client_s3_1.PutObjectCommand({
            Bucket: this._bucketName,
            Key: key,
            Body: file.content,
            ContentType: 'application/json',
            ChecksumSHA256: hash,
        }));
        return true;
    }
}
exports.S3Uploader = S3Uploader;
+
/**
 * Parses an "s3://bucket/key" URL.
 * @param url The URL to parse.
 * @returns { bucketName, objectKey } on success (objectKey keeps its
 *          leading slash, or is "" when absent); undefined when the URL
 *          is not a valid S3 URL.
 */
function parseS3Url(url) {
    const matched = /^s3:\/\/([a-z0-9.\-]+)(\/[^${}]*)?$/.exec(url);
    if (!matched) {
        return undefined;
    }
    const [, bucketName, objectKey] = matched;
    return {
        bucketName,
        objectKey: objectKey ?? "",
    };
}
package/schema.prisma
ADDED
|
@@ -0,0 +1,154 @@
|
|
|
1
|
+
// Prisma schema for the local bible-api SQLite database.
datasource db {
  provider = "sqlite"
  url      = "file:../../bible-api.dev.db"
}

generator client {
  provider = "prisma-client-js"
  output   = "./prisma-gen"
}

// A Bible translation and its top-level metadata.
model Translation {
  id            String @id
  name          String
  website       String
  licenseUrl    String
  shortName     String?
  englishName   String
  language      String
  textDirection String

  // The SHA-256 hash of the translation
  // includes everything about the translation, including the books, chapters, verses, footnotes, etc.
  sha256 String?

  books     Book[]
  chapters  Chapter[]
  verses    ChapterVerse[]
  footnotes ChapterFootnote[]
  audioUrls ChapterAudioUrl[]
}

// A source file that a translation was imported from.
model InputFile {
  // The ID of the translation that the file is for
  translationId String

  // The name of the file
  name String

  format String

  // The SHA-256 hash of the file
  sha256 String

  sizeInBytes Int

  @@id([translationId, name])
}

// A book within a translation (composite key: translation + book ID).
model Book {
  id String

  translationId String
  translation   Translation @relation(fields: [translationId], references: [id])

  name       String
  commonName String
  title      String?
  order      Int

  numberOfChapters Int

  // The SHA-256 hash of the book
  sha256 String?

  chapters  Chapter[]
  verses    ChapterVerse[]
  footnotes ChapterFootnote[]
  audioUrls ChapterAudioUrl[]

  @@id([translationId, id])
}

// A chapter of a book; the full chapter content is stored as JSON.
model Chapter {
  number Int

  bookId String
  book   Book   @relation(fields: [translationId, bookId], references: [translationId, id])

  translationId String
  translation   Translation @relation(fields: [translationId], references: [id])

  json String // The JSON of the chapter

  // The SHA-256 hash of the chapter
  sha256 String?

  verses    ChapterVerse[]
  footnotes ChapterFootnote[]
  audioUrls ChapterAudioUrl[]

  @@id([translationId, bookId, number])
}

// An audio recording URL for a chapter, keyed by reader.
model ChapterAudioUrl {
  number Int
  bookId String
  book   Book   @relation(fields: [translationId, bookId], references: [translationId, id])

  translationId String
  translation   Translation @relation(fields: [translationId], references: [id])

  chapter Chapter @relation(fields: [translationId, bookId, number], references: [translationId, bookId, number])

  reader String
  url    String

  @@id([translationId, bookId, number, reader])
}

// A single verse within a chapter.
model ChapterVerse {
  number Int

  chapterNumber Int
  chapter       Chapter @relation(fields: [translationId, bookId, chapterNumber], references: [translationId, bookId, number])

  bookId String
  book   Book   @relation(fields: [translationId, bookId], references: [translationId, id])

  translationId String
  translation   Translation @relation(fields: [translationId], references: [id])

  text        String // The text of the verse
  contentJson String // The JSON of the verse content

  // The SHA-256 hash of the verse
  sha256 String?

  footnotes ChapterFootnote[]

  @@id([translationId, bookId, chapterNumber, number])
}

// A footnote within a chapter, optionally attached to a verse.
model ChapterFootnote {
  id Int

  chapterNumber Int
  chapter       Chapter @relation(fields: [translationId, bookId, chapterNumber], references: [translationId, bookId, number])

  bookId String
  book   Book   @relation(fields: [translationId, bookId], references: [translationId, id])

  translationId String
  translation   Translation @relation(fields: [translationId], references: [id])

  text String

  // The SHA-256 hash of the footnote
  sha256 String?

  verseNumber Int?
  verse       ChapterVerse? @relation(fields: [translationId, bookId, chapterNumber, verseNumber], references: [translationId, bookId, chapterNumber, number])

  @@id([translationId, bookId, chapterNumber, id])
}
package/uploads.d.ts
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
1
|
+
import { DatasetOutput } from "@helloao/tools/generation/dataset";
export interface UploadApiOptions {
    /**
     * The number of files to upload in each batch.
     * NOTE(review): declared as a string (presumably it arrives as raw CLI
     * input) and is parsed with parseInt before use — confirm against cli.js.
     */
    batchSize: string;
    /**
     * Whether to overwrite existing files.
     */
    overwrite?: boolean;
    /**
     * Whether to only overwrite common files.
     * "Common files" are files that are similar between translations, like the books.json endpoint, or individual chapter endpoints.
     */
    overwriteCommonFiles?: boolean;
    /**
     * The file pattern regex that should be used to filter the files that are uploaded.
     */
    filePattern?: string;
    /**
     * The translations to generate API files for.
     * When omitted, files are generated for all translations.
     */
    translations?: string[];
    /**
     * The AWS profile to use for uploading to S3.
     */
    profile?: string;
    /**
     * Whether to generate API files that use the common name instead of book IDs.
     */
    useCommonName?: boolean;
    /**
     * Whether to generate audio files for the API.
     */
    generateAudioFiles?: boolean;
    /**
     * Whether to generate pretty-printed JSON files.
     */
    pretty?: boolean;
}
/**
 * Loads and generates the API files from the database and uploads them to the specified destination.
 * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
 * @param options The options to use for the upload.
 */
export declare function uploadApiFilesFromDatabase(dest: string, options: UploadApiOptions): Promise<void>;
/**
 * Generates the API files from the given datasets and uploads them to the specified destination.
 * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
 * @param options The options to use for the upload.
 * @param datasets The datasets to generate the API files from.
 */
export declare function uploadApiFiles(dest: string, options: UploadApiOptions, datasets: AsyncIterable<DatasetOutput>): Promise<void>;
//# sourceMappingURL=uploads.d.ts.map
package/uploads.js
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
1
|
+
"use strict";
// Compiled CommonJS module that generates API files from datasets and
// uploads them to S3, a zip file, a local directory, or the console.
Object.defineProperty(exports, "__esModule", { value: true });
exports.uploadApiFilesFromDatabase = uploadApiFilesFromDatabase;
exports.uploadApiFiles = uploadApiFiles;
// NOTE(review): db_1 and db_2 both resolve to "./db"; the duplicate
// require is harmless (Node caches the module) but redundant.
const db_1 = require("./db");
const db_2 = require("./db");
const s3_1 = require("./s3");
const path_1 = require("path");
const files_1 = require("./files");
const node_stream_1 = require("node:stream");
/**
 * Loads and generates the API files from the database and uploads them to the specified destination.
 * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
 * @param options The options to use for the upload.
 * @throws {Error} When options.batchSize does not parse to a positive integer.
 */
async function uploadApiFilesFromDatabase(dest, options) {
    const db = (0, db_2.getPrismaDbFromDir)(process.cwd());
    try {
        // Parse with an explicit radix and fail fast on non-numeric input
        // instead of silently passing NaN to the dataset loader.
        const pageSize = parseInt(options.batchSize, 10);
        if (!Number.isFinite(pageSize) || pageSize <= 0) {
            throw new Error(`Invalid batch size: ${options.batchSize}`);
        }
        await uploadApiFiles(dest, options, (0, db_1.loadDatasets)(db, pageSize, options.translations));
    }
    finally {
        // Await the disconnect so the promise is not left floating when the
        // function resolves (or rejects).
        await db.$disconnect();
    }
}
|
26
|
+
/**
 * Generates the API files from the given datasets and uploads them to the specified destination.
 * @param dest The destination to upload the API files to. Supported destinations are S3 ("s3://..."),
 *             the console ("console://"), zip files (".zip" extension), and local directories.
 * @param options The options to use for the upload.
 * @param datasets The datasets to generate the API files from.
 */
async function uploadApiFiles(dest, options, datasets) {
    const overwrite = !!options.overwrite;
    if (overwrite) {
        console.log('Overwriting existing files');
    }
    const overwriteCommonFiles = !!options.overwriteCommonFiles;
    if (overwriteCommonFiles) {
        console.log('Overwriting only common files');
    }
    let filePattern;
    if (!!options.filePattern) {
        // BUG FIX: the pattern used to be compiled with the 'g' flag. A global
        // RegExp is stateful — each .test() call advances lastIndex — so the
        // per-file filter below skipped files non-deterministically. Compile
        // without flags so every .test() starts from position 0.
        filePattern = new RegExp(options.filePattern);
        console.log('Using file pattern:', filePattern);
    }
    if (options.translations) {
        console.log('Generating for specific translations:', options.translations);
    }
    else {
        console.log('Generating for all translations');
    }
    if (options.pretty) {
        console.log('Generating pretty-printed JSON files');
    }
    let uploader;
    if (dest.startsWith('s3://')) {
        console.log('Uploading to S3');
        // Upload to S3
        const url = dest;
        const s3Url = (0, s3_1.parseS3Url)(url);
        if (!s3Url) {
            throw new Error(`Invalid S3 URL: ${url}`);
        }
        if (!s3Url.bucketName) {
            throw new Error(`Invalid S3 URL: ${url}\nUnable to determine bucket name`);
        }
        uploader = new s3_1.S3Uploader(s3Url.bucketName, s3Url.objectKey, options.profile ?? null);
    }
    else if (dest.startsWith('console://')) {
        // Debug destination: print each file instead of persisting it.
        console.log('Uploading to console');
        uploader = {
            idealBatchSize: 50,
            async upload(file, _overwrite) {
                console.log(file.path);
                console.log(file.content);
                return true;
            }
        };
    }
    else if ((0, path_1.extname)(dest) === '.zip') {
        console.log('Writing to zip file:', dest);
        uploader = new files_1.ZipUploader(dest);
    }
    else if (dest) {
        console.log('Writing to local directory:', dest);
        uploader = new files_1.FilesUploader(dest);
    }
    else {
        console.error('Unsupported destination:', dest);
        process.exit(1);
    }
    try {
        for await (let files of (0, db_1.serializeFiles)(datasets, {
            useCommonName: !!options.useCommonName,
            generateAudioFiles: !!options.generateAudioFiles,
            pretty: !!options.pretty,
        })) {
            const batchSize = uploader.idealBatchSize ?? files.length;
            const totalBatches = Math.ceil(files.length / batchSize);
            console.log('Uploading', files.length, 'total files');
            console.log('Uploading in batches of', batchSize);
            let offset = 0;
            let batchNumber = 1;
            let batch = files.slice(offset, offset + batchSize);
            while (batch.length > 0) {
                console.log('Uploading batch', batchNumber, 'of', totalBatches);
                // Counter is shared by the batch's upload promises; safe because
                // JS is single-threaded and ++ runs within one microtask.
                let writtenFiles = 0;
                const promises = batch.map(async (file) => {
                    if (filePattern) {
                        if (!filePattern.test(file.path)) {
                            console.log('Skipping file:', file.path);
                            return;
                        }
                    }
                    // available_translations.json is the only non-"common" file:
                    // it aggregates all translations, so it is only overwritten
                    // when a full overwrite was requested.
                    const isAvailableTranslations = file.path.endsWith('available_translations.json');
                    const isCommonFile = !isAvailableTranslations;
                    if (await uploader.upload(file, overwrite || (overwriteCommonFiles && isCommonFile))) {
                        writtenFiles++;
                    }
                    else {
                        console.warn('File already exists:', file.path);
                        console.warn('Skipping file');
                    }
                    // Release streamed content whether or not it was written.
                    if (file.content instanceof node_stream_1.Readable) {
                        file.content.destroy();
                    }
                });
                await Promise.all(promises);
                console.log('Wrote', writtenFiles, 'files');
                batchNumber++;
                offset += batchSize;
                batch = files.slice(offset, offset + batchSize);
            }
        }
    }
    finally {
        // Flush/close the destination (e.g. finalize the zip archive).
        if (uploader && uploader.dispose) {
            await uploader.dispose();
        }
    }
}