@helloao/cli 0.0.6 → 0.0.8-alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/uploads.js DELETED
@@ -1,180 +0,0 @@
1
- "use strict";
2
- Object.defineProperty(exports, "__esModule", { value: true });
3
- exports.uploadApiFilesFromDatabase = uploadApiFilesFromDatabase;
4
- exports.serializeAndUploadDatasets = serializeAndUploadDatasets;
5
- exports.uploadFiles = uploadFiles;
6
- exports.uploadFilesUsingUploader = uploadFilesUsingUploader;
7
- const db_1 = require("./db");
8
- const s3_1 = require("./s3");
9
- const path_1 = require("path");
10
- const files_1 = require("./files");
11
- const node_stream_1 = require("node:stream");
12
/**
 * Loads and generates the API files from the database and uploads them to the specified destination.
 * @param db The database that the datasets should be loaded from.
 * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
 * @param options The options to use for the upload.
 * @throws {Error} If `options.batchSize` is a string that cannot be parsed as an integer.
 */
async function uploadApiFilesFromDatabase(db, dest, options) {
    if (options.overwrite) {
        console.log('Overwriting existing files');
    }
    if (options.overwriteCommonFiles) {
        console.log('Overwriting only common files');
    }
    if (options.filePattern) {
        console.log('Using file pattern:', options.filePattern);
    }
    if (options.translations) {
        console.log('Generating for specific translations:', options.translations);
    }
    else {
        console.log('Generating for all translations');
    }
    if (options.pretty) {
        console.log('Generating pretty-printed JSON files');
    }
    // Always pass an explicit radix to parseInt; without it, intent is unclear
    // and legacy engines could misparse strings with leading zeros.
    const pageSize = typeof options.batchSize === 'number'
        ? options.batchSize
        : parseInt(options.batchSize, 10);
    // Fail fast on an unparseable batch size instead of silently passing NaN
    // into the dataset loader.
    if (Number.isNaN(pageSize)) {
        throw new Error(`Invalid batch size: ${options.batchSize}`);
    }
    await serializeAndUploadDatasets(dest, (0, db_1.loadDatasets)(db, pageSize, options.translations), options);
}
42
/**
 * Generates the API files from the given datasets and uploads them to the specified destination.
 * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
 * @param datasets The datasets to generate the API files from.
 * @param options The options to use for the upload.
 */
async function serializeAndUploadDatasets(dest, datasets, options = {}) {
    if (options.overwrite) {
        console.log('Overwriting existing files');
    }
    if (options.overwriteCommonFiles) {
        console.log('Overwriting only common files');
    }
    if (options.filePattern) {
        // Built here only so the effective pattern can be logged; the actual
        // filtering happens later during the upload step.
        const pattern = new RegExp(options.filePattern, 'g');
        console.log('Using file pattern:', pattern);
    }
    if (options.translations) {
        console.log('Generating for specific translations:', options.translations);
    }
    else {
        console.log('Generating for all translations');
    }
    if (options.pretty) {
        console.log('Generating pretty-printed JSON files');
    }
    const serialized = (0, db_1.serializeDatasets)(datasets, { ...options });
    await uploadFiles(dest, options, serialized);
}
76
/**
 * Uploads the given serialized files to the specified destination.
 * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
 * @param options The options to use for the upload.
 * @param serializedFiles The serialized files to upload.
 */
async function uploadFiles(dest, options, serializedFiles) {
    const uploader = createUploaderForDestination(dest, options);
    try {
        await uploadFilesUsingUploader(uploader, options, serializedFiles);
    }
    finally {
        // Always release uploader resources, even when the upload throws.
        if (uploader?.dispose) {
            await uploader.dispose();
        }
    }
}
/**
 * Selects and constructs the uploader implementation matching the destination.
 * Terminates the process when the destination is unsupported.
 */
function createUploaderForDestination(dest, options) {
    if (dest.startsWith('s3://')) {
        console.log('Uploading to S3');
        // Upload to S3
        const url = dest;
        const s3Url = (0, s3_1.parseS3Url)(url);
        if (!s3Url) {
            throw new Error(`Invalid S3 URL: ${url}`);
        }
        if (!s3Url.bucketName) {
            throw new Error(`Invalid S3 URL: ${url}\nUnable to determine bucket name`);
        }
        return new s3_1.S3Uploader(s3Url.bucketName, s3Url.objectKey, (0, s3_1.defaultProviderForOptions)(options));
    }
    if (dest.startsWith('console://')) {
        console.log('Uploading to console');
        // Debug destination: prints each file instead of persisting it.
        return {
            idealBatchSize: 50,
            async upload(file, _overwrite) {
                console.log(file.path);
                console.log(file.content);
                return true;
            },
        };
    }
    if ((0, path_1.extname)(dest) === '.zip') {
        console.log('Writing to zip file:', dest);
        return new files_1.ZipUploader(dest);
    }
    if (dest) {
        console.log('Writing to local directory:', dest);
        return new files_1.FilesUploader(dest);
    }
    console.error('Unsupported destination:', dest);
    process.exit(1);
}
129
/**
 * Uploads the given serialized files using the given uploader.
 * @param uploader The uploader to use.
 * @param options The options to use for the upload.
 * @param serializedFiles The batches of serialized files to upload (async iterable of file arrays).
 */
async function uploadFilesUsingUploader(uploader, options, serializedFiles) {
    const overwrite = !!options.overwrite;
    const overwriteCommonFiles = !!options.overwriteCommonFiles;
    let filePattern;
    if (options.filePattern) {
        // No 'g' flag: a global regex keeps state in `lastIndex` between
        // `.test()` calls, which made the original skip matching files
        // pseudo-randomly when testing many paths in a row.
        filePattern = new RegExp(options.filePattern);
    }
    for await (let files of serializedFiles) {
        const batchSize = uploader.idealBatchSize ?? files.length;
        const totalBatches = Math.ceil(files.length / batchSize);
        console.log('Uploading', files.length, 'total files');
        console.log('Uploading in batches of', batchSize);
        let offset = 0;
        let batchNumber = 1;
        let batch = files.slice(offset, offset + batchSize);
        while (batch.length > 0) {
            console.log('Uploading batch', batchNumber, 'of', totalBatches);
            let writtenFiles = 0;
            const promises = batch.map(async (file) => {
                if (filePattern && !filePattern.test(file.path)) {
                    console.log('Skipping file:', file.path);
                    return;
                }
                // Only `available_translations.json` is treated as
                // non-common; every other file counts as a common file.
                const isAvailableTranslations = file.path.endsWith('available_translations.json');
                const isCommonFile = !isAvailableTranslations;
                if (await uploader.upload(file, overwrite || (overwriteCommonFiles && isCommonFile))) {
                    writtenFiles++;
                }
                else {
                    console.warn('File already exists:', file.path);
                    console.warn('Skipping file');
                }
                // Release streamed content so underlying handles don't leak.
                if (file.content instanceof node_stream_1.Readable) {
                    file.content.destroy();
                }
            });
            await Promise.all(promises);
            console.log('Wrote', writtenFiles, 'files');
            batchNumber++;
            offset += batchSize;
            batch = files.slice(offset, offset + batchSize);
        }
    }
}
File without changes
File without changes