@helloao/cli 0.0.4 → 0.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/s3.js CHANGED
@@ -1,9 +1,13 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
- exports.S3Uploader = void 0;
+ exports.askForAccessKeyProvider = exports.S3Uploader = void 0;
  exports.parseS3Url = parseS3Url;
+ exports.getHttpUrl = getHttpUrl;
+ exports.providerChain = providerChain;
+ exports.defaultProviderForOptions = defaultProviderForOptions;
  const client_s3_1 = require("@aws-sdk/client-s3");
  const credential_providers_1 = require("@aws-sdk/credential-providers"); // ES6 import
+ const prompts_1 = require("@inquirer/prompts");
  class S3Uploader {
      _client;
      _bucketName;
@@ -15,28 +19,43 @@ class S3Uploader {
          this._bucketName = bucketName;
          this._keyPrefix = keyPrefix;
          this._client = new client_s3_1.S3Client({
-             credentials: (0, credential_providers_1.fromNodeProviderChain)({
-                 profile: profile ?? undefined,
-             })
+             credentials: !profile || typeof profile === 'string'
+                 ? (0, credential_providers_1.fromNodeProviderChain)({ profile: profile ?? undefined })
+                 : profile,
          });
      }
      async upload(file, overwrite) {
-         const path = file.path.startsWith('/') ? file.path.substring(1) : file.path;
+         const path = file.path.startsWith('/')
+             ? file.path.substring(1)
+             : file.path;
          const key = this._keyPrefix ? `${this._keyPrefix}/${path}` : path;
          const hash = file.sha256?.();
          const head = new client_s3_1.HeadObjectCommand({
              Bucket: this._bucketName,
              Key: key,
+             ChecksumMode: 'ENABLED',
          });
          if (hash || !overwrite) {
              try {
                  const existingFile = await this._client.send(head);
-                 if (hash && hash.localeCompare(existingFile?.ChecksumSHA256 ?? "", undefined, { sensitivity: 'base' }) === 0) {
-                     // File is already uploaded and matches the checksum.
-                     console.log(`[s3] Matches checksum: ${key}`);
-                     return false;
+                 let matches = true;
+                 if (hash && existingFile.ChecksumSHA256) {
+                     if (hash.localeCompare(existingFile?.ChecksumSHA256 ?? '', undefined, {
+                         sensitivity: 'base',
+                     }) === 0) {
+                         // File is already uploaded and matches the checksum.
+                         return false;
+                     }
+                     else {
+                         // File is already uploaded but the checksums don't match.
+                         matches = false;
+                     }
+                 }
+                 else {
+                     // File is already uploaded but the checksum is not available.
+                     console.log(`[s3] Checksum not available: ${key}`);
                  }
-                 if (!overwrite) {
+                 if (matches && !overwrite) {
                      return false;
                  }
              }
@@ -55,22 +74,96 @@ class S3Uploader {
              Body: file.content,
              ContentType: 'application/json',
              ChecksumSHA256: hash,
+             ChecksumAlgorithm: 'SHA256',
          });
          await this._client.send(command);
          return true;
      }
  }
  exports.S3Uploader = S3Uploader;
+ /**
+  * Parses the given S3 URL into its bucket name and object key.
+  * @param url The URL to parse.
+  */
  function parseS3Url(url) {
      const regex = /^s3:\/\/([a-z0-9.\-]+)(\/[^${}]*)?$/;
      const matched = url.match(regex);
      if (matched) {
          const arr = [...matched];
+         let key = arr[2] ?? '';
+         if (key.startsWith('/')) {
+             key = key.substring(1);
+         }
          return {
              bucketName: arr[1],
-             objectKey: arr[2] ?? "",
+             objectKey: key,
          };
      }
      return undefined;
  }
- ;
+ /**
+  * Gets the HTTP URL for the given S3 URL.
+  * @param s3Url The S3 URL to convert.
+  */
+ function getHttpUrl(s3Url) {
+     const parsed = parseS3Url(s3Url);
+     if (!parsed) {
+         return undefined;
+     }
+     const { bucketName, objectKey } = parsed;
+     if (objectKey) {
+         return `https://${bucketName}.s3.amazonaws.com/${objectKey}`;
+     }
+     else {
+         return `https://${bucketName}.s3.amazonaws.com`;
+     }
+ }
+ /**
+  * A provider that gets the credentials directly from the user input.
+  */
+ const askForAccessKeyProvider = async () => {
+     const accessKeyId = await (0, prompts_1.input)({
+         message: 'Enter your AWS Access Key ID',
+     });
+     const secretAccessKey = await (0, prompts_1.password)({
+         message: 'Enter your AWS Secret Access Key',
+     });
+     return {
+         accessKeyId,
+         secretAccessKey,
+     };
+ };
+ exports.askForAccessKeyProvider = askForAccessKeyProvider;
+ /**
+  * Defines a provider that tries to get the credentials from the given list of providers.
+  * @param providers The providers to try.
+  */
+ function providerChain(...providers) {
+     return async () => {
+         for (const provider of providers) {
+             const creds = await provider();
+             if (creds?.accessKeyId && creds?.secretAccessKey) {
+                 return creds;
+             }
+         }
+         return {
+             accessKeyId: '',
+             secretAccessKey: '',
+         };
+     };
+ }
+ /**
+  * Gets the default provider for the given options.
+  *
+  * Defaults first to using the provided access key and secret access key, then to using the given profile, then finally to asking the user for the access key.
+  * @param options
+  */
+ function defaultProviderForOptions(options) {
+     if (options.accessKeyId && options.secretAccessKey) {
+         return {
+             accessKeyId: options.accessKeyId,
+             secretAccessKey: options.secretAccessKey,
+         };
+     }
+     return providerChain((0, credential_providers_1.fromNodeProviderChain)({ profile: options.profile }), exports.askForAccessKeyProvider);
+ }
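
Note: taken together, the s3.js changes let callers supply S3 credentials three ways — an explicit access key pair, a named AWS profile, or an interactive prompt fallback — and mirror how uploads.js now constructs the uploader. A minimal usage sketch (the require path, bucket name, and option values are illustrative assumptions, not part of the diff):

    const { S3Uploader, parseS3Url, getHttpUrl, defaultProviderForOptions } = require('@helloao/cli/s3'); // assumed subpath

    const url = 's3://my-bucket/api'; // hypothetical bucket
    const parsed = parseS3Url(url);   // { bucketName: 'my-bucket', objectKey: 'api' }
    console.log(getHttpUrl(url));     // https://my-bucket.s3.amazonaws.com/api

    // Prefers explicit keys, then the profile, then falls back to prompting the user.
    const credentials = defaultProviderForOptions({ profile: 'default' });
    // The constructor now accepts either a profile name (string) or a credentials
    // object/provider, per the new ternary in the S3Client setup above.
    const uploader = new S3Uploader(parsed.bucketName, parsed.objectKey, credentials);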
package/uploads.d.ts CHANGED
@@ -1,9 +1,15 @@
- import { DatasetOutput } from "@helloao/tools/generation/dataset";
- export interface UploadApiOptions {
+ import { SerializedFile } from './db';
+ import { Uploader } from './files';
+ import { DatasetOutput } from '@helloao/tools/generation/dataset';
+ import { PrismaClient } from './prisma-gen';
+ import { GenerateApiOptions } from '@helloao/tools/generation/api';
+ export interface UploadApiFromDatabaseOptions extends UploadApiOptions, GenerateApiOptions {
      /**
       * The number of files to upload in each batch.
       */
-     batchSize: string;
+     batchSize: string | number;
+ }
+ export interface UploadApiOptions {
      /**
       * Whether to overwrite existing files.
       */
@@ -25,6 +31,14 @@ export interface UploadApiOptions {
       * The AWS profile to use for uploading to S3.
       */
      profile?: string;
+     /**
+      * The AWS access key ID to use for uploading to S3.
+      */
+     accessKeyId?: string;
+     /**
+      * The AWS secret access key to use for uploading to S3.
+      */
+     secretAccessKey?: string;
      /**
       * Whether to generate API files that use the common name instead of book IDs.
       */
@@ -40,15 +54,30 @@ export interface UploadApiOptions {
  }
  /**
   * Loads and generates the API files from the database and uploads them to the specified destination.
+  * @param db The database that the datasets should be loaded from.
   * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
   * @param options The options to use for the upload.
   */
- export declare function uploadApiFilesFromDatabase(dest: string, options: UploadApiOptions): Promise<void>;
+ export declare function uploadApiFilesFromDatabase(db: PrismaClient, dest: string, options: UploadApiFromDatabaseOptions): Promise<void>;
  /**
   * Generates the API files from the given datasets and uploads them to the specified destination.
   * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
   * @param options The options to use for the upload.
   * @param datasets The datasets to generate the API files from.
   */
- export declare function uploadApiFiles(dest: string, options: UploadApiOptions, datasets: AsyncIterable<DatasetOutput>): Promise<void>;
+ export declare function serializeAndUploadDatasets(dest: string, datasets: AsyncIterable<DatasetOutput>, options?: UploadApiOptions & GenerateApiOptions): Promise<void>;
+ /**
+  * Uploads the given serialized files to the specified destination.
+  * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
+  * @param options The options to use for the upload.
+  * @param datasets The datasets to generate the API files from.
+  */
+ export declare function uploadFiles(dest: string, options: UploadApiOptions, serializedFiles: AsyncIterable<SerializedFile[]>): Promise<void>;
+ /**
+  * Uploads the given serialized files using the given uploader.
+  * @param uploader The uploader to use.
+  * @param options The options to use for the upload.
+  * @param datasets The datasets to generate the API files from.
+  */
+ export declare function uploadFilesUsingUploader(uploader: Uploader, options: UploadApiOptions, serializedFiles: AsyncIterable<SerializedFile[]>): Promise<void>;
  //# sourceMappingURL=uploads.d.ts.map
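
For orientation, the reworked declarations move database ownership to the caller: uploadApiFilesFromDatabase now takes the PrismaClient that the 0.0.4 code created internally via getPrismaDbFromDir. A hedged sketch of the new call shape (require paths, destination, and option values are assumptions for illustration; getPrismaDbFromDir is the './db' export used by the old code):

    const { uploadApiFilesFromDatabase } = require('@helloao/cli/uploads'); // assumed subpath
    const { getPrismaDbFromDir } = require('@helloao/cli/db');              // assumed subpath

    async function main() {
        const db = getPrismaDbFromDir(process.cwd()); // the caller now manages the handle
        try {
            await uploadApiFilesFromDatabase(db, 's3://my-bucket/api', {
                batchSize: 50,      // now accepts a number as well as a string
                overwrite: false,
                profile: 'default', // or accessKeyId/secretAccessKey
            });
        } finally {
            await db.$disconnect(); // disconnection is also the caller's job now
        }
    }
    main();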
package/uploads.js CHANGED
@@ -1,27 +1,43 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.uploadApiFilesFromDatabase = uploadApiFilesFromDatabase;
- exports.uploadApiFiles = uploadApiFiles;
+ exports.serializeAndUploadDatasets = serializeAndUploadDatasets;
+ exports.uploadFiles = uploadFiles;
+ exports.uploadFilesUsingUploader = uploadFilesUsingUploader;
  const db_1 = require("./db");
- const db_2 = require("./db");
  const s3_1 = require("./s3");
  const path_1 = require("path");
  const files_1 = require("./files");
  const node_stream_1 = require("node:stream");
  /**
   * Loads and generates the API files from the database and uploads them to the specified destination.
+  * @param db The database that the datasets should be loaded from.
   * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
   * @param options The options to use for the upload.
   */
- async function uploadApiFilesFromDatabase(dest, options) {
-     const db = (0, db_2.getPrismaDbFromDir)(process.cwd());
-     try {
-         const pageSize = parseInt(options.batchSize);
-         await uploadApiFiles(dest, options, (0, db_1.loadDatasets)(db, pageSize, options.translations));
+ async function uploadApiFilesFromDatabase(db, dest, options) {
+     if (options.overwrite) {
+         console.log('Overwriting existing files');
      }
-     finally {
-         db.$disconnect();
+     if (options.overwriteCommonFiles) {
+         console.log('Overwriting only common files');
+     }
+     if (!!options.filePattern) {
+         console.log('Using file pattern:', options.filePattern);
+     }
+     if (options.translations) {
+         console.log('Generating for specific translations:', options.translations);
+     }
+     else {
+         console.log('Generating for all translations');
+     }
+     if (options.pretty) {
+         console.log('Generating pretty-printed JSON files');
      }
+     const pageSize = typeof options.batchSize === 'number'
+         ? options.batchSize
+         : parseInt(options.batchSize);
+     await serializeAndUploadDatasets(dest, (0, db_1.loadDatasets)(db, pageSize, options.translations), options);
  }
  /**
   * Generates the API files from the given datasets and uploads them to the specified destination.
@@ -29,7 +45,7 @@ async function uploadApiFilesFromDatabase(dest, options) {
   * @param options The options to use for the upload.
   * @param datasets The datasets to generate the API files from.
   */
- async function uploadApiFiles(dest, options, datasets) {
+ async function serializeAndUploadDatasets(dest, datasets, options = {}) {
      const overwrite = !!options.overwrite;
      if (overwrite) {
          console.log('Overwriting existing files');
@@ -52,6 +68,18 @@ async function uploadApiFiles(dest, options, datasets) {
      if (options.pretty) {
          console.log('Generating pretty-printed JSON files');
      }
+     const files = (0, db_1.serializeDatasets)(datasets, {
+         ...options,
+     });
+     await uploadFiles(dest, options, files);
+ }
+ /**
+  * Uploads the given serialized files to the specified destination.
+  * @param dest The destination to upload the API files to. Supported destinations are S3, zip files, and local directories.
+  * @param options The options to use for the upload.
+  * @param datasets The datasets to generate the API files from.
+  */
+ async function uploadFiles(dest, options, serializedFiles) {
      let uploader;
      if (dest.startsWith('s3://')) {
          console.log('Uploading to S3');
@@ -64,7 +92,7 @@ async function uploadApiFiles(dest, options, datasets) {
          if (!s3Url.bucketName) {
              throw new Error(`Invalid S3 URL: ${url}\nUnable to determine bucket name`);
          }
-         uploader = new s3_1.S3Uploader(s3Url.bucketName, s3Url.objectKey, options.profile ?? null);
+         uploader = new s3_1.S3Uploader(s3Url.bucketName, s3Url.objectKey, (0, s3_1.defaultProviderForOptions)(options));
      }
      else if (dest.startsWith('console://')) {
          console.log('Uploading to console');
@@ -74,7 +102,7 @@ async function uploadApiFiles(dest, options, datasets) {
                  console.log(file.path);
                  console.log(file.content);
                  return true;
-             }
+             },
          };
      }
      else if ((0, path_1.extname)(dest) === '.zip') {
@@ -90,48 +118,7 @@ async function uploadApiFiles(dest, options, datasets) {
          process.exit(1);
      }
      try {
-         for await (let files of (0, db_1.serializeFiles)(datasets, {
-             useCommonName: !!options.useCommonName,
-             generateAudioFiles: !!options.generateAudioFiles,
-             pretty: !!options.pretty,
-         })) {
-             const batchSize = uploader.idealBatchSize ?? files.length;
-             const totalBatches = Math.ceil(files.length / batchSize);
-             console.log('Uploading', files.length, 'total files');
-             console.log('Uploading in batches of', batchSize);
-             let offset = 0;
-             let batchNumber = 1;
-             let batch = files.slice(offset, offset + batchSize);
-             while (batch.length > 0) {
-                 console.log('Uploading batch', batchNumber, 'of', totalBatches);
-                 let writtenFiles = 0;
-                 const promises = batch.map(async (file) => {
-                     if (filePattern) {
-                         if (!filePattern.test(file.path)) {
-                             console.log('Skipping file:', file.path);
-                             return;
-                         }
-                     }
-                     const isAvailableTranslations = file.path.endsWith('available_translations.json');
-                     const isCommonFile = !isAvailableTranslations;
-                     if (await uploader.upload(file, overwrite || (overwriteCommonFiles && isCommonFile))) {
-                         writtenFiles++;
-                     }
-                     else {
-                         console.warn('File already exists:', file.path);
-                         console.warn('Skipping file');
-                     }
-                     if (file.content instanceof node_stream_1.Readable) {
-                         file.content.destroy();
-                     }
-                 });
-                 await Promise.all(promises);
-                 console.log('Wrote', writtenFiles, 'files');
-                 batchNumber++;
-                 offset += batchSize;
-                 batch = files.slice(offset, offset + batchSize);
-             }
-         }
+         await uploadFilesUsingUploader(uploader, options, serializedFiles);
      }
      finally {
          if (uploader && uploader.dispose) {
@@ -139,3 +126,55 @@ async function uploadApiFiles(dest, options, datasets) {
          }
      }
  }
+ /**
+  * Uploads the given serialized files using the given uploader.
+  * @param uploader The uploader to use.
+  * @param options The options to use for the upload.
+  * @param datasets The datasets to generate the API files from.
+  */
+ async function uploadFilesUsingUploader(uploader, options, serializedFiles) {
+     const overwrite = !!options.overwrite;
+     const overwriteCommonFiles = !!options.overwriteCommonFiles;
+     let filePattern;
+     if (!!options.filePattern) {
+         filePattern = new RegExp(options.filePattern, 'g');
+     }
+     for await (let files of serializedFiles) {
+         const batchSize = uploader.idealBatchSize ?? files.length;
+         const totalBatches = Math.ceil(files.length / batchSize);
+         console.log('Uploading', files.length, 'total files');
+         console.log('Uploading in batches of', batchSize);
+         let offset = 0;
+         let batchNumber = 1;
+         let batch = files.slice(offset, offset + batchSize);
+         while (batch.length > 0) {
+             console.log('Uploading batch', batchNumber, 'of', totalBatches);
+             let writtenFiles = 0;
+             const promises = batch.map(async (file) => {
+                 if (filePattern) {
+                     if (!filePattern.test(file.path)) {
+                         console.log('Skipping file:', file.path);
+                         return;
+                     }
+                 }
+                 const isAvailableTranslations = file.path.endsWith('available_translations.json');
+                 const isCommonFile = !isAvailableTranslations;
+                 if (await uploader.upload(file, overwrite || (overwriteCommonFiles && isCommonFile))) {
+                     writtenFiles++;
+                 }
+                 else {
+                     console.warn('File already exists:', file.path);
+                     console.warn('Skipping file');
+                 }
+                 if (file.content instanceof node_stream_1.Readable) {
+                     file.content.destroy();
+                 }
+             });
+             await Promise.all(promises);
+             console.log('Wrote', writtenFiles, 'files');
+             batchNumber++;
+             offset += batchSize;
+             batch = files.slice(offset, offset + batchSize);
+         }
+     }
+ }
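
Since uploadFilesUsingUploader is now exported, the batching loop can drive any object with an upload method (idealBatchSize is optional, and dispose is only consulted by uploadFiles). A minimal sketch of a custom uploader under those assumptions; the require path and file shapes are inferred from the loop above, not stated by the package:

    const { uploadFilesUsingUploader } = require('@helloao/cli/uploads'); // assumed subpath

    // An async iterable yielding one batch of serialized files (shape inferred).
    async function* batches() {
        yield [{ path: '/api/available_translations.json', content: '{}' }];
    }

    const logUploader = {
        idealBatchSize: 10, // optional; the loop falls back to files.length
        async upload(file, overwrite) {
            console.log(overwrite ? '[force]' : '[once]', file.path);
            return true; // a truthy result counts the file as written
        },
    };

    uploadFilesUsingUploader(logUploader, { overwrite: true }, batches());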