@helloao/cli 0.0.4 → 0.0.6

This diff compares the published contents of the two package versions as they appear in their public registry, and is provided for informational purposes only.
package/db.js CHANGED
@@ -40,8 +40,8 @@ exports.getPrismaDbFromDir = getPrismaDbFromDir;
  exports.getDbFromDir = getDbFromDir;
  exports.getDb = getDb;
  exports.loadDatasets = loadDatasets;
- exports.serializeFilesForDataset = serializeFilesForDataset;
- exports.serializeFiles = serializeFiles;
+ exports.serializeFilesFromDatabase = serializeFilesFromDatabase;
+ exports.serializeDatasets = serializeDatasets;
  const prisma_gen_1 = require("./prisma-gen");
  const path_1 = __importStar(require("path"));
  const better_sqlite3_1 = __importDefault(require("better-sqlite3"));
@@ -51,10 +51,6 @@ const dataset_1 = require("@helloao/tools/generation/dataset");
  const api_1 = require("@helloao/tools/generation/api");
  const files_1 = require("./files");
  const hash_js_1 = require("hash.js");
- const lodash_1 = require("lodash");
- const path_2 = require("path");
- const stream_1 = require("stream");
- const base64_js_1 = require("base64-js");
  const cliPath = require.resolve('./index');
  const migrationsPath = path_1.default.resolve((0, path_1.dirname)(cliPath), 'migrations');
  /**
@@ -105,7 +101,9 @@ async function importTranslationFileBatch(db, files, parser, overwrite) {
  if (overwrite) {
  console.log('Overwriting existing translations.');
  }
- const changedFiles = overwrite ? files : getChangedOrNewInputFiles(db, files);
+ const changedFiles = overwrite
+ ? files
+ : getChangedOrNewInputFiles(db, files);
  console.log('Processing', changedFiles.length, 'changed files');
  console.log('Skipping', files.length - changedFiles.length, 'unchanged files');
  const output = (0, dataset_1.generateDataset)(changedFiles, parser);
@@ -122,7 +120,7 @@ async function importTranslationFileBatch(db, files, parser, overwrite) {
  */
  function getChangedOrNewInputFiles(db, files) {
  const fileExists = db.prepare('SELECT COUNT(*) as c FROM InputFile WHERE translationId = @translationId AND name = @name AND sha256 = @sha256;');
- return files.filter(f => {
+ return files.filter((f) => {
  const count = fileExists.get({
  translationId: f.metadata.translation.id,
  name: path_1.default.basename(f.name),
@@ -244,7 +242,7 @@ function insertTranslationBooks(db, translation, translationBooks) {
  name: book.name,
  commonName: book.commonName,
  numberOfChapters: book.chapters.length,
- bookOrder: book.order ?? 9999
+ bookOrder: book.order ?? 9999,
  });
  }
  });
@@ -431,7 +429,7 @@ function updateTranslationHashes(db, translations) {
  for (let book of books) {
  const chapters = getChapters.all({
  translationId: translation.id,
- bookId: book.id
+ bookId: book.id,
  });
  const bookSha = (0, hash_js_1.sha256)()
  .update(book.translationId)
@@ -457,7 +455,7 @@ function updateTranslationHashes(db, translations) {
  sha256: chapter.sha256,
  translationId: chapter.translationId,
  bookId: chapter.bookId,
- chapterNumber: chapter.number
+ chapterNumber: chapter.number,
  });
  }
  });
@@ -471,7 +469,7 @@ function updateTranslationHashes(db, translations) {
  updateBookHash.run({
  sha256: book.sha256,
  translationId: book.translationId,
- bookId: book.id
+ bookId: book.id,
  });
  }
  });
@@ -483,7 +481,7 @@ function updateTranslationHashes(db, translations) {
  for (let translation of translations) {
  updateTranslationHash.run({
  sha256: translation.sha256,
- translationId: translation.id
+ translationId: translation.id,
  });
  }
  });
@@ -506,8 +504,8 @@ function getPrismaDbFromDir(dir) {
  datasources: {
  db: {
  url: `file:${dbPath}`,
- }
- }
+ },
+ },
  });
  return prisma;
  }
@@ -529,10 +527,12 @@ async function getDb(dbPath) {
  "applied_steps_count" INTEGER UNSIGNED NOT NULL DEFAULT 0
  );`);
  const migrations = await (0, fs_extra_1.readdir)(migrationsPath);
- const appliedMigrations = db.prepare('SELECT * FROM _prisma_migrations;').all();
+ const appliedMigrations = db
+ .prepare('SELECT * FROM _prisma_migrations;')
+ .all();
  let missingMigrations = [];
  for (let migration of migrations) {
- if (appliedMigrations.some(m => m.migration_name === migration)) {
+ if (appliedMigrations.some((m) => m.migration_name === migration)) {
  continue;
  }
  if (path_1.default.extname(migration) !== '') {
@@ -574,7 +574,7 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  query.where = {
  id: {
  in: translationsToLoad,
- }
+ },
  };
  }
  const translations = await db.translation.findMany(query);
@@ -582,7 +582,7 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  break;
  }
  const dataset = {
- translations: []
+ translations: [],
  };
  for (let translation of translations) {
  const datasetTranslation = {
@@ -613,22 +613,19 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  const audioLinks = await db.chapterAudioUrl.findMany({
  where: {
  translationId: translation.id,
- bookId: book.id
+ bookId: book.id,
  },
- orderBy: [
- { number: 'asc' },
- { reader: 'asc' }
- ]
+ orderBy: [{ number: 'asc' }, { reader: 'asc' }],
  });
- const bookChapters = chapters.map(chapter => {
+ const bookChapters = chapters.map((chapter) => {
  return {
  chapter: JSON.parse(chapter.json),
  thisChapterAudioLinks: audioLinks
- .filter(link => link.number === chapter.number)
+ .filter((link) => link.number === chapter.number)
  .reduce((acc, link) => {
  acc[link.reader] = link.url;
  return acc;
- }, {})
+ }, {}),
  };
  });
  const datasetBook = {
@@ -643,112 +640,24 @@ async function* loadDatasets(db, translationsPerBatch = 50, translationsToLoad)
  }
  }
  /**
- * Generates and serializes the API files for the dataset that is stored in the database.
+ * Generates and serializes the API files for the datasets that are stored in the database.
  * Yields each batch of serialized files.
  * @param db The database that the dataset should be loaded from.
- * @param options The options to use for generating the API.
+ * @param options The options to use for serializing the files.
+ * @param apiOptions The options to use for generating the API files.
  * @param translationsPerBatch The number of translations that should be loaded and written per batch.
  * @param translations The list of translations that should be loaded. If not provided, all translations will be loaded.
  */
- async function* serializeFilesForDataset(db, options, translationsPerBatch = 50, translations) {
- yield* serializeFiles(loadDatasets(db, translationsPerBatch, translations), options);
+ function serializeFilesFromDatabase(db, options = {}, translationsPerBatch = 50, translations) {
+ return serializeDatasets(loadDatasets(db, translationsPerBatch, translations), options);
  }
  /**
- * Serializes the API files for the given datasets.
- * @param datasets The dataasets to serialize.
- * @param options The options to use for serializing the files.
+ * Generates and serializes the API files for the given datasets.
+ * Yields each batch of serialized files.
+ *
+ * @param datasets The datasets to serialize.
+ * @param options The options to use for generating and serializing the files.
  */
- async function* serializeFiles(datasets, options) {
- const mergableFiles = new Map();
- for await (let dataset of datasets) {
- const api = (0, api_1.generateApiForDataset)(dataset, options);
- const files = (0, api_1.generateFilesForApi)(api);
- console.log('Generated', files.length, 'files');
- let serializedFiles = [];
- for (let file of files) {
- if (file.mergable) {
- let arr = mergableFiles.get(file.path);
- if (!arr) {
- arr = [];
- mergableFiles.set(file.path, arr);
- }
- arr.push(file);
- continue;
- }
- const serialized = await transformFile(file.path, file.content);
- if (serialized) {
- serializedFiles.push(serialized);
- }
- }
- yield serializedFiles;
- }
- let serializedFiles = [];
- for (let [path, files] of mergableFiles) {
- let content = {};
- for (let file of files) {
- if (!content) {
- content = file.content;
- }
- else {
- content = (0, lodash_1.mergeWith)(content, file.content, (objValue, srcValue) => {
- if (Array.isArray(objValue)) {
- return objValue.concat(srcValue);
- }
- return undefined;
- });
- }
- }
- if (content) {
- const serialized = await transformFile(path, content);
- if (serialized) {
- serializedFiles.push(serialized);
- }
- }
- }
- yield serializedFiles;
- async function transformFile(path, content) {
- let fileContent;
- if (typeof content === 'function') {
- fileContent = await content();
- }
- else {
- fileContent = content;
- }
- const ext = (0, path_2.extname)(path);
- if (ext === '.json') {
- let json;
- if (fileContent instanceof ReadableStream) {
- json = '';
- for await (const chunk of stream_1.Readable.fromWeb(fileContent, {
- encoding: 'utf-8'
- })) {
- json += chunk;
- }
- }
- else {
- json = JSON.stringify(content, undefined, options.pretty ? 2 : undefined);
- }
- return {
- path,
- content: json,
- sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest()))
- };
- }
- else if (ext === '.mp3') {
- if (fileContent instanceof ReadableStream) {
- return {
- path,
- content: stream_1.Readable.fromWeb(fileContent),
- };
- }
- else {
- console.warn('Expected content to be a readable stream for', path);
- console.warn('Skipping file');
- return null;
- }
- }
- console.warn('Unknown file type', path);
- console.warn('Skipping file');
- return null;
- }
+ function serializeDatasets(datasets, options = {}) {
+ return (0, files_1.serializeOutputFiles)((0, api_1.generateOutputFilesFromDatasets)(datasets, options), options);
  }
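
The net effect of this change: db.js no longer serializes files itself. serializeFilesForDataset becomes serializeFilesFromDatabase, serializeFiles becomes serializeDatasets, and both now delegate to serializeOutputFiles in files.js. A minimal sketch of the new call shape, assuming direct imports from the package (the exportApiFiles wrapper and the '@helloao/cli/db' import path are illustrative):

    // Stream generated API files out of a translation database in batches.
    import { getPrismaDbFromDir, serializeFilesFromDatabase } from '@helloao/cli/db';

    async function exportApiFiles(dir: string): Promise<void> {
        const db = getPrismaDbFromDir(dir);
        // Yields SerializedFile[] batches; { pretty: true } turns on
        // pretty-printed JSON (SerializeApiOptions in files.d.ts).
        for await (const batch of serializeFilesFromDatabase(db, { pretty: true })) {
            for (const file of batch) {
                console.log('serialized', file.path);
            }
        }
    }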
package/files.d.ts CHANGED
@@ -1,5 +1,62 @@
- import { InputFile } from "@helloao/tools/generation/common-types";
- import { SerializedFile, Uploader } from "./db";
+ import { InputFile, OutputFile } from '@helloao/tools/generation/common-types';
+ import { Readable } from 'stream';
+ /**
+ * Defines an interface that contains information about a serialized file.
+ */
+ export interface Uploader {
+ /**
+ * Gets the ideal batch size for the uploader.
+ * Null if the uploader does not need batching.
+ */
+ idealBatchSize: number | null;
+ /**
+ * Uploads the given file.
+ * @param file The file to upload.
+ * @param overwrite Whether the file should be overwritten if it already exists.
+ * @returns True if the file was uploaded. False if the file was skipped due to already existing.
+ */
+ upload(file: SerializedFile, overwrite: boolean): Promise<boolean>;
+ /**
+ * Disposes resources that the uploader uses.
+ */
+ dispose?(): Promise<void>;
+ }
+ /**
+ * Defines an interface for a file that has been serialized.
+ */
+ export interface SerializedFile {
+ path: string;
+ content: string | Readable;
+ /**
+ * Gets the base64-encoded SHA256 hash of the content of the file.
+ */
+ sha256?(): string;
+ }
+ /**
+ * The options for serializing API files.
+ */
+ export interface SerializeApiOptions {
+ /**
+ * Whether the output should be pretty-printed.
+ */
+ pretty?: boolean;
+ }
+ /**
+ * Serializes the given output files into serialized files using the given options.
+ *
+ * Each iteration of the given files will be processed as a batch, and any mergable files will automatically be merged together and serialized in the final batch.
+ *
+ * @param files The files that should be serialized.
+ * @param options The options for serialization.
+ */
+ export declare function serializeOutputFiles(files: AsyncIterable<OutputFile[]>, options: SerializeApiOptions): AsyncGenerator<SerializedFile[]>;
+ /**
+ * Serializes the given output file content into a serialized file.
+ * @param path The path that the file should be saved to.
+ * @param content The content of the file.
+ * @param options The options for serialization.
+ */
+ export declare function serializeFile(path: string, content: OutputFile['content'], options: SerializeApiOptions): Promise<SerializedFile | null>;
  /**
  * Loads the files for the given translations.
  * @param dir The directory that the translations exist in.
@@ -53,4 +110,9 @@ export declare class ZipUploader implements Uploader {
  upload(file: SerializedFile, _overwrite: boolean): Promise<boolean>;
  dispose(): Promise<void>;
  }
+ /**
+ * Calculates the SHa256 hash of the given input files.
+ * @param files The files to hash.
+ */
+ export declare function hashInputFiles(files: InputFile[]): string;
  //# sourceMappingURL=files.d.ts.map
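
The Uploader and SerializedFile interfaces previously lived in db.d.ts and are now declared here. A minimal sketch of a custom uploader written against the new interface (the LoggingUploader class and its behavior are illustrative):

    import { SerializedFile, Uploader } from '@helloao/cli/files';

    class LoggingUploader implements Uploader {
        // null means the uploader has no preferred batch size.
        idealBatchSize: number | null = null;

        async upload(file: SerializedFile, overwrite: boolean): Promise<boolean> {
            console.log('would upload', file.path, 'overwrite:', overwrite);
            // true signals the file was uploaded rather than skipped.
            return true;
        }
    }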
package/files.js CHANGED
@@ -24,8 +24,11 @@ var __importStar = (this && this.__importStar) || function (mod) {
  };
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.ZipUploader = exports.FilesUploader = void 0;
+ exports.serializeOutputFiles = serializeOutputFiles;
+ exports.serializeFile = serializeFile;
  exports.loadTranslationsFiles = loadTranslationsFiles;
  exports.loadTranslationFiles = loadTranslationFiles;
+ exports.hashInputFiles = hashInputFiles;
  const promises_1 = require("fs/promises");
  const path_1 = require("path");
  const path = __importStar(require("path"));
@@ -34,6 +37,113 @@ const zip_js_1 = require("@zip.js/zip.js");
  const stream_1 = require("stream");
  const hash_js_1 = require("hash.js");
  const usx_parser_1 = require("@helloao/tools/parser/usx-parser");
+ const lodash_1 = require("lodash");
+ const base64_js_1 = require("base64-js");
+ /**
+ * Serializes the given output files into serialized files using the given options.
+ *
+ * Each iteration of the given files will be processed as a batch, and any mergable files will automatically be merged together and serialized in the final batch.
+ *
+ * @param files The files that should be serialized.
+ * @param options The options for serialization.
+ */
+ async function* serializeOutputFiles(files, options) {
+ const mergableFiles = new Map();
+ for await (let batch of files) {
+ let serializedFiles = [];
+ for (let file of batch) {
+ if (file.mergable) {
+ let arr = mergableFiles.get(file.path);
+ if (!arr) {
+ arr = [];
+ mergableFiles.set(file.path, arr);
+ }
+ arr.push(file);
+ continue;
+ }
+ const serialized = await serializeFile(file.path, file.content, options);
+ if (serialized) {
+ serializedFiles.push(serialized);
+ }
+ }
+ yield serializedFiles;
+ }
+ let serializedFiles = [];
+ for (let [path, files] of mergableFiles) {
+ let content = {};
+ for (let file of files) {
+ if (!content) {
+ content = file.content;
+ }
+ else {
+ content = (0, lodash_1.mergeWith)(content, file.content, (objValue, srcValue) => {
+ if (Array.isArray(objValue)) {
+ return objValue.concat(srcValue);
+ }
+ return undefined;
+ });
+ }
+ }
+ if (content) {
+ const serialized = await serializeFile(path, content, options);
+ if (serialized) {
+ serializedFiles.push(serialized);
+ }
+ }
+ }
+ yield serializedFiles;
+ }
+ /**
+ * Serializes the given output file content into a serialized file.
+ * @param path The path that the file should be saved to.
+ * @param content The content of the file.
+ * @param options The options for serialization.
+ */
+ async function serializeFile(path, content, options) {
+ let fileContent;
+ if (typeof content === 'function') {
+ fileContent = await content();
+ }
+ else {
+ fileContent = content;
+ }
+ const ext = (0, path_1.extname)(path);
+ if (ext === '.json') {
+ let json;
+ if (fileContent instanceof ReadableStream) {
+ json = '';
+ for await (const chunk of stream_1.Readable.fromWeb(fileContent, {
+ encoding: 'utf-8',
+ })) {
+ json += chunk;
+ }
+ }
+ else {
+ json = JSON.stringify(content, undefined, options.pretty ? 2 : undefined);
+ }
+ return {
+ path,
+ content: json,
+ sha256: () => (0, base64_js_1.fromByteArray)(new Uint8Array((0, hash_js_1.sha256)().update(json).digest())),
+ };
+ }
+ else if (ext === '.mp3') {
+ if (fileContent instanceof ReadableStream) {
+ return {
+ path,
+ content: stream_1.Readable.fromWeb(fileContent),
+ };
+ }
+ else {
+ console.warn('Expected content to be a readable stream for', path);
+ console.warn('Skipping file');
+ return null;
+ }
+ }
+ console.warn('Unknown file type', path);
+ console.warn('Skipping file');
+ return null;
+ }
  /**
  * Loads the files for the given translations.
  * @param dir The directory that the translations exist in.
@@ -60,12 +170,14 @@ async function loadTranslationFiles(translation) {
  return [];
  }
  let files = await (0, promises_1.readdir)(translation);
- let usfmFiles = files.filter(f => (0, path_1.extname)(f) === '.usfm' || (0, path_1.extname)(f) === '.usx' || (0, path_1.extname)(f) === '.json');
+ let usfmFiles = files.filter((f) => (0, path_1.extname)(f) === '.usfm' ||
+ (0, path_1.extname)(f) === '.usx' ||
+ (0, path_1.extname)(f) === '.json');
  if (usfmFiles.length <= 0) {
  translation = path.resolve(translation, 'usfm');
  if ((0, fs_extra_1.existsSync)(translation)) {
  files = await (0, promises_1.readdir)(translation);
- usfmFiles = files.filter(f => (0, path_1.extname)(f) === '.usfm');
+ usfmFiles = files.filter((f) => (0, path_1.extname)(f) === '.usfm');
  }
  }
  if (usfmFiles.length <= 0) {
@@ -79,7 +191,7 @@ async function loadTranslationFiles(translation) {
  }
  const filePath = path.resolve(translation, file);
  promises.push(loadFile(filePath, {
- translation: metadata
+ translation: metadata,
  }));
  }
  return await Promise.all(promises);
@@ -107,13 +219,15 @@ async function loadTranslationMetadata(translation) {
  licenseUrl: metadata.copyright.attribution_url,
  website: metadata.copyright.attribution_url,
  shortName: metadata.name.abbrev,
- direction: metadata.direction
+ direction: metadata.direction,
  };
  }
  else {
  const metadataJson = path.resolve(translation, 'metadata.json');
  if ((0, fs_extra_1.existsSync)(metadataJson)) {
- const data = await (0, promises_1.readFile)(metadataJson, { encoding: 'utf-8' });
+ const data = await (0, promises_1.readFile)(metadataJson, {
+ encoding: 'utf-8',
+ });
  return JSON.parse(data);
  }
  }
@@ -129,7 +243,7 @@ async function loadTranslationMetadata(translation) {
  async function loadFile(file, metadata) {
  const extension = path.extname(file);
  const content = await (0, promises_1.readFile)(file, {
- encoding: 'utf-8'
+ encoding: 'utf-8',
  });
  const hash = (0, hash_js_1.sha256)()
  .update(content)
@@ -230,3 +344,19 @@ function makeRelativePath(path) {
  }
  return path;
  }
+ /**
+ * Calculates the SHa256 hash of the given input files.
+ * @param files The files to hash.
+ */
+ function hashInputFiles(files) {
+ let sha = (0, hash_js_1.sha256)();
+ for (let file of files) {
+ if (file.sha256) {
+ sha.update(file.sha256);
+ }
+ else {
+ sha.update(file.content);
+ }
+ }
+ return sha.digest('hex');
+ }
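
serializeFile is the former transformFile helper from db.js promoted to a public export: JSON content is stringified (pretty-printed when options.pretty is set) and paired with a lazy base64 SHA-256 callback, while .mp3 content must arrive as a ReadableStream. A minimal usage sketch, assuming a direct import from the package (the path and content values are illustrative):

    import { serializeFile } from '@helloao/cli/files';

    async function demo(): Promise<void> {
        const serialized = await serializeFile(
            '/api/available_translations.json', // the .json extension selects the JSON branch
            { translations: [] },
            { pretty: true }
        );
        if (serialized) {
            // sha256() lazily computes the base64-encoded hash of the JSON text.
            console.log(serialized.path, serialized.sha256?.());
        }
    }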
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@helloao/cli",
- "version": "0.0.4",
+ "version": "0.0.6",
  "description": "A CLI and related tools for managing HelloAO's Free Bible API",
  "module": "index.js",
  "types": "index.d.ts",
@@ -10,7 +10,7 @@
  "author": "Kallyn Gowdy <kal@helloao.org>",
  "license": "MIT",
  "dependencies": {
- "@helloao/tools": "^0.0.4",
+ "@helloao/tools": "^0.0.5",
  "commander": "12.1.0",
  "@gracious.tech/fetch-client": "^0.7.0",
  "prisma": "^5.12.1",
@@ -23,7 +23,10 @@
  "hash.js": "1.1.7",
  "@zip.js/zip.js": "^2.6.40",
  "@aws-sdk/client-s3": "^3.609.0",
- "@aws-sdk/credential-providers": "^3.609.0"
+ "@aws-sdk/credential-providers": "^3.609.0",
+ "@smithy/types": "^3.3.0",
+ "@inquirer/prompts": "5.3.8",
+ "all-iso-language-codes": "1.0.17"
  },
  "files": [
  "/README.md",
@@ -37,7 +40,5 @@
  "prisma": {
  "schema": "./schema.prisma"
  },
- "scripts": {
- "postinstall": "prisma generate"
- }
+ "scripts": {}
  }
package/prisma-gen/index.js CHANGED
@@ -229,8 +229,8 @@ const fs = require('fs')
  config.dirname = __dirname
  if (!fs.existsSync(path.join(__dirname, 'schema.prisma'))) {
  const alternativePaths = [
- "prisma-gen",
- "",
+ "packages/helloao-cli/prisma-gen",
+ "helloao-cli/prisma-gen",
  ]
 
  const alternativePath = alternativePaths.find((altPath) => {
@@ -259,7 +259,7 @@ Object.assign(exports, Prisma)
 
  // file annotations for bundling tools to include these files
  path.join(__dirname, "query_engine-windows.dll.node");
- path.join(process.cwd(), "prisma-gen/query_engine-windows.dll.node")
+ path.join(process.cwd(), "packages/helloao-cli/prisma-gen/query_engine-windows.dll.node")
  // file annotations for bundling tools to include these files
  path.join(__dirname, "schema.prisma");
- path.join(process.cwd(), "prisma-gen/schema.prisma")
+ path.join(process.cwd(), "packages/helloao-cli/prisma-gen/schema.prisma")
package/s3.d.ts CHANGED
@@ -1,14 +1,44 @@
- import { SerializedFile, Uploader } from "./db";
+ import { SerializedFile, Uploader } from './files';
+ import { AwsCredentialIdentity, Provider } from '@smithy/types';
  export declare class S3Uploader implements Uploader {
  private _client;
  private _bucketName;
  private _keyPrefix;
  get idealBatchSize(): number;
- constructor(bucketName: string, keyPrefix: string, profile: string | null);
+ constructor(bucketName: string, keyPrefix: string, profile: string | null | AwsCredentialIdentity | Provider<AwsCredentialIdentity>);
  upload(file: SerializedFile, overwrite: boolean): Promise<boolean>;
  }
+ /**
+ * Parses the given S3 URL into its bucket name and object key.
+ * @param url The URL to parse.
+ */
  export declare function parseS3Url(url: string): {
  bucketName: string;
  objectKey: string;
  } | undefined;
+ /**
+ * Gets the HTTP URL for the given S3 URL.
+ * @param s3Url The S3 URL to convert.
+ */
+ export declare function getHttpUrl(s3Url: string): string | undefined;
+ /**
+ * A provider that gets the credentials directly from the user input.
+ */
+ export declare const askForAccessKeyProvider: Provider<AwsCredentialIdentity>;
+ /**
+ * Defines a provider that tries to get the credentials from the given list of providers.
+ * @param providers The providers to try.
+ */
+ export declare function providerChain(...providers: Provider<AwsCredentialIdentity>[]): Provider<AwsCredentialIdentity>;
+ /**
+ * Gets the default provider for the given options.
+ *
+ * Defaults first to using the provided access key and secret access key, then to using the given profile, then finally to asking the user for the access key.
+ * @param options
+ */
+ export declare function defaultProviderForOptions(options: {
+ accessKeyId?: string;
+ secretAccessKey?: string;
+ profile?: string;
+ }): Provider<AwsCredentialIdentity> | AwsCredentialIdentity;
  //# sourceMappingURL=s3.d.ts.map
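
Taken together, these declarations let callers construct an S3Uploader from explicit credentials, a named profile, or an interactive prompt fallback. A minimal wiring sketch (the bucket URL and profile name are illustrative, and the parsed object key is assumed to serve as the upload key prefix):

    import { S3Uploader, parseS3Url, defaultProviderForOptions } from '@helloao/cli/s3';

    const credentials = defaultProviderForOptions({ profile: 'default' });
    const parsed = parseS3Url('s3://my-bible-api/v1');
    if (parsed) {
        // The constructor now also accepts AwsCredentialIdentity or a
        // Provider<AwsCredentialIdentity>, not just a profile name.
        const uploader = new S3Uploader(parsed.bucketName, parsed.objectKey, credentials);
        console.log('ideal batch size:', uploader.idealBatchSize);
    }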