sliftutils 1.2.1 → 1.2.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/index.d.ts CHANGED
@@ -1235,6 +1235,107 @@ declare module "sliftutils/storage/TransactionStorage" {
1235
1235
 
1236
1236
  }
1237
1237
 
1238
+ declare module "sliftutils/storage/backblaze" {
1239
+ /// <reference types="node" />
1240
+ /// <reference types="node" />
1241
+ export declare class ArchivesBackblaze {
1242
+ private config;
1243
+ constructor(config: {
1244
+ bucketName: string;
1245
+ public?: boolean;
1246
+ immutable?: boolean;
1247
+ cacheTime?: number;
1248
+ });
1249
+ private bucketName;
1250
+ private bucketId;
1251
+ private logging;
1252
+ enableLogging(): void;
1253
+ private log;
1254
+ getDebugName(): string;
1255
+ private getBucketAPI;
1256
+ private last503Reset;
1257
+ private apiRetryLogic;
1258
+ get(fileName: string, config?: {
1259
+ range?: {
1260
+ start: number;
1261
+ end: number;
1262
+ };
1263
+ retryCount?: number;
1264
+ }): Promise<Buffer | undefined>;
1265
+ set(fileName: string, data: Buffer): Promise<void>;
1266
+ del(fileName: string): Promise<void>;
1267
+ setLargeFile(config: {
1268
+ path: string;
1269
+ getNextData(): Promise<Buffer | undefined>;
1270
+ }): Promise<void>;
1271
+ getInfo(fileName: string): Promise<{
1272
+ writeTime: number;
1273
+ size: number;
1274
+ } | undefined>;
1275
+ find(prefix: string, config?: {
1276
+ shallow?: boolean;
1277
+ type: "files" | "folders";
1278
+ }): Promise<string[]>;
1279
+ findInfo(prefix: string, config?: {
1280
+ shallow?: boolean;
1281
+ type: "files" | "folders";
1282
+ }): Promise<{
1283
+ path: string;
1284
+ createTime: number;
1285
+ size: number;
1286
+ }[]>;
1287
+ assertPathValid(path: string): Promise<void>;
1288
+ getURL(path: string): Promise<string>;
1289
+ getDownloadAuthorization(config: {
1290
+ fileNamePrefix?: string;
1291
+ validDurationInSeconds: number;
1292
+ b2ContentDisposition?: string;
1293
+ b2ContentLanguage?: string;
1294
+ b2Expires?: string;
1295
+ b2CacheControl?: string;
1296
+ b2ContentEncoding?: string;
1297
+ b2ContentType?: string;
1298
+ }): Promise<{
1299
+ bucketId: string;
1300
+ fileNamePrefix: string;
1301
+ authorizationToken: string;
1302
+ }>;
1303
+ }
1304
+ export declare const getArchivesBackblaze: {
1305
+ (key: string): ArchivesBackblaze;
1306
+ clear(key: string): void;
1307
+ clearAll(): void;
1308
+ forceSet(key: string, value: ArchivesBackblaze): void;
1309
+ getAllKeys(): string[];
1310
+ get(key: string): ArchivesBackblaze | undefined;
1311
+ };
1312
+ export declare const getArchivesBackblazePrivateImmutable: {
1313
+ (key: string): ArchivesBackblaze;
1314
+ clear(key: string): void;
1315
+ clearAll(): void;
1316
+ forceSet(key: string, value: ArchivesBackblaze): void;
1317
+ getAllKeys(): string[];
1318
+ get(key: string): ArchivesBackblaze | undefined;
1319
+ };
1320
+ export declare const getArchivesBackblazePublicImmutable: {
1321
+ (key: string): ArchivesBackblaze;
1322
+ clear(key: string): void;
1323
+ clearAll(): void;
1324
+ forceSet(key: string, value: ArchivesBackblaze): void;
1325
+ getAllKeys(): string[];
1326
+ get(key: string): ArchivesBackblaze | undefined;
1327
+ };
1328
+ export declare const getArchivesBackblazePublic: {
1329
+ (key: string): ArchivesBackblaze;
1330
+ clear(key: string): void;
1331
+ clearAll(): void;
1332
+ forceSet(key: string, value: ArchivesBackblaze): void;
1333
+ getAllKeys(): string[];
1334
+ get(key: string): ArchivesBackblaze | undefined;
1335
+ };
1336
+
1337
+ }
1338
+
1238
1339
  declare module "sliftutils/storage/fileSystemPointer" {
1239
1340
  export type FileSystemPointer = string;
1240
1341
  export declare function storeFileSystemPointer(config: {
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "sliftutils",
3
- "version": "1.2.1",
3
+ "version": "1.2.2",
4
4
  "main": "index.js",
5
5
  "license": "MIT",
6
6
  "files": [
@@ -0,0 +1,97 @@
1
+ /// <reference types="node" />
2
+ /// <reference types="node" />
3
+ export declare class ArchivesBackblaze {
4
+ private config;
5
+ constructor(config: {
6
+ bucketName: string;
7
+ public?: boolean;
8
+ immutable?: boolean;
9
+ cacheTime?: number;
10
+ });
11
+ private bucketName;
12
+ private bucketId;
13
+ private logging;
14
+ enableLogging(): void;
15
+ private log;
16
+ getDebugName(): string;
17
+ private getBucketAPI;
18
+ private last503Reset;
19
+ private apiRetryLogic;
20
+ get(fileName: string, config?: {
21
+ range?: {
22
+ start: number;
23
+ end: number;
24
+ };
25
+ retryCount?: number;
26
+ }): Promise<Buffer | undefined>;
27
+ set(fileName: string, data: Buffer): Promise<void>;
28
+ del(fileName: string): Promise<void>;
29
+ setLargeFile(config: {
30
+ path: string;
31
+ getNextData(): Promise<Buffer | undefined>;
32
+ }): Promise<void>;
33
+ getInfo(fileName: string): Promise<{
34
+ writeTime: number;
35
+ size: number;
36
+ } | undefined>;
37
+ find(prefix: string, config?: {
38
+ shallow?: boolean;
39
+ type: "files" | "folders";
40
+ }): Promise<string[]>;
41
+ findInfo(prefix: string, config?: {
42
+ shallow?: boolean;
43
+ type: "files" | "folders";
44
+ }): Promise<{
45
+ path: string;
46
+ createTime: number;
47
+ size: number;
48
+ }[]>;
49
+ assertPathValid(path: string): Promise<void>;
50
+ getURL(path: string): Promise<string>;
51
+ getDownloadAuthorization(config: {
52
+ fileNamePrefix?: string;
53
+ validDurationInSeconds: number;
54
+ b2ContentDisposition?: string;
55
+ b2ContentLanguage?: string;
56
+ b2Expires?: string;
57
+ b2CacheControl?: string;
58
+ b2ContentEncoding?: string;
59
+ b2ContentType?: string;
60
+ }): Promise<{
61
+ bucketId: string;
62
+ fileNamePrefix: string;
63
+ authorizationToken: string;
64
+ }>;
65
+ }
66
+ export declare const getArchivesBackblaze: {
67
+ (key: string): ArchivesBackblaze;
68
+ clear(key: string): void;
69
+ clearAll(): void;
70
+ forceSet(key: string, value: ArchivesBackblaze): void;
71
+ getAllKeys(): string[];
72
+ get(key: string): ArchivesBackblaze | undefined;
73
+ };
74
+ export declare const getArchivesBackblazePrivateImmutable: {
75
+ (key: string): ArchivesBackblaze;
76
+ clear(key: string): void;
77
+ clearAll(): void;
78
+ forceSet(key: string, value: ArchivesBackblaze): void;
79
+ getAllKeys(): string[];
80
+ get(key: string): ArchivesBackblaze | undefined;
81
+ };
82
+ export declare const getArchivesBackblazePublicImmutable: {
83
+ (key: string): ArchivesBackblaze;
84
+ clear(key: string): void;
85
+ clearAll(): void;
86
+ forceSet(key: string, value: ArchivesBackblaze): void;
87
+ getAllKeys(): string[];
88
+ get(key: string): ArchivesBackblaze | undefined;
89
+ };
90
+ export declare const getArchivesBackblazePublic: {
91
+ (key: string): ArchivesBackblaze;
92
+ clear(key: string): void;
93
+ clearAll(): void;
94
+ forceSet(key: string, value: ArchivesBackblaze): void;
95
+ getAllKeys(): string[];
96
+ get(key: string): ArchivesBackblaze | undefined;
97
+ };
@@ -0,0 +1,915 @@
1
+ import { cache, lazy } from "socket-function/src/caching";
2
+ import fs from "fs";
3
+ import os from "os";
4
+ import { isNode, sort, timeInHour, timeInMinute } from "socket-function/src/misc";
5
+ import { delay } from "socket-function/src/batching";
6
+ import { formatNumber, formatTime } from "socket-function/src/formatting/format";
7
+ import { blue, green, magenta } from "socket-function/src/formatting/logColors";
8
+ import debugbreak from "debugbreak";
9
+ import dns from "dns";
10
+ import { getSecret } from "../misc/getSecret";
11
+ import { httpsRequest } from "socket-function/src/https";
12
+
13
+ type BackblazeCreds = {
14
+ applicationKeyId: string;
15
+ applicationKey: string;
16
+ };
17
+
18
+ let backblazeCreds = lazy(async (): Promise<BackblazeCreds> => {
19
+ let keyId = await getSecret("backblaze.json.applicationKeyId");
20
+ let key = await getSecret("backblaze.json.applicationKey");
21
+ return {
22
+ applicationKeyId: keyId,
23
+ applicationKey: key,
24
+ };
25
+ });
26
+ const getAPI = lazy(async () => {
27
+ let creds = await backblazeCreds();
28
+
29
+ // NOTE: On errors, our retry code resets this lazy, so we DO get new authorize when needed.
30
+ // TODO: Maybe we should get new authorization periodically at well?
31
+ let authorizeRaw = await httpsRequest("https://api.backblazeb2.com/b2api/v2/b2_authorize_account", undefined, "GET", undefined, {
32
+ headers: {
33
+ Authorization: "Basic " + Buffer.from(creds.applicationKeyId + ":" + creds.applicationKey).toString("base64"),
34
+ }
35
+ });
36
+
37
+ let auth = JSON.parse(authorizeRaw.toString()) as {
38
+ accountId: string;
39
+ authorizationToken: string;
40
+ apiUrl: string;
41
+ downloadUrl: string;
42
+ allowed: {
43
+ bucketId: string;
44
+ bucketName: string;
45
+ capabilities: string[];
46
+ namePrefix: string;
47
+ }[];
48
+ };
49
+
50
+ function createB2Function<Arg, Result>(name: string, type: "POST" | "GET", noAccountId?: "noAccountId"): (arg: Arg) => Promise<Result> {
51
+ return async (arg: Arg) => {
52
+ if (!noAccountId) {
53
+ arg = { accountId: auth.accountId, ...arg };
54
+ }
55
+ try {
56
+ let url = auth.apiUrl + "/b2api/v2/" + name;
57
+ let time = Date.now();
58
+ let result = await httpsRequest(url, Buffer.from(JSON.stringify(arg)), type, undefined, {
59
+ headers: {
60
+ Authorization: auth.authorizationToken,
61
+ }
62
+ });
63
+ return JSON.parse(result.toString());
64
+ } catch (e: any) {
65
+ throw new Error(`Error in ${name}, arg ${JSON.stringify(arg).slice(0, 1000)}: ${e.stack}`);
66
+ }
67
+ };
68
+ }
69
+
70
+ const createBucket = createB2Function<{
71
+ bucketName: string;
72
+ bucketType: "allPrivate" | "allPublic";
73
+ lifecycleRules?: any[];
74
+ corsRules?: unknown[];
75
+ bucketInfo?: {
76
+ [key: string]: unknown;
77
+ };
78
+ }, {
79
+ accountId: string;
80
+ bucketId: string;
81
+ bucketName: string;
82
+ bucketType: "allPrivate" | "allPublic";
83
+ bucketInfo: {
84
+ lifecycleRules: any[];
85
+ };
86
+ corsRules: any[];
87
+ lifecycleRules: any[];
88
+ revision: number;
89
+ }>("b2_create_bucket", "POST");
90
+
91
+ const updateBucket = createB2Function<{
92
+ accountId: string;
93
+ bucketId: string;
94
+ bucketType?: "allPrivate" | "allPublic";
95
+ lifecycleRules?: any[];
96
+ bucketInfo?: {
97
+ [key: string]: unknown;
98
+ };
99
+ corsRules?: unknown[];
100
+ }, {
101
+ accountId: string;
102
+ bucketId: string;
103
+ bucketName: string;
104
+ bucketType: "allPrivate" | "allPublic";
105
+ bucketInfo: {
106
+ lifecycleRules: any[];
107
+ };
108
+ corsRules: any[];
109
+ lifecycleRules: any[];
110
+ revision: number;
111
+ }>("b2_update_bucket", "POST");
112
+
113
+ // https://www.backblaze.com/apidocs/b2-update-bucket
114
+ // TODO: b2_update_bucket, so we can update CORS, etc
115
+
116
+ const listBuckets = createB2Function<{
117
+ bucketName?: string;
118
+ }, {
119
+ buckets: {
120
+ accountId: string;
121
+ bucketId: string;
122
+ bucketName: string;
123
+ bucketType: "allPrivate" | "allPublic";
124
+ bucketInfo: {
125
+ lifecycleRules: any[];
126
+ };
127
+ corsRules: any[];
128
+ lifecycleRules: any[];
129
+ revision: number;
130
+ }[];
131
+ }>("b2_list_buckets", "POST");
132
+
133
+ function encodePath(path: string) {
134
+ // Preserve slashes, but encode everything else
135
+ path = path.split("/").map(encodeURIComponent).join("/");
136
+ if (path.startsWith("/")) path = "%2F" + path.slice(1);
137
+ if (path.endsWith("/")) path = path.slice(0, -1) + "%2F";
138
+ // NOTE: For some reason, this won't render in the web UI correctly. BUT, it'll
139
+ // work get get/set and find
140
+ // - ALSO, it seems to add duplicate files? This might also be a web UI thing. It
141
+ // seems to work though.
142
+ while (path.includes("//")) {
143
+ path = path.replaceAll("//", "/%2F");
144
+ }
145
+ return path;
146
+ }
147
+
148
+ async function downloadFileByName(config: {
149
+ bucketName: string;
150
+ fileName: string;
151
+ range?: { start: number; end: number; };
152
+ }) {
153
+ let fileName = encodePath(config.fileName);
154
+
155
+ let result = await httpsRequest(auth.apiUrl + "/file/" + config.bucketName + "/" + fileName, Buffer.from(JSON.stringify({
156
+ accountId: auth.accountId,
157
+ responseType: "arraybuffer",
158
+ })), "GET", undefined, {
159
+ headers: Object.fromEntries(Object.entries({
160
+ Authorization: auth.authorizationToken,
161
+ "Content-Type": "application/json",
162
+ Range: config.range ? `bytes=${config.range.start}-${config.range.end - 1}` : undefined,
163
+ }).filter(x => x[1] !== undefined)),
164
+ });
165
+ return result;
166
+ }
167
+
168
+ // Oh... apparently, we can't reuse these? Huh...
169
+ const getUploadURL = (async (bucketId: string) => {
170
+ //setTimeout(() => getUploadURL.clear(bucketId), timeInHour * 1);
171
+ let getUploadUrlRaw = await httpsRequest(auth.apiUrl + "/b2api/v2/b2_get_upload_url?bucketId=" + bucketId, undefined, "GET", undefined, {
172
+ headers: {
173
+ Authorization: auth.authorizationToken,
174
+ }
175
+ });
176
+
177
+ return JSON.parse(getUploadUrlRaw.toString()) as {
178
+ bucketId: string;
179
+ uploadUrl: string;
180
+ authorizationToken: string;
181
+ };
182
+ });
183
+
184
+ async function uploadFile(config: {
185
+ bucketId: string;
186
+ fileName: string;
187
+ data: Buffer;
188
+ }) {
189
+ let getUploadUrl = await getUploadURL(config.bucketId);
190
+
191
+ await httpsRequest(getUploadUrl.uploadUrl, config.data, "POST", undefined, {
192
+ headers: {
193
+ Authorization: getUploadUrl.authorizationToken,
194
+ "X-Bz-File-Name": encodePath(config.fileName),
195
+ "Content-Type": "b2/x-auto",
196
+ "X-Bz-Content-Sha1": "do_not_verify",
197
+ "Content-Length": config.data.length + "",
198
+ }
199
+ });
200
+ }
201
+
202
+ const hideFile = createB2Function<{
203
+ bucketId: string;
204
+ fileName: string;
205
+ }, {}>("b2_hide_file", "POST", "noAccountId");
206
+
207
+ const getFileInfo = createB2Function<{
208
+ bucketName: string;
209
+ fileId: string;
210
+ }, {
211
+ fileId: string;
212
+ fileName: string;
213
+ accountId: string;
214
+ bucketId: string;
215
+ contentLength: number;
216
+ contentSha1: string;
217
+ contentType: string;
218
+ fileInfo: {
219
+ src_last_modified_millis: number;
220
+ };
221
+ action: string;
222
+ uploadTimestamp: number;
223
+ }>("b2_get_file_info", "POST");
224
+
225
+ const listFileNames = createB2Function<{
226
+ bucketId: string;
227
+ prefix: string;
228
+ startFileName?: string;
229
+ maxFileCount?: number;
230
+ delimiter?: string;
231
+ }, {
232
+ files: {
233
+ fileId: string;
234
+ fileName: string;
235
+ accountId: string;
236
+ bucketId: string;
237
+ contentLength: number;
238
+ contentSha1: string;
239
+ contentType: string;
240
+ fileInfo: {
241
+ src_last_modified_millis: number;
242
+ };
243
+ action: string;
244
+ uploadTimestamp: number;
245
+ }[];
246
+ nextFileName: string;
247
+ }>("b2_list_file_names", "POST", "noAccountId");
248
+
249
+ const copyFile = createB2Function<{
250
+ sourceFileId: string;
251
+ fileName: string;
252
+ destinationBucketId: string;
253
+ }, {}>("b2_copy_file", "POST", "noAccountId");
254
+
255
+ const startLargeFile = createB2Function<{
256
+ bucketId: string;
257
+ fileName: string;
258
+ contentType: string;
259
+ fileInfo: { [key: string]: string };
260
+ }, {
261
+ fileId: string;
262
+ fileName: string;
263
+ accountId: string;
264
+ bucketId: string;
265
+ contentType: string;
266
+ fileInfo: any;
267
+ uploadTimestamp: number;
268
+ }>("b2_start_large_file", "POST", "noAccountId");
269
+
270
+ // Apparently we can't reuse these?
271
+ const getUploadPartURL = (async (fileId: string) => {
272
+ let uploadPartRaw = await httpsRequest(auth.apiUrl + "/b2api/v2/b2_get_upload_part_url?fileId=" + fileId, undefined, "GET", undefined, {
273
+ headers: {
274
+ Authorization: auth.authorizationToken,
275
+ }
276
+ });
277
+ return JSON.parse(uploadPartRaw.toString()) as {
278
+ fileId: string;
279
+ partNumber: number;
280
+ uploadUrl: string;
281
+ authorizationToken: string;
282
+ };
283
+ });
284
+ async function uploadPart(config: {
285
+ fileId: string;
286
+ partNumber: number;
287
+ data: Buffer;
288
+ sha1: string;
289
+ }): Promise<{
290
+ fileId: string;
291
+ partNumber: number;
292
+ contentLength: number;
293
+ contentSha1: string;
294
+ }> {
295
+ let uploadPart = await getUploadPartURL(config.fileId);
296
+
297
+ let result = await httpsRequest(uploadPart.uploadUrl, config.data, "POST", undefined, {
298
+ headers: {
299
+ Authorization: uploadPart.authorizationToken,
300
+ "X-Bz-Part-Number": config.partNumber + "",
301
+ "X-Bz-Content-Sha1": config.sha1,
302
+ "Content-Length": config.data.length + "",
303
+
304
+ }
305
+ });
306
+ return JSON.parse(result.toString());
307
+ }
308
+
309
+ const finishLargeFile = createB2Function<{
310
+ fileId: string;
311
+ partSha1Array: string[];
312
+ }, {
313
+ fileId: string;
314
+ fileName: string;
315
+ accountId: string;
316
+ bucketId: string;
317
+ contentLength: number;
318
+ contentSha1: string;
319
+ contentType: string;
320
+ fileInfo: any;
321
+ uploadTimestamp: number;
322
+ }>("b2_finish_large_file", "POST", "noAccountId");
323
+
324
+ const cancelLargeFile = createB2Function<{
325
+ fileId: string;
326
+ }, {}>("b2_cancel_large_file", "POST", "noAccountId");
327
+
328
+ const getDownloadAuthorization = createB2Function<{
329
+ bucketId: string;
330
+ fileNamePrefix: string;
331
+ validDurationInSeconds: number;
332
+ b2ContentDisposition?: string;
333
+ b2ContentLanguage?: string;
334
+ b2Expires?: string;
335
+ b2CacheControl?: string;
336
+ b2ContentEncoding?: string;
337
+ b2ContentType?: string;
338
+ }, {
339
+ bucketId: string;
340
+ fileNamePrefix: string;
341
+ authorizationToken: string;
342
+ }>("b2_get_download_authorization", "POST", "noAccountId");
343
+
344
+ async function getDownloadURL(path: string) {
345
+ if (!path.startsWith("/")) {
346
+ path = "/" + path;
347
+ }
348
+ return auth.downloadUrl + path;
349
+ }
350
+
351
+
352
+ return {
353
+ createBucket,
354
+ updateBucket,
355
+ listBuckets,
356
+ downloadFileByName,
357
+ uploadFile,
358
+ hideFile,
359
+ getFileInfo,
360
+ listFileNames,
361
+ copyFile,
362
+ startLargeFile,
363
+ uploadPart,
364
+ finishLargeFile,
365
+ cancelLargeFile,
366
+ getDownloadAuthorization,
367
+ getDownloadURL,
368
+ apiUrl: auth.apiUrl,
369
+ };
370
+ });
371
+
372
+ type B2Api = (typeof getAPI) extends () => Promise<infer T> ? T : never;
373
+
374
+
375
+ export class ArchivesBackblaze {
376
+ public constructor(private config: {
377
+ bucketName: string;
378
+ public?: boolean;
379
+ immutable?: boolean;
380
+ cacheTime?: number;
381
+ }) { }
382
+
383
+ private bucketName = this.config.bucketName.replaceAll(/[^\w\d]/g, "-");
384
+ private bucketId = "";
385
+
386
+ private logging = false;
387
+ public enableLogging() {
388
+ this.logging = true;
389
+ }
390
+ private log(text: string) {
391
+ if (!this.logging) return;
392
+ console.log(text);
393
+ }
394
+
395
+ public getDebugName() {
396
+ return "backblaze/" + this.config.bucketName;
397
+ }
398
+
399
+ private getBucketAPI = lazy(async () => {
400
+ let api = await getAPI();
401
+
402
+ let cacheTime = this.config.cacheTime ?? 0;
403
+ if (this.config.immutable) {
404
+ cacheTime = 86400 * 1000;
405
+ }
406
+
407
+ // ALWAYS set access control, as we can make urls for private buckets with getDownloadAuthorization
408
+ let desiredCorsRules = [{
409
+ corsRuleName: "allowAll",
410
+ allowedOrigins: ["https"],
411
+ allowedOperations: ["b2_download_file_by_id", "b2_download_file_by_name"],
412
+ allowedHeaders: ["range"],
413
+ exposeHeaders: ["x-bz-content-sha1"],
414
+ maxAgeSeconds: cacheTime / 1000,
415
+ }];
416
+ let bucketInfo: Record<string, unknown> = {};
417
+ if (cacheTime) {
418
+ bucketInfo["cache-control"] = `max-age=${cacheTime / 1000}`;
419
+ }
420
+
421
+
422
+ let exists = false;
423
+ try {
424
+ await api.createBucket({
425
+ bucketName: this.bucketName,
426
+ bucketType: this.config.public ? "allPublic" : "allPrivate",
427
+ lifecycleRules: [{
428
+ "daysFromUploadingToHiding": null,
429
+ // Keep files for 7 days, which should be enough time to recover accidental hiding.
430
+ "daysFromHidingToDeleting": 7,
431
+ "fileNamePrefix": ""
432
+ }],
433
+ corsRules: desiredCorsRules,
434
+ bucketInfo
435
+ });
436
+ } catch (e: any) {
437
+ if (!e.stack.includes(`"duplicate_bucket_name"`)) {
438
+ throw e;
439
+ }
440
+ exists = true;
441
+ }
442
+
443
+ let bucketList = await api.listBuckets({
444
+ bucketName: this.bucketName,
445
+ });
446
+ if (bucketList.buckets.length === 0) {
447
+ throw new Error(`Bucket name "${this.bucketName}" is being used by someone else. Bucket names have to be globally unique. Try a different name until you find a free one.`);
448
+ }
449
+ this.bucketId = bucketList.buckets[0].bucketId;
450
+
451
+ if (exists) {
452
+ let bucket = bucketList.buckets[0];
453
+ function normalize(obj: Record<string, unknown>) {
454
+ let kvps = Object.entries(obj);
455
+ sort(kvps, x => x[0]);
456
+ return Object.fromEntries(kvps);
457
+ }
458
+ function orderIndependentEqual(lhs: Record<string, unknown>, rhs: Record<string, unknown>) {
459
+ return JSON.stringify(normalize(lhs)) === JSON.stringify(normalize(rhs));
460
+ }
461
+ function orderIndependentEqualArray(lhs: unknown[], rhs: unknown[]) {
462
+ if (lhs.length !== rhs.length) return false;
463
+ for (let i = 0; i < lhs.length; i++) {
464
+ if (!orderIndependentEqual(lhs[i] as Record<string, unknown>, rhs[i] as Record<string, unknown>)) return false;
465
+ }
466
+ return true;
467
+ }
468
+ if (
469
+ !orderIndependentEqualArray(bucket.corsRules, desiredCorsRules)
470
+ || !orderIndependentEqual(bucket.bucketInfo, bucketInfo)
471
+ ) {
472
+ console.log(magenta(`Updating CORS rules for ${this.bucketName}`), bucket.corsRules, desiredCorsRules);
473
+ await api.updateBucket({
474
+ accountId: bucket.accountId,
475
+ bucketId: bucket.bucketId,
476
+ bucketType: bucket.bucketType,
477
+ lifecycleRules: bucket.lifecycleRules,
478
+ corsRules: desiredCorsRules,
479
+ bucketInfo: bucketInfo,
480
+ });
481
+ }
482
+ }
483
+ return api;
484
+ });
485
+
486
+ // Keep track of when we last reset because of a 503
487
+ private last503Reset = 0;
488
+ // IMPORTANT! We must always CATCH AROUND the apiRetryLogic, NEVER inside of fnc. Otherwise we won't
489
+ // be able to recreate the auth token.
490
+ private async apiRetryLogic<T>(
491
+ fnc: (api: B2Api) => Promise<T>,
492
+ retries = 3
493
+ ): Promise<T> {
494
+ let api = await this.getBucketAPI();
495
+ try {
496
+ return await fnc(api);
497
+ } catch (err: any) {
498
+ if (retries <= 0) throw err;
499
+
500
+ // If it's a 503 and it's been a minute since we last reset, then Wait and reset.
501
+ if (
502
+ (err.stack.includes(`503`)
503
+ || err.stack.includes(`"service_unavailable"`)
504
+ || err.stack.includes(`"internal_error"`)
505
+ || err.stack.includes(`ENOBUFS`)
506
+ ) && Date.now() - this.last503Reset > 60 * 1000) {
507
+ console.error("503 error, waiting a minute and resetting: " + err.message);
508
+ this.log("503 error, waiting a minute and resetting: " + err.message);
509
+ await delay(10 * 1000);
510
+ // We check again in case, and in the very likely case that this is being run in parallel, we only want to reset once.
511
+ if (Date.now() - this.last503Reset > 60 * 1000) {
512
+ this.log("Resetting getAPI and getBucketAPI: " + err.message);
513
+ this.last503Reset = Date.now();
514
+ getAPI.reset();
515
+ this.getBucketAPI.reset();
516
+ }
517
+ return this.apiRetryLogic(fnc, retries - 1);
518
+ }
519
+
520
+ // If the error is that the authorization token is invalid, reset getBucketAPI and getAPI
521
+ // If the error is that the bucket isn't found, reset getBucketAPI
522
+ if (err.stack.includes(`"expired_auth_token"`)) {
523
+ this.log("Authorization token expired");
524
+ getAPI.reset();
525
+ this.getBucketAPI.reset();
526
+ return this.apiRetryLogic(fnc, retries - 1);
527
+ }
528
+
529
+ if (
530
+ err.stack.includes(`no tomes available`)
531
+ || err.stack.includes(`ETIMEDOUT`)
532
+ || err.stack.includes(`socket hang up`)
533
+ // Eh... this might be bad, but... I think we just get random 400 errors. If this spams errors,
534
+ // we can remove this line.
535
+ || err.stack.includes(`400 Bad Request`)
536
+ || err.stack.includes(`getaddrinfo ENOTFOUND`)
537
+ || err.stack.includes(`ECONNRESET`)
538
+ || err.stack.includes(`ECONNREFUSED`)
539
+ || err.stack.includes(`ENOBUFS`)
540
+ ) {
541
+ console.error("Retrying in 5s: " + err.message);
542
+ this.log(err.message + " retrying in 5s");
543
+ await delay(5000);
544
+ return this.apiRetryLogic(fnc, retries - 1);
545
+ }
546
+
547
+ if (err.stack.includes(`getaddrinfo ENOTFOUND`)) {
548
+ let urlObj = new URL(api.apiUrl);
549
+ let hostname = urlObj.hostname;
550
+ let lookupAddresses = await new Promise(resolve => {
551
+ dns.lookup(hostname, (err, addresses) => {
552
+ resolve(addresses);
553
+ });
554
+ });
555
+ let resolveAddresses = await new Promise(resolve => {
556
+ dns.resolve4(hostname, (err, addresses) => {
557
+ resolve(addresses);
558
+ });
559
+ });
560
+ console.error(`getaddrinfo ENOTFOUND ${hostname}`, { lookupAddresses, resolveAddresses, apiUrl: api.apiUrl, fullError: err.stack });
561
+ }
562
+
563
+ // TODO: Handle if the bucket is deleted?
564
+ throw err;
565
+ }
566
+ }
567
+
568
+ public async get(fileName: string, config?: { range?: { start: number; end: number; }; retryCount?: number }): Promise<Buffer | undefined> {
569
+ let downloading = true;
570
+ try {
571
+ let time = Date.now();
572
+ const downloadPoll = () => {
573
+ if (!downloading) return;
574
+ this.log(`Backblaze download in progress ${fileName}`);
575
+ setTimeout(downloadPoll, 5000);
576
+ };
577
+ setTimeout(downloadPoll, 5000);
578
+ let result = await this.apiRetryLogic(async (api) => {
579
+ return await api.downloadFileByName({
580
+ bucketName: this.bucketName,
581
+ fileName,
582
+ range: config?.range
583
+ });
584
+ });
585
+ let timeStr = formatTime(Date.now() - time);
586
+ let rateStr = formatNumber(result.length / (Date.now() - time) * 1000) + "B/s";
587
+ this.log(`backblaze download (${formatNumber(result.length)}B${config?.range && `, ${formatNumber(config.range.start)} - ${formatNumber(config.range.end)}` || ""}) in ${timeStr} (${rateStr}, ${fileName})`);
588
+ return result;
589
+ } catch (e) {
590
+ this.log(`backblaze file does not exist ${fileName}`);
591
+ return undefined;
592
+ } finally {
593
+ downloading = false;
594
+ }
595
+ }
596
+ public async set(fileName: string, data: Buffer): Promise<void> {
597
+ this.log(`backblaze upload (${formatNumber(data.length)}B) ${fileName}`);
598
+ let f = fileName;
599
+ await this.apiRetryLogic(async (api) => {
600
+ await api.uploadFile({ bucketId: this.bucketId, fileName, data: data, });
601
+ });
602
+ let existsChecks = 30;
603
+ while (existsChecks > 0) {
604
+ let exists = await this.getInfo(fileName);
605
+ if (exists) break;
606
+ await delay(1000);
607
+ existsChecks--;
608
+ }
609
+ if (existsChecks === 0) {
610
+ let exists = await this.getInfo(fileName);
611
+ console.warn(`File ${fileName}/${f} was uploaded, but could not be found afterwards. Hopefully it was just deleted, very quickly? If backblaze is taking too long for files to propagate, then we might run into issues with the database atomicity.`);
612
+ }
613
+
614
+ }
615
+ public async del(fileName: string): Promise<void> {
616
+ this.log(`backblaze delete ${fileName}`);
617
+ try {
618
+ await this.apiRetryLogic(async (api) => {
619
+ await api.hideFile({ bucketId: this.bucketId, fileName: fileName });
620
+ });
621
+ } catch (e: any) {
622
+ this.log(`backblaze error in hide, possibly already hidden ${fileName}\n${e.stack}`);
623
+ }
624
+
625
+ // NOTE: Deletion SEEMS to work. This DOES break if we delete a file which keeps being recreated,
626
+ // ex, the heartbeat.
627
+ // let existsChecks = 10;
628
+ // while (existsChecks > 0) {
629
+ // let exists = await this.getInfo(fileName);
630
+ // if (!exists) break;
631
+ // await delay(1000);
632
+ // existsChecks--;
633
+ // }
634
+ // if (existsChecks === 0) {
635
+ // let exists = await this.getInfo(fileName);
636
+ // devDebugbreak();
637
+ // console.warn(`File ${fileName} was deleted, but was still found afterwards`);
638
+ // exists = await this.getInfo(fileName);
639
+ // }
640
+ }
641
+
642
+ public async setLargeFile(config: { path: string; getNextData(): Promise<Buffer | undefined>; }): Promise<void> {
643
+
644
+ let onError: (() => Promise<void>)[] = [];
645
+ let time = Date.now();
646
+ try {
647
+ let { path } = config;
648
+ // Backblaze requires 5MB chunks. But, larger is more efficient for us.
649
+ const MIN_CHUNK_SIZE = 32 * 1024 * 1024;
650
+ let dataQueue: Buffer[] = [];
651
+ async function getNextData(): Promise<Buffer | undefined> {
652
+ if (dataQueue.length) return dataQueue.shift();
653
+ // Get buffers until we get 5MB, OR, end. Backblaze requires this for large files.
654
+ let totalBytes = 0;
655
+ let buffers: Buffer[] = [];
656
+ while (totalBytes < MIN_CHUNK_SIZE) {
657
+ let data = await config.getNextData();
658
+ if (!data) break;
659
+ totalBytes += data.length;
660
+ buffers.push(data);
661
+ }
662
+ if (!buffers.length) return undefined;
663
+ return Buffer.concat(buffers);
664
+ }
665
+
666
+ let fileName = path;
667
+ let data = await getNextData();
668
+ if (!data?.length) return;
669
+ // Backblaze disallows overly small files
670
+ if (data.length < MIN_CHUNK_SIZE) {
671
+ return await this.set(fileName, data);
672
+ }
673
+ // Backblaze disallows less than 2 chunks
674
+ let secondData = await getNextData();
675
+ if (!secondData?.length) {
676
+ return await this.set(fileName, data);
677
+ }
678
+ // ALSO, if there are two chunks, but one is too small, combine it. This helps allow us never
679
+ // send small chunks.
680
+ if (secondData.length < MIN_CHUNK_SIZE) {
681
+ return await this.set(fileName, Buffer.concat([data, secondData]));
682
+ }
683
+ this.log(`Uploading large file ${config.path}`);
684
+ dataQueue.unshift(data, secondData);
685
+
686
+
687
+ let uploadInfo = await this.apiRetryLogic(async (api) => {
688
+ return await api.startLargeFile({
689
+ bucketId: this.bucketId,
690
+ fileName: fileName,
691
+ contentType: "b2/x-auto",
692
+ fileInfo: {},
693
+ });
694
+ });
695
+ onError.push(async () => {
696
+ await this.apiRetryLogic(async (api) => {
697
+ await api.cancelLargeFile({ fileId: uploadInfo.fileId });
698
+ });
699
+ });
700
+
701
+ const LOG_INTERVAL = timeInMinute;
702
+ let nextLogTime = Date.now() + LOG_INTERVAL;
703
+
704
+ let partNumber = 1;
705
+ let partSha1Array: string[] = [];
706
+ let totalBytes = 0;
707
+ while (true) {
708
+ data = await getNextData();
709
+ if (!data) break;
710
+ // So... if the next chunk is the last one, combine it with the current one. This
711
+ // prevents ANY uploads from being < the threshold, as apparently the "last part"
712
+ // check in backblaze fails when we have to retry an upload (due to "no tomes available").
713
+ // Well it can't fail if even the last part is > 5MB, now can it!
714
+ // BUT, only if this isn't the first chunk, otherwise we might try to send
715
+ // a single chunk, which we can't do.
716
+ if (partSha1Array.length > 0) {
717
+ let maybeLastData = await getNextData();
718
+ if (maybeLastData) {
719
+ if (maybeLastData.length < MIN_CHUNK_SIZE) {
720
+ // It's the last one, so consume it now
721
+ data = Buffer.concat([data, maybeLastData]);
722
+ } else {
723
+ // It's not the last one. Put it back, in case the one AFTER is the last
724
+ // one, in which case we need to merge maybeLastData with the next next data.
725
+ dataQueue.unshift(maybeLastData);
726
+ }
727
+ }
728
+ }
729
+ let sha1 = require("crypto").createHash("sha1");
730
+ sha1.update(data);
731
+ let sha1Hex = sha1.digest("hex");
732
+ partSha1Array.push(sha1Hex);
733
+ await this.apiRetryLogic(async (api) => {
734
+ if (!data) throw new Error("Impossible, data is undefined");
735
+
736
+ let timeStr = formatTime(Date.now() - time);
737
+ let rateStr = formatNumber(totalBytes / (Date.now() - time) * 1000) + "B/s";
738
+ this.log(`Uploading large file part ${partNumber}, uploaded ${blue(formatNumber(totalBytes) + "B")} in ${blue(timeStr)} (${blue(rateStr)}). ${config.path}`);
739
+ totalBytes += data.length;
740
+
741
+ await api.uploadPart({
742
+ fileId: uploadInfo.fileId,
743
+ partNumber: partNumber,
744
+ data: data,
745
+ sha1: sha1Hex,
746
+ });
747
+ });
748
+ partNumber++;
749
+
750
+ if (Date.now() > nextLogTime) {
751
+ nextLogTime = Date.now() + LOG_INTERVAL;
752
+ let timeStr = formatTime(Date.now() - time);
753
+ let rateStr = formatNumber(totalBytes / (Date.now() - time) * 1000) + "B/s";
754
+ console.log(`Still uploading large file at ${Date.now()}. Uploaded ${formatNumber(totalBytes)}B in ${timeStr} (${rateStr}). ${config.path}`);
755
+ }
756
+ }
757
+ this.log(`Finished uploading large file uploaded ${green(formatNumber(totalBytes))}B`);
758
+
759
+ await this.apiRetryLogic(async (api) => {
760
+ await api.finishLargeFile({
761
+ fileId: uploadInfo.fileId,
762
+ partSha1Array: partSha1Array,
763
+ });
764
+ });
765
+ } catch (e: any) {
766
+ for (let c of onError) {
767
+ try {
768
+ await c();
769
+ } catch (e) {
770
+ console.error(`Error during error clean. Ignoring, we will rethrow the original error, path ${config.path}`, e);
771
+ }
772
+ }
773
+
774
+ throw new Error(`Error in setLargeFile for ${config.path}: ${e.stack}`);
775
+ }
776
+ }
777
+
778
+ public async getInfo(fileName: string): Promise<{ writeTime: number; size: number; } | undefined> {
779
+ return await this.apiRetryLogic(async (api) => {
780
+ let info = await api.listFileNames({ bucketId: this.bucketId, prefix: fileName, });
781
+ let file = info.files.find(x => x.fileName === fileName);
782
+ if (!file) {
783
+ this.log(`Backblaze file not exists ${fileName}`);
784
+ return undefined;
785
+ }
786
+ this.log(`Backblaze file exists ${fileName}`);
787
+ return {
788
+ writeTime: file.uploadTimestamp,
789
+ size: file.contentLength,
790
+ };
791
+ });
792
+ }
793
+
794
+ // For example findFileNames("ips/")
795
+ public async find(prefix: string, config?: { shallow?: boolean; type: "files" | "folders" }): Promise<string[]> {
796
+ let result = await this.findInfo(prefix, config);
797
+ return result.map(x => x.path);
798
+ }
799
+ public async findInfo(prefix: string, config?: { shallow?: boolean; type: "files" | "folders" }): Promise<{ path: string; createTime: number; size: number; }[]> {
800
+ return await this.apiRetryLogic(async (api) => {
801
+ if (!config?.shallow && config?.type === "folders") {
802
+ let allFiles = await this.findInfo(prefix);
803
+ let allFolders = new Map<string, { path: string; createTime: number; size: number }>();
804
+ for (let { path, createTime, size } of allFiles) {
805
+ let folder = path.split("/").slice(0, -1).join("/");
806
+ if (!folder) continue;
807
+ allFolders.set(folder, { path: folder, createTime, size });
808
+ }
809
+ return Array.from(allFolders.values());
810
+ }
811
+ let files = new Map<string, { path: string; createTime: number; size: number; }>();
812
+ let startFileName = "";
813
+ while (true) {
814
+ let result = await api.listFileNames({
815
+ bucketId: this.bucketId,
816
+ prefix: prefix,
817
+ startFileName,
818
+ maxFileCount: 1000,
819
+ delimiter: config?.shallow ? "/" : undefined,
820
+ });
821
+ for (let file of result.files) {
822
+ if (file.action === "upload" && config?.type !== "folders") {
823
+ files.set(file.fileName, { path: file.fileName, createTime: file.uploadTimestamp, size: file.contentLength });
824
+ } else if (file.action === "folder" && config?.type === "folders") {
825
+ let folder = file.fileName;
826
+ if (folder.endsWith("/")) {
827
+ folder = folder.slice(0, -1);
828
+ }
829
+ files.set(folder, { path: folder, createTime: file.uploadTimestamp, size: file.contentLength });
830
+ }
831
+
832
+ }
833
+ startFileName = result.nextFileName;
834
+ if (!startFileName) break;
835
+ }
836
+ return Array.from(files.values());
837
+ });
838
+ }
839
+
840
+ public async assertPathValid(path: string) {
841
+ let bytes = Buffer.from(path, "utf8");
842
+ if (bytes.length > 1000) {
843
+ throw new Error(`Path too long: ${path.length} characters > 1000 characters. Path: ${path}`);
844
+ }
845
+ }
846
+
847
+ public async getURL(path: string) {
848
+ return await this.apiRetryLogic(async (api) => {
849
+ if (path.startsWith("/")) {
850
+ path = path.slice(1);
851
+ }
852
+ return await api.getDownloadURL("file/" + this.bucketName + "/" + path);
853
+ });
854
+ }
855
+
856
+ public async getDownloadAuthorization(config: {
857
+ fileNamePrefix?: string;
858
+ validDurationInSeconds: number;
859
+ b2ContentDisposition?: string;
860
+ b2ContentLanguage?: string;
861
+ b2Expires?: string;
862
+ b2CacheControl?: string;
863
+ b2ContentEncoding?: string;
864
+ b2ContentType?: string;
865
+ }): Promise<{
866
+ bucketId: string;
867
+ fileNamePrefix: string;
868
+ authorizationToken: string;
869
+ }> {
870
+ return await this.apiRetryLogic(async (api) => {
871
+ return await api.getDownloadAuthorization({
872
+ bucketId: this.bucketId,
873
+ fileNamePrefix: config.fileNamePrefix ?? "",
874
+ ...config,
875
+ });
876
+ });
877
+ }
878
+ }
879
+
880
/*
    B2 file names must be UTF-8 strings of at most 1024 bytes, with the following restrictions:
        Character codes below 32 are not allowed.
        DEL characters (127) are not allowed.
        Backslashes are not allowed.
        File names cannot start with /, end with /, or contain //.
*/
887
+
888
+
889
+ export const getArchivesBackblaze = cache((domain: string) => {
890
+ return new ArchivesBackblaze({ bucketName: domain });
891
+ });
892
+ export const getArchivesBackblazePrivateImmutable = cache((domain: string) => {
893
+ return new ArchivesBackblaze({
894
+ bucketName: domain + "-private-immutable",
895
+ immutable: true
896
+ });
897
+ });
898
+ export const getArchivesBackblazePublicImmutable = cache((domain: string) => {
899
+ return new ArchivesBackblaze({
900
+ bucketName: domain + "-public-immutable",
901
+ public: true,
902
+ immutable: true
903
+ });
904
+ });
905
+
906
+ // NOTE: Cache by a minute. This might be a bad idea, but... usually whole reason for public is
907
+ // for cloudflare caching (as otherwise we can just access it through a server), or for large files
908
+ // (which should be cached anyways, and probably even use immutable caching).
909
+ export const getArchivesBackblazePublic = cache((domain: string) => {
910
+ return new ArchivesBackblaze({
911
+ bucketName: domain + "-public",
912
+ public: true,
913
+ cacheTime: timeInMinute,
914
+ });
915
+ });