querysub 0.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.dependency-cruiser.js +304 -0
- package/.eslintrc.js +51 -0
- package/.github/copilot-instructions.md +1 -0
- package/.vscode/settings.json +25 -0
- package/bin/deploy.js +4 -0
- package/bin/function.js +4 -0
- package/bin/server.js +4 -0
- package/costsBenefits.txt +112 -0
- package/deploy.ts +3 -0
- package/inject.ts +1 -0
- package/package.json +60 -0
- package/prompts.txt +54 -0
- package/spec.txt +820 -0
- package/src/-a-archives/archiveCache.ts +913 -0
- package/src/-a-archives/archives.ts +148 -0
- package/src/-a-archives/archivesBackBlaze.ts +792 -0
- package/src/-a-archives/archivesDisk.ts +418 -0
- package/src/-a-archives/copyLocalToBackblaze.ts +24 -0
- package/src/-a-auth/certs.ts +517 -0
- package/src/-a-auth/der.ts +122 -0
- package/src/-a-auth/ed25519.ts +1015 -0
- package/src/-a-auth/node-forge-ed25519.d.ts +17 -0
- package/src/-b-authorities/dnsAuthority.ts +203 -0
- package/src/-b-authorities/emailAuthority.ts +57 -0
- package/src/-c-identity/IdentityController.ts +200 -0
- package/src/-d-trust/NetworkTrust2.ts +150 -0
- package/src/-e-certs/EdgeCertController.ts +288 -0
- package/src/-e-certs/certAuthority.ts +192 -0
- package/src/-f-node-discovery/NodeDiscovery.ts +543 -0
- package/src/-g-core-values/NodeCapabilities.ts +134 -0
- package/src/-g-core-values/oneTimeForward.ts +91 -0
- package/src/-h-path-value-serialize/PathValueSerializer.ts +769 -0
- package/src/-h-path-value-serialize/stringSerializer.ts +176 -0
- package/src/0-path-value-core/LoggingClient.tsx +24 -0
- package/src/0-path-value-core/NodePathAuthorities.ts +978 -0
- package/src/0-path-value-core/PathController.ts +1 -0
- package/src/0-path-value-core/PathValueCommitter.ts +565 -0
- package/src/0-path-value-core/PathValueController.ts +231 -0
- package/src/0-path-value-core/archiveLocks/ArchiveLocks.ts +154 -0
- package/src/0-path-value-core/archiveLocks/ArchiveLocks2.ts +820 -0
- package/src/0-path-value-core/archiveLocks/archiveSnapshots.ts +180 -0
- package/src/0-path-value-core/debugLogs.ts +90 -0
- package/src/0-path-value-core/pathValueArchives.ts +483 -0
- package/src/0-path-value-core/pathValueCore.ts +2217 -0
- package/src/1-path-client/RemoteWatcher.ts +558 -0
- package/src/1-path-client/pathValueClientWatcher.ts +702 -0
- package/src/2-proxy/PathValueProxyWatcher.ts +1857 -0
- package/src/2-proxy/archiveMoveHarness.ts +376 -0
- package/src/2-proxy/garbageCollection.ts +753 -0
- package/src/2-proxy/pathDatabaseProxyBase.ts +37 -0
- package/src/2-proxy/pathValueProxy.ts +139 -0
- package/src/2-proxy/schema2.ts +518 -0
- package/src/3-path-functions/PathFunctionHelpers.ts +129 -0
- package/src/3-path-functions/PathFunctionRunner.ts +619 -0
- package/src/3-path-functions/PathFunctionRunnerMain.ts +67 -0
- package/src/3-path-functions/deployBlock.ts +10 -0
- package/src/3-path-functions/deployCheck.ts +7 -0
- package/src/3-path-functions/deployMain.ts +160 -0
- package/src/3-path-functions/pathFunctionLoader.ts +282 -0
- package/src/3-path-functions/syncSchema.ts +475 -0
- package/src/3-path-functions/tests/functionsTest.ts +135 -0
- package/src/3-path-functions/tests/rejectTest.ts +77 -0
- package/src/4-dom/css.tsx +29 -0
- package/src/4-dom/cssTypes.d.ts +212 -0
- package/src/4-dom/qreact.tsx +2322 -0
- package/src/4-dom/qreactTest.tsx +417 -0
- package/src/4-querysub/Querysub.ts +877 -0
- package/src/4-querysub/QuerysubController.ts +620 -0
- package/src/4-querysub/copyEvent.ts +0 -0
- package/src/4-querysub/permissions.ts +289 -0
- package/src/4-querysub/permissionsShared.ts +1 -0
- package/src/4-querysub/querysubPrediction.ts +525 -0
- package/src/5-diagnostics/FullscreenModal.tsx +67 -0
- package/src/5-diagnostics/GenericFormat.tsx +165 -0
- package/src/5-diagnostics/Modal.tsx +79 -0
- package/src/5-diagnostics/Table.tsx +183 -0
- package/src/5-diagnostics/TimeGrouper.tsx +114 -0
- package/src/5-diagnostics/diskValueAudit.ts +216 -0
- package/src/5-diagnostics/memoryValueAudit.ts +442 -0
- package/src/5-diagnostics/nodeMetadata.ts +135 -0
- package/src/5-diagnostics/qreactDebug.tsx +309 -0
- package/src/5-diagnostics/shared.ts +26 -0
- package/src/5-diagnostics/synchronousLagTracking.ts +47 -0
- package/src/TestController.ts +35 -0
- package/src/allowclient.flag +0 -0
- package/src/bits.ts +86 -0
- package/src/buffers.ts +69 -0
- package/src/config.ts +53 -0
- package/src/config2.ts +48 -0
- package/src/diagnostics/ActionsHistory.ts +56 -0
- package/src/diagnostics/NodeViewer.tsx +503 -0
- package/src/diagnostics/SizeLimiter.ts +62 -0
- package/src/diagnostics/TimeDebug.tsx +18 -0
- package/src/diagnostics/benchmark.ts +139 -0
- package/src/diagnostics/errorLogs/ErrorLogController.ts +515 -0
- package/src/diagnostics/errorLogs/ErrorLogCore.ts +274 -0
- package/src/diagnostics/errorLogs/LogClassifiers.tsx +302 -0
- package/src/diagnostics/errorLogs/LogFilterUI.tsx +84 -0
- package/src/diagnostics/errorLogs/LogNotify.tsx +101 -0
- package/src/diagnostics/errorLogs/LogTimeSelector.tsx +724 -0
- package/src/diagnostics/errorLogs/LogViewer.tsx +757 -0
- package/src/diagnostics/errorLogs/hookErrors.ts +60 -0
- package/src/diagnostics/errorLogs/logFiltering.tsx +149 -0
- package/src/diagnostics/heapTag.ts +13 -0
- package/src/diagnostics/listenOnDebugger.ts +77 -0
- package/src/diagnostics/logs/DiskLoggerPage.tsx +572 -0
- package/src/diagnostics/logs/ObjectDisplay.tsx +165 -0
- package/src/diagnostics/logs/ansiFormat.ts +108 -0
- package/src/diagnostics/logs/diskLogGlobalContext.ts +38 -0
- package/src/diagnostics/logs/diskLogger.ts +305 -0
- package/src/diagnostics/logs/diskShimConsoleLogs.ts +32 -0
- package/src/diagnostics/logs/injectFileLocationToConsole.ts +50 -0
- package/src/diagnostics/logs/logGitHashes.ts +30 -0
- package/src/diagnostics/managementPages.tsx +289 -0
- package/src/diagnostics/periodic.ts +89 -0
- package/src/diagnostics/runSaturationTest.ts +416 -0
- package/src/diagnostics/satSchema.ts +64 -0
- package/src/diagnostics/trackResources.ts +82 -0
- package/src/diagnostics/watchdog.ts +55 -0
- package/src/errors.ts +132 -0
- package/src/forceProduction.ts +3 -0
- package/src/fs.ts +72 -0
- package/src/heapDumps.ts +666 -0
- package/src/https.ts +2 -0
- package/src/inject.ts +1 -0
- package/src/library-components/ATag.tsx +84 -0
- package/src/library-components/Button.tsx +344 -0
- package/src/library-components/ButtonSelector.tsx +64 -0
- package/src/library-components/DropdownCustom.tsx +151 -0
- package/src/library-components/DropdownSelector.tsx +32 -0
- package/src/library-components/Input.tsx +334 -0
- package/src/library-components/InputLabel.tsx +198 -0
- package/src/library-components/InputPicker.tsx +125 -0
- package/src/library-components/LazyComponent.tsx +62 -0
- package/src/library-components/MeasureHeightCSS.tsx +48 -0
- package/src/library-components/MeasuredDiv.tsx +47 -0
- package/src/library-components/ShowMore.tsx +51 -0
- package/src/library-components/SyncedController.ts +171 -0
- package/src/library-components/TimeRangeSelector.tsx +407 -0
- package/src/library-components/URLParam.ts +263 -0
- package/src/library-components/colors.tsx +14 -0
- package/src/library-components/drag.ts +114 -0
- package/src/library-components/icons.tsx +692 -0
- package/src/library-components/niceStringify.ts +50 -0
- package/src/library-components/renderToString.ts +52 -0
- package/src/misc/PromiseRace.ts +101 -0
- package/src/misc/color.ts +30 -0
- package/src/misc/getParentProcessId.cs +53 -0
- package/src/misc/getParentProcessId.ts +53 -0
- package/src/misc/hash.ts +83 -0
- package/src/misc/ipPong.js +13 -0
- package/src/misc/networking.ts +2 -0
- package/src/misc/random.ts +45 -0
- package/src/misc.ts +19 -0
- package/src/noserverhotreload.flag +0 -0
- package/src/path.ts +226 -0
- package/src/persistentLocalStore.ts +37 -0
- package/src/promise.ts +15 -0
- package/src/server.ts +73 -0
- package/src/src.d.ts +1 -0
- package/src/test/heapProcess.ts +36 -0
- package/src/test/mongoSatTest.tsx +55 -0
- package/src/test/satTest.ts +193 -0
- package/src/test/test.tsx +552 -0
- package/src/zip.ts +92 -0
- package/src/zipThreaded.ts +106 -0
- package/src/zipThreadedWorker.js +19 -0
- package/tsconfig.json +27 -0
- package/yarnSpec.txt +56 -0
|
@@ -0,0 +1,792 @@
|
|
|
1
|
+
import { cache, lazy } from "socket-function/src/caching";
|
|
2
|
+
import { getStorageDir } from "../fs";
|
|
3
|
+
import { Archives } from "./archives";
|
|
4
|
+
import fs from "fs";
|
|
5
|
+
import os from "os";
|
|
6
|
+
import { isNode, timeInHour, timeInMinute } from "socket-function/src/misc";
|
|
7
|
+
import { httpsRequest } from "../https";
|
|
8
|
+
import { delay } from "socket-function/src/batching";
|
|
9
|
+
import { devDebugbreak } from "../config";
|
|
10
|
+
import { formatNumber, formatTime } from "socket-function/src/formatting/format";
|
|
11
|
+
import { blue, green } from "socket-function/src/formatting/logColors";
|
|
12
|
+
import debugbreak from "debugbreak";
|
|
13
|
+
|
|
14
|
+
export function hasBackblazePermissions() {
|
|
15
|
+
return isNode() && fs.existsSync(getBackblazePath());
|
|
16
|
+
}
|
|
17
|
+
export function getBackblazePath() {
|
|
18
|
+
let testPaths = [
|
|
19
|
+
getStorageDir() + "backblaze.json",
|
|
20
|
+
os.homedir() + "/backblaze.json",
|
|
21
|
+
];
|
|
22
|
+
for (let path of testPaths) {
|
|
23
|
+
if (fs.existsSync(path)) {
|
|
24
|
+
return path;
|
|
25
|
+
}
|
|
26
|
+
}
|
|
27
|
+
return testPaths[0];
|
|
28
|
+
}
|
|
29
|
+
|
|
30
|
+
type BackblazeCreds = {
|
|
31
|
+
applicationKeyId: string;
|
|
32
|
+
applicationKey: string;
|
|
33
|
+
};
|
|
34
|
+
|
|
35
|
+
let backblazeCreds = lazy((): BackblazeCreds => (
|
|
36
|
+
JSON.parse(fs.readFileSync(getBackblazePath(), "utf8")) as {
|
|
37
|
+
applicationKeyId: string;
|
|
38
|
+
applicationKey: string;
|
|
39
|
+
}
|
|
40
|
+
));
|
|
41
|
+
const getAPI = lazy(async () => {
|
|
42
|
+
let creds = backblazeCreds();
|
|
43
|
+
|
|
44
|
+
let authorizeRaw = await httpsRequest("https://api.backblazeb2.com/b2api/v2/b2_authorize_account", undefined, "GET", undefined, {
|
|
45
|
+
headers: {
|
|
46
|
+
Authorization: "Basic " + Buffer.from(creds.applicationKeyId + ":" + creds.applicationKey).toString("base64"),
|
|
47
|
+
}
|
|
48
|
+
});
|
|
49
|
+
let auth = JSON.parse(authorizeRaw.toString()) as {
|
|
50
|
+
accountId: string;
|
|
51
|
+
authorizationToken: string;
|
|
52
|
+
apiUrl: string;
|
|
53
|
+
downloadUrl: string;
|
|
54
|
+
allowed: {
|
|
55
|
+
bucketId: string;
|
|
56
|
+
bucketName: string;
|
|
57
|
+
capabilities: string[];
|
|
58
|
+
namePrefix: string;
|
|
59
|
+
}[];
|
|
60
|
+
};
|
|
61
|
+
|
|
62
|
+
function createB2Function<Arg, Result>(name: string, type: "POST" | "GET", noAccountId?: "noAccountId"): (arg: Arg) => Promise<Result> {
|
|
63
|
+
return async (arg: Arg) => {
|
|
64
|
+
if (!noAccountId) {
|
|
65
|
+
arg = { accountId: auth.accountId, ...arg };
|
|
66
|
+
}
|
|
67
|
+
try {
|
|
68
|
+
let result = await httpsRequest(auth.apiUrl + "/b2api/v2/" + name, Buffer.from(JSON.stringify(arg)), type, undefined, {
|
|
69
|
+
headers: {
|
|
70
|
+
Authorization: auth.authorizationToken,
|
|
71
|
+
}
|
|
72
|
+
});
|
|
73
|
+
return JSON.parse(result.toString());
|
|
74
|
+
} catch (e: any) {
|
|
75
|
+
throw new Error(`Error in ${name}, arg ${JSON.stringify(arg).slice(0, 1000)}: ${e.stack}`);
|
|
76
|
+
}
|
|
77
|
+
};
|
|
78
|
+
}
|
|
79
|
+
|
|
80
|
+
const createBucket = createB2Function<{
|
|
81
|
+
bucketName: string;
|
|
82
|
+
bucketType: "allPrivate" | "allPublic";
|
|
83
|
+
lifecycleRules?: any[];
|
|
84
|
+
corsRules?: unknown[];
|
|
85
|
+
}, {
|
|
86
|
+
accountId: string;
|
|
87
|
+
bucketId: string;
|
|
88
|
+
bucketName: string;
|
|
89
|
+
bucketType: "allPrivate" | "allPublic";
|
|
90
|
+
bucketInfo: {
|
|
91
|
+
lifecycleRules: any[];
|
|
92
|
+
};
|
|
93
|
+
corsRules: any[];
|
|
94
|
+
lifecycleRules: any[];
|
|
95
|
+
revision: number;
|
|
96
|
+
}>("b2_create_bucket", "POST");
|
|
97
|
+
|
|
98
|
+
// https://www.backblaze.com/apidocs/b2-update-bucket
|
|
99
|
+
// TODO: b2_update_bucket, so we can update CORS, etc
|
|
100
|
+
|
|
101
|
+
const listBuckets = createB2Function<{
|
|
102
|
+
bucketName?: string;
|
|
103
|
+
}, {
|
|
104
|
+
buckets: {
|
|
105
|
+
accountId: string;
|
|
106
|
+
bucketId: string;
|
|
107
|
+
bucketName: string;
|
|
108
|
+
bucketType: "allPrivate" | "allPublic";
|
|
109
|
+
bucketInfo: {
|
|
110
|
+
lifecycleRules: any[];
|
|
111
|
+
};
|
|
112
|
+
corsRules: any[];
|
|
113
|
+
lifecycleRules: any[];
|
|
114
|
+
revision: number;
|
|
115
|
+
}[];
|
|
116
|
+
}>("b2_list_buckets", "POST");
|
|
117
|
+
|
|
118
|
+
function encodePath(path: string) {
|
|
119
|
+
// Preserve slashes, but encode everything else
|
|
120
|
+
path = path.split("/").map(encodeURIComponent).join("/");
|
|
121
|
+
if (path.startsWith("/")) path = "%2F" + path.slice(1);
|
|
122
|
+
if (path.endsWith("/")) path = path.slice(0, -1) + "%2F";
|
|
123
|
+
// NOTE: For some reason, this won't render in the web UI correctly. BUT, it'll
|
|
124
|
+
// work get get/set and find
|
|
125
|
+
// - ALSO, it seems to add duplicate files? This might also be a web UI thing. It
|
|
126
|
+
// seems to work though.
|
|
127
|
+
while (path.includes("//")) {
|
|
128
|
+
path = path.replaceAll("//", "/%2F");
|
|
129
|
+
}
|
|
130
|
+
return path;
|
|
131
|
+
}
|
|
132
|
+
|
|
133
|
+
async function downloadFileByName(config: {
|
|
134
|
+
bucketName: string;
|
|
135
|
+
fileName: string;
|
|
136
|
+
range?: { start: number; end: number; };
|
|
137
|
+
}) {
|
|
138
|
+
let fileName = encodePath(config.fileName);
|
|
139
|
+
|
|
140
|
+
let result = await httpsRequest(auth.apiUrl + "/file/" + config.bucketName + "/" + fileName, Buffer.from(JSON.stringify({
|
|
141
|
+
accountId: auth.accountId,
|
|
142
|
+
responseType: "arraybuffer",
|
|
143
|
+
})), "GET", undefined, {
|
|
144
|
+
headers: Object.fromEntries(Object.entries({
|
|
145
|
+
Authorization: auth.authorizationToken,
|
|
146
|
+
"Content-Type": "application/json",
|
|
147
|
+
Range: config.range ? `bytes=${config.range.start}-${config.range.end - 1}` : undefined,
|
|
148
|
+
}).filter(x => x[1] !== undefined)),
|
|
149
|
+
});
|
|
150
|
+
return result;
|
|
151
|
+
}
|
|
152
|
+
|
|
153
|
+
// Oh... apparently, we can't reuse these? Huh...
|
|
154
|
+
const getUploadURL = (async (bucketId: string) => {
|
|
155
|
+
//setTimeout(() => getUploadURL.clear(bucketId), timeInHour * 1);
|
|
156
|
+
let getUploadUrlRaw = await httpsRequest(auth.apiUrl + "/b2api/v2/b2_get_upload_url?bucketId=" + bucketId, undefined, "GET", undefined, {
|
|
157
|
+
headers: {
|
|
158
|
+
Authorization: auth.authorizationToken,
|
|
159
|
+
}
|
|
160
|
+
});
|
|
161
|
+
|
|
162
|
+
return JSON.parse(getUploadUrlRaw.toString()) as {
|
|
163
|
+
bucketId: string;
|
|
164
|
+
uploadUrl: string;
|
|
165
|
+
authorizationToken: string;
|
|
166
|
+
};
|
|
167
|
+
});
|
|
168
|
+
|
|
169
|
+
async function uploadFile(config: {
|
|
170
|
+
bucketId: string;
|
|
171
|
+
fileName: string;
|
|
172
|
+
data: Buffer;
|
|
173
|
+
}) {
|
|
174
|
+
let getUploadUrl = await getUploadURL(config.bucketId);
|
|
175
|
+
|
|
176
|
+
await httpsRequest(getUploadUrl.uploadUrl, config.data, "POST", undefined, {
|
|
177
|
+
headers: {
|
|
178
|
+
Authorization: getUploadUrl.authorizationToken,
|
|
179
|
+
"X-Bz-File-Name": encodePath(config.fileName),
|
|
180
|
+
"Content-Type": "b2/x-auto",
|
|
181
|
+
"X-Bz-Content-Sha1": "do_not_verify",
|
|
182
|
+
"Content-Length": config.data.length + "",
|
|
183
|
+
}
|
|
184
|
+
});
|
|
185
|
+
}
|
|
186
|
+
|
|
187
|
+
const hideFile = createB2Function<{
|
|
188
|
+
bucketId: string;
|
|
189
|
+
fileName: string;
|
|
190
|
+
}, {}>("b2_hide_file", "POST", "noAccountId");
|
|
191
|
+
|
|
192
|
+
const getFileInfo = createB2Function<{
|
|
193
|
+
bucketName: string;
|
|
194
|
+
fileId: string;
|
|
195
|
+
}, {
|
|
196
|
+
fileId: string;
|
|
197
|
+
fileName: string;
|
|
198
|
+
accountId: string;
|
|
199
|
+
bucketId: string;
|
|
200
|
+
contentLength: number;
|
|
201
|
+
contentSha1: string;
|
|
202
|
+
contentType: string;
|
|
203
|
+
fileInfo: {
|
|
204
|
+
src_last_modified_millis: number;
|
|
205
|
+
};
|
|
206
|
+
action: string;
|
|
207
|
+
uploadTimestamp: number;
|
|
208
|
+
}>("b2_get_file_info", "POST");
|
|
209
|
+
|
|
210
|
+
const listFileNames = createB2Function<{
|
|
211
|
+
bucketId: string;
|
|
212
|
+
prefix: string;
|
|
213
|
+
startFileName?: string;
|
|
214
|
+
maxFileCount?: number;
|
|
215
|
+
delimiter?: string;
|
|
216
|
+
}, {
|
|
217
|
+
files: {
|
|
218
|
+
fileId: string;
|
|
219
|
+
fileName: string;
|
|
220
|
+
accountId: string;
|
|
221
|
+
bucketId: string;
|
|
222
|
+
contentLength: number;
|
|
223
|
+
contentSha1: string;
|
|
224
|
+
contentType: string;
|
|
225
|
+
fileInfo: {
|
|
226
|
+
src_last_modified_millis: number;
|
|
227
|
+
};
|
|
228
|
+
action: string;
|
|
229
|
+
uploadTimestamp: number;
|
|
230
|
+
}[];
|
|
231
|
+
nextFileName: string;
|
|
232
|
+
}>("b2_list_file_names", "POST", "noAccountId");
|
|
233
|
+
|
|
234
|
+
const copyFile = createB2Function<{
|
|
235
|
+
sourceFileId: string;
|
|
236
|
+
fileName: string;
|
|
237
|
+
destinationBucketId: string;
|
|
238
|
+
}, {}>("b2_copy_file", "POST", "noAccountId");
|
|
239
|
+
|
|
240
|
+
const startLargeFile = createB2Function<{
|
|
241
|
+
bucketId: string;
|
|
242
|
+
fileName: string;
|
|
243
|
+
contentType: string;
|
|
244
|
+
fileInfo: { [key: string]: string };
|
|
245
|
+
}, {
|
|
246
|
+
fileId: string;
|
|
247
|
+
fileName: string;
|
|
248
|
+
accountId: string;
|
|
249
|
+
bucketId: string;
|
|
250
|
+
contentType: string;
|
|
251
|
+
fileInfo: any;
|
|
252
|
+
uploadTimestamp: number;
|
|
253
|
+
}>("b2_start_large_file", "POST", "noAccountId");
|
|
254
|
+
|
|
255
|
+
// Apparently we can't reuse these?
|
|
256
|
+
const getUploadPartURL = (async (fileId: string) => {
|
|
257
|
+
let uploadPartRaw = await httpsRequest(auth.apiUrl + "/b2api/v2/b2_get_upload_part_url?fileId=" + fileId, undefined, "GET", undefined, {
|
|
258
|
+
headers: {
|
|
259
|
+
Authorization: auth.authorizationToken,
|
|
260
|
+
}
|
|
261
|
+
});
|
|
262
|
+
return JSON.parse(uploadPartRaw.toString()) as {
|
|
263
|
+
fileId: string;
|
|
264
|
+
partNumber: number;
|
|
265
|
+
uploadUrl: string;
|
|
266
|
+
authorizationToken: string;
|
|
267
|
+
};
|
|
268
|
+
});
|
|
269
|
+
async function uploadPart(config: {
|
|
270
|
+
fileId: string;
|
|
271
|
+
partNumber: number;
|
|
272
|
+
data: Buffer;
|
|
273
|
+
sha1: string;
|
|
274
|
+
}): Promise<{
|
|
275
|
+
fileId: string;
|
|
276
|
+
partNumber: number;
|
|
277
|
+
contentLength: number;
|
|
278
|
+
contentSha1: string;
|
|
279
|
+
}> {
|
|
280
|
+
let uploadPart = await getUploadPartURL(config.fileId);
|
|
281
|
+
|
|
282
|
+
let result = await httpsRequest(uploadPart.uploadUrl, config.data, "POST", undefined, {
|
|
283
|
+
headers: {
|
|
284
|
+
Authorization: uploadPart.authorizationToken,
|
|
285
|
+
"X-Bz-Part-Number": config.partNumber + "",
|
|
286
|
+
"X-Bz-Content-Sha1": config.sha1,
|
|
287
|
+
"Content-Length": config.data.length + "",
|
|
288
|
+
|
|
289
|
+
}
|
|
290
|
+
});
|
|
291
|
+
return JSON.parse(result.toString());
|
|
292
|
+
}
|
|
293
|
+
|
|
294
|
+
const finishLargeFile = createB2Function<{
|
|
295
|
+
fileId: string;
|
|
296
|
+
partSha1Array: string[];
|
|
297
|
+
}, {
|
|
298
|
+
fileId: string;
|
|
299
|
+
fileName: string;
|
|
300
|
+
accountId: string;
|
|
301
|
+
bucketId: string;
|
|
302
|
+
contentLength: number;
|
|
303
|
+
contentSha1: string;
|
|
304
|
+
contentType: string;
|
|
305
|
+
fileInfo: any;
|
|
306
|
+
uploadTimestamp: number;
|
|
307
|
+
}>("b2_finish_large_file", "POST", "noAccountId");
|
|
308
|
+
|
|
309
|
+
const cancelLargeFile = createB2Function<{
|
|
310
|
+
fileId: string;
|
|
311
|
+
}, {}>("b2_cancel_large_file", "POST", "noAccountId");
|
|
312
|
+
|
|
313
|
+
return {
|
|
314
|
+
createBucket,
|
|
315
|
+
listBuckets,
|
|
316
|
+
downloadFileByName,
|
|
317
|
+
uploadFile,
|
|
318
|
+
hideFile,
|
|
319
|
+
getFileInfo,
|
|
320
|
+
listFileNames,
|
|
321
|
+
copyFile,
|
|
322
|
+
startLargeFile,
|
|
323
|
+
uploadPart,
|
|
324
|
+
finishLargeFile,
|
|
325
|
+
cancelLargeFile,
|
|
326
|
+
};
|
|
327
|
+
});
|
|
328
|
+
|
|
329
|
+
type B2Api = (typeof getAPI) extends () => Promise<infer T> ? T : never;
|
|
330
|
+
|
|
331
|
+
|
|
332
|
+
class Backblaze {
|
|
333
|
+
public constructor(private config: {
|
|
334
|
+
bucketName: string;
|
|
335
|
+
public?: boolean;
|
|
336
|
+
immutable?: boolean;
|
|
337
|
+
}) { }
|
|
338
|
+
|
|
339
|
+
private bucketName = this.config.bucketName.replaceAll(/[^\w\d]/g, "-");
|
|
340
|
+
private bucketId = "";
|
|
341
|
+
|
|
342
|
+
// Opt-in debug logging; off by default to keep the console quiet.
private logging = false;
public enableLogging() {
    this.logging = true;
}
// All internal logging funnels through here so enableLogging gates everything.
private log(text: string) {
    if (this.logging) {
        console.log(text);
    }
}

// Stable identifier used by diagnostics to name this storage backend.
public getDebugName() {
    return "backblaze/" + this.config.bucketName;
}
|
|
354
|
+
|
|
355
|
+
// Resolve the API client AND ensure our bucket exists, caching the result.
// createBucket failing with "duplicate_bucket_name" is the normal steady
// state (our bucket already exists); any other creation error propagates.
// If listBuckets then returns nothing, the name belongs to another account.
private getBucketAPI = lazy(async () => {
    let api = await getAPI();
    try {
        await api.createBucket({
            bucketName: this.bucketName,
            bucketType: this.config.public ? "allPublic" : "allPrivate",
            lifecycleRules: [{
                "daysFromUploadingToHiding": null,
                // Hidden files linger 7 days — enough time to recover from an
                // accidental hide (our del() hides rather than deletes).
                "daysFromHidingToDeleting": 7,
                "fileNamePrefix": ""
            }],
            corsRules: this.config.public ? [{
                corsRuleName: "onlyCurrentOrigin",
                allowedOrigins: ["*"],
                allowedOperations: ["b2_download_file_by_id", "b2_download_file_by_name"],
                allowedHeaders: [],
                exposeHeaders: ["x-bz-content-sha1"],
                maxAgeSeconds: this.config.immutable ? 86400 : 0,
            }] : [],
        });
    } catch (e: any) {
        if (!e.stack.includes(`"duplicate_bucket_name"`)) {
            throw e;
        }
    }
    let bucketInfo = await api.listBuckets({ bucketName: this.bucketName });
    if (bucketInfo.buckets.length === 0) {
        throw new Error(`Bucket name "${this.bucketName}" is being used by someone else. Bucket names have to be globally unique. Try a different name until you find a free one.`);
    }
    this.bucketId = bucketInfo.buckets[0].bucketId;
    return api;
});
|
|
390
|
+
|
|
391
|
+
private async apiRetryLogic<T>(
|
|
392
|
+
fnc: (api: B2Api) => Promise<T>,
|
|
393
|
+
retries = 3
|
|
394
|
+
): Promise<T> {
|
|
395
|
+
let api = await this.getBucketAPI();
|
|
396
|
+
try {
|
|
397
|
+
return await fnc(api);
|
|
398
|
+
} catch (err: any) {
|
|
399
|
+
if (retries <= 0) throw err;
|
|
400
|
+
|
|
401
|
+
if (
|
|
402
|
+
err.stack.includes(`no tomes available`)
|
|
403
|
+
|| err.stack.includes(`"service_unavailable"`)
|
|
404
|
+
|| err.stack.includes(`ETIMEDOUT`)
|
|
405
|
+
|| err.stack.includes(`"internal_error"`)
|
|
406
|
+
|| err.stack.includes(`socket hang up`)
|
|
407
|
+
// Eh... this might be bad, but... I think we just get random 400 errors. If this spams errors,
|
|
408
|
+
// we can remove this line.
|
|
409
|
+
|| err.stack.includes(`400 Bad Request`)
|
|
410
|
+
) {
|
|
411
|
+
this.log(err.message + " retrying in 5s");
|
|
412
|
+
await delay(5000);
|
|
413
|
+
return this.apiRetryLogic(fnc, retries - 1);
|
|
414
|
+
}
|
|
415
|
+
|
|
416
|
+
// If the error is that the authorization token is invalid, reset getBucketAPI and getAPI
|
|
417
|
+
// If the error is that the bucket isn't found, reset getBucketAPI
|
|
418
|
+
if (err.stack.includes(`"expired_auth_token"`)) {
|
|
419
|
+
this.log("Authorization token expired");
|
|
420
|
+
getAPI.reset();
|
|
421
|
+
this.getBucketAPI.reset();
|
|
422
|
+
return this.apiRetryLogic(fnc, retries - 1);
|
|
423
|
+
}
|
|
424
|
+
// TODO: Handle if the bucket is deleted?
|
|
425
|
+
throw err;
|
|
426
|
+
}
|
|
427
|
+
}
|
|
428
|
+
|
|
429
|
+
public async get(fileName: string, config?: { range?: { start: number; end: number; }; retryCount?: number }): Promise<Buffer | undefined> {
|
|
430
|
+
let downloading = true;
|
|
431
|
+
try {
|
|
432
|
+
let time = Date.now();
|
|
433
|
+
const downloadPoll = () => {
|
|
434
|
+
if (!downloading) return;
|
|
435
|
+
this.log(`Backblaze download in progress ${fileName}`);
|
|
436
|
+
setTimeout(downloadPoll, 5000);
|
|
437
|
+
};
|
|
438
|
+
setTimeout(downloadPoll, 5000);
|
|
439
|
+
let result = await this.apiRetryLogic(async (api) => {
|
|
440
|
+
return await api.downloadFileByName({
|
|
441
|
+
bucketName: this.bucketName,
|
|
442
|
+
fileName,
|
|
443
|
+
range: config?.range
|
|
444
|
+
});
|
|
445
|
+
});
|
|
446
|
+
let timeStr = formatTime(Date.now() - time);
|
|
447
|
+
let rateStr = formatNumber(result.length / (Date.now() - time) * 1000) + "B/s";
|
|
448
|
+
this.log(`backblaze download (${formatNumber(result.length)}B${config?.range && `, ${formatNumber(config.range.start)} - ${formatNumber(config.range.end)}` || ""}) in ${timeStr} (${rateStr}, ${fileName})`);
|
|
449
|
+
return result;
|
|
450
|
+
} catch (e) {
|
|
451
|
+
this.log(`backblaze file does not exist ${fileName}`);
|
|
452
|
+
return undefined;
|
|
453
|
+
} finally {
|
|
454
|
+
downloading = false;
|
|
455
|
+
}
|
|
456
|
+
}
|
|
457
|
+
public async set(fileName: string, data: Buffer): Promise<void> {
|
|
458
|
+
this.log(`backblaze upload (${formatNumber(data.length)}B) ${fileName}`);
|
|
459
|
+
let f = fileName;
|
|
460
|
+
await this.apiRetryLogic(async (api) => {
|
|
461
|
+
await api.uploadFile({ bucketId: this.bucketId, fileName, data: data, });
|
|
462
|
+
});
|
|
463
|
+
let existsChecks = 30;
|
|
464
|
+
while (existsChecks > 0) {
|
|
465
|
+
let exists = await this.getInfo(fileName);
|
|
466
|
+
if (exists) break;
|
|
467
|
+
await delay(1000);
|
|
468
|
+
existsChecks--;
|
|
469
|
+
}
|
|
470
|
+
if (existsChecks === 0) {
|
|
471
|
+
let exists = await this.getInfo(fileName);
|
|
472
|
+
devDebugbreak();
|
|
473
|
+
console.warn(`File ${fileName}/${f} was uploaded, but could not be found afterwards. Hopefully it was just deleted, very quickly? If backblaze is taking too long for files to propagate, then we might run into issues with the database atomicity.`);
|
|
474
|
+
}
|
|
475
|
+
|
|
476
|
+
}
|
|
477
|
+
public async del(fileName: string): Promise<void> {
|
|
478
|
+
this.log(`backblaze delete ${fileName}`);
|
|
479
|
+
await this.apiRetryLogic(async (api) => {
|
|
480
|
+
try {
|
|
481
|
+
await api.hideFile({ bucketId: this.bucketId, fileName: fileName });
|
|
482
|
+
} catch {
|
|
483
|
+
// Probably already deleted
|
|
484
|
+
}
|
|
485
|
+
});
|
|
486
|
+
|
|
487
|
+
// NOTE: Deletion SEEMS to work. This DOES break if we delete a file which keeps being recreated,
|
|
488
|
+
// ex, the heartbeat.
|
|
489
|
+
// let existsChecks = 10;
|
|
490
|
+
// while (existsChecks > 0) {
|
|
491
|
+
// let exists = await this.getInfo(fileName);
|
|
492
|
+
// if (!exists) break;
|
|
493
|
+
// await delay(1000);
|
|
494
|
+
// existsChecks--;
|
|
495
|
+
// }
|
|
496
|
+
// if (existsChecks === 0) {
|
|
497
|
+
// let exists = await this.getInfo(fileName);
|
|
498
|
+
// devDebugbreak();
|
|
499
|
+
// console.warn(`File ${fileName} was deleted, but was still found afterwards`);
|
|
500
|
+
// exists = await this.getInfo(fileName);
|
|
501
|
+
// }
|
|
502
|
+
}
|
|
503
|
+
|
|
504
|
+
public async setLargeFile(config: { path: string; getNextData(): Promise<Buffer | undefined>; }): Promise<void> {
|
|
505
|
+
|
|
506
|
+
let onError: (() => Promise<void>)[] = [];
|
|
507
|
+
let time = Date.now();
|
|
508
|
+
try {
|
|
509
|
+
let { path } = config;
|
|
510
|
+
// Backblaze requires 5MB chunks. But, larger is more efficient for us.
|
|
511
|
+
const MIN_CHUNK_SIZE = 32 * 1024 * 1024;
|
|
512
|
+
let dataQueue: Buffer[] = [];
|
|
513
|
+
async function getNextData(): Promise<Buffer | undefined> {
|
|
514
|
+
if (dataQueue.length) return dataQueue.shift();
|
|
515
|
+
// Get buffers until we get 5MB, OR, end. Backblaze requires this for large files.
|
|
516
|
+
let totalBytes = 0;
|
|
517
|
+
let buffers: Buffer[] = [];
|
|
518
|
+
while (totalBytes < MIN_CHUNK_SIZE) {
|
|
519
|
+
let data = await config.getNextData();
|
|
520
|
+
if (!data) break;
|
|
521
|
+
totalBytes += data.length;
|
|
522
|
+
buffers.push(data);
|
|
523
|
+
}
|
|
524
|
+
if (!buffers.length) return undefined;
|
|
525
|
+
return Buffer.concat(buffers);
|
|
526
|
+
}
|
|
527
|
+
|
|
528
|
+
let fileName = path;
|
|
529
|
+
let data = await getNextData();
|
|
530
|
+
if (!data?.length) return;
|
|
531
|
+
// Backblaze disallows overly small files
|
|
532
|
+
if (data.length < MIN_CHUNK_SIZE) {
|
|
533
|
+
return await this.set(fileName, data);
|
|
534
|
+
}
|
|
535
|
+
// Backblaze disallows less than 2 chunks
|
|
536
|
+
let secondData = await getNextData();
|
|
537
|
+
if (!secondData?.length) {
|
|
538
|
+
return await this.set(fileName, data);
|
|
539
|
+
}
|
|
540
|
+
// ALSO, if there are two chunks, but one is too small, combine it. This helps allow us never
|
|
541
|
+
// send small chunks.
|
|
542
|
+
if (secondData.length < MIN_CHUNK_SIZE) {
|
|
543
|
+
return await this.set(fileName, Buffer.concat([data, secondData]));
|
|
544
|
+
}
|
|
545
|
+
this.log(`Uploading large file ${config.path}`);
|
|
546
|
+
dataQueue.unshift(data, secondData);
|
|
547
|
+
|
|
548
|
+
|
|
549
|
+
let uploadInfo = await this.apiRetryLogic(async (api) => {
|
|
550
|
+
return await api.startLargeFile({
|
|
551
|
+
bucketId: this.bucketId,
|
|
552
|
+
fileName: fileName,
|
|
553
|
+
contentType: "b2/x-auto",
|
|
554
|
+
fileInfo: {},
|
|
555
|
+
});
|
|
556
|
+
});
|
|
557
|
+
onError.push(async () => {
|
|
558
|
+
await this.apiRetryLogic(async (api) => {
|
|
559
|
+
await api.cancelLargeFile({ fileId: uploadInfo.fileId });
|
|
560
|
+
});
|
|
561
|
+
});
|
|
562
|
+
|
|
563
|
+
const LOG_INTERVAL = timeInMinute;
|
|
564
|
+
let nextLogTime = Date.now() + LOG_INTERVAL;
|
|
565
|
+
|
|
566
|
+
let partNumber = 1;
|
|
567
|
+
let partSha1Array: string[] = [];
|
|
568
|
+
let totalBytes = 0;
|
|
569
|
+
while (true) {
|
|
570
|
+
data = await getNextData();
|
|
571
|
+
if (!data) break;
|
|
572
|
+
// So... if the next chunk is the last one, combine it with the current one. This
|
|
573
|
+
// prevents ANY uploads from being < the threshold, as apparently the "last part"
|
|
574
|
+
// check in backblaze fails when we have to retry an upload (due to "no tomes available").
|
|
575
|
+
// Well it can't fail if even the last part is > 5MB, now can it!
|
|
576
|
+
// BUT, only if this isn't the first chunk, otherwise we might try to send
|
|
577
|
+
// a single chunk, which we can't do.
|
|
578
|
+
if (partSha1Array.length > 0) {
|
|
579
|
+
let maybeLastData = await getNextData();
|
|
580
|
+
if (maybeLastData) {
|
|
581
|
+
if (maybeLastData.length < MIN_CHUNK_SIZE) {
|
|
582
|
+
// It's the last one, so consume it now
|
|
583
|
+
data = Buffer.concat([data, maybeLastData]);
|
|
584
|
+
} else {
|
|
585
|
+
// It's not the last one. Put it back, in case the one AFTER is the last
|
|
586
|
+
// one, in which case we need to merge maybeLastData with the next next data.
|
|
587
|
+
dataQueue.unshift(maybeLastData);
|
|
588
|
+
}
|
|
589
|
+
}
|
|
590
|
+
}
|
|
591
|
+
let sha1 = require("crypto").createHash("sha1");
|
|
592
|
+
sha1.update(data);
|
|
593
|
+
let sha1Hex = sha1.digest("hex");
|
|
594
|
+
partSha1Array.push(sha1Hex);
|
|
595
|
+
await this.apiRetryLogic(async (api) => {
|
|
596
|
+
if (!data) throw new Error("Impossible, data is undefined");
|
|
597
|
+
|
|
598
|
+
let timeStr = formatTime(Date.now() - time);
|
|
599
|
+
let rateStr = formatNumber(totalBytes / (Date.now() - time) * 1000) + "B/s";
|
|
600
|
+
this.log(`Uploading large file part ${partNumber}, uploaded ${blue(formatNumber(totalBytes) + "B")} in ${blue(timeStr)} (${blue(rateStr)}). ${config.path}`);
|
|
601
|
+
totalBytes += data.length;
|
|
602
|
+
|
|
603
|
+
await api.uploadPart({
|
|
604
|
+
fileId: uploadInfo.fileId,
|
|
605
|
+
partNumber: partNumber,
|
|
606
|
+
data: data,
|
|
607
|
+
sha1: sha1Hex,
|
|
608
|
+
});
|
|
609
|
+
});
|
|
610
|
+
partNumber++;
|
|
611
|
+
|
|
612
|
+
if (Date.now() > nextLogTime) {
|
|
613
|
+
nextLogTime = Date.now() + LOG_INTERVAL;
|
|
614
|
+
let timeStr = formatTime(Date.now() - time);
|
|
615
|
+
let rateStr = formatNumber(totalBytes / (Date.now() - time) * 1000) + "B/s";
|
|
616
|
+
console.log(`Still uploading large file at ${Date.now()}. Uploaded ${formatNumber(totalBytes)}B in ${timeStr} (${rateStr}). ${config.path}`);
|
|
617
|
+
}
|
|
618
|
+
}
|
|
619
|
+
this.log(`Finished uploading large file uploaded ${green(formatNumber(totalBytes))}B`);
|
|
620
|
+
|
|
621
|
+
await this.apiRetryLogic(async (api) => {
|
|
622
|
+
await api.finishLargeFile({
|
|
623
|
+
fileId: uploadInfo.fileId,
|
|
624
|
+
partSha1Array: partSha1Array,
|
|
625
|
+
});
|
|
626
|
+
});
|
|
627
|
+
} catch (e: any) {
|
|
628
|
+
for (let c of onError) {
|
|
629
|
+
try {
|
|
630
|
+
await c();
|
|
631
|
+
} catch (e) {
|
|
632
|
+
console.error(`Error during error clean. Ignoring, we will rethrow the original error, path ${config.path}`, e);
|
|
633
|
+
}
|
|
634
|
+
}
|
|
635
|
+
|
|
636
|
+
throw new Error(`Error in setLargeFile for ${config.path}: ${e.stack}`);
|
|
637
|
+
}
|
|
638
|
+
}
|
|
639
|
+
|
|
640
|
+
public async getInfo(fileName: string): Promise<{ writeTime: number; size: number; } | undefined> {
|
|
641
|
+
return await this.apiRetryLogic(async (api) => {
|
|
642
|
+
let info = await api.listFileNames({ bucketId: this.bucketId, prefix: fileName, });
|
|
643
|
+
let file = info.files.find(x => x.fileName === fileName);
|
|
644
|
+
if (!file) {
|
|
645
|
+
this.log(`Backblaze file not exists ${fileName}`);
|
|
646
|
+
return undefined;
|
|
647
|
+
}
|
|
648
|
+
this.log(`Backblaze file exists ${fileName}`);
|
|
649
|
+
return {
|
|
650
|
+
writeTime: file.uploadTimestamp,
|
|
651
|
+
size: file.contentLength,
|
|
652
|
+
};
|
|
653
|
+
});
|
|
654
|
+
}
|
|
655
|
+
|
|
656
|
+
// For example findFileNames("ips/")
|
|
657
|
+
public async find(prefix: string, config?: { shallow?: boolean; type: "files" | "folders" }): Promise<string[]> {
|
|
658
|
+
let result = await this.findInfo(prefix, config);
|
|
659
|
+
return result.map(x => x.path);
|
|
660
|
+
}
|
|
661
|
+
public async findInfo(prefix: string, config?: { shallow?: boolean; type: "files" | "folders" }): Promise<{ path: string; createTime: number; size: number; }[]> {
|
|
662
|
+
return await this.apiRetryLogic(async (api) => {
|
|
663
|
+
if (!config?.shallow && config?.type === "folders") {
|
|
664
|
+
let allFiles = await this.findInfo(prefix);
|
|
665
|
+
let allFolders = new Map<string, { path: string; createTime: number; size: number }>();
|
|
666
|
+
for (let { path, createTime, size } of allFiles) {
|
|
667
|
+
let folder = path.split("/").slice(0, -1).join("/");
|
|
668
|
+
if (!folder) continue;
|
|
669
|
+
allFolders.set(folder, { path: folder, createTime, size });
|
|
670
|
+
}
|
|
671
|
+
return Array.from(allFolders.values());
|
|
672
|
+
}
|
|
673
|
+
let files = new Map<string, { path: string; createTime: number; size: number; }>();
|
|
674
|
+
let startFileName = "";
|
|
675
|
+
while (true) {
|
|
676
|
+
let result = await api.listFileNames({
|
|
677
|
+
bucketId: this.bucketId,
|
|
678
|
+
prefix: prefix,
|
|
679
|
+
startFileName,
|
|
680
|
+
maxFileCount: 1000,
|
|
681
|
+
delimiter: config?.shallow ? "/" : undefined,
|
|
682
|
+
});
|
|
683
|
+
for (let file of result.files) {
|
|
684
|
+
if (file.action === "upload" && config?.type !== "folders") {
|
|
685
|
+
files.set(file.fileName, { path: file.fileName, createTime: file.uploadTimestamp, size: file.contentLength });
|
|
686
|
+
} else if (file.action === "folder" && config?.type === "folders") {
|
|
687
|
+
let folder = file.fileName;
|
|
688
|
+
if (folder.endsWith("/")) {
|
|
689
|
+
folder = folder.slice(0, -1);
|
|
690
|
+
}
|
|
691
|
+
files.set(folder, { path: folder, createTime: file.uploadTimestamp, size: file.contentLength });
|
|
692
|
+
}
|
|
693
|
+
|
|
694
|
+
}
|
|
695
|
+
startFileName = result.nextFileName;
|
|
696
|
+
if (!startFileName) break;
|
|
697
|
+
}
|
|
698
|
+
return Array.from(files.values());
|
|
699
|
+
});
|
|
700
|
+
}
|
|
701
|
+
|
|
702
|
+
/**
 * Rejects paths whose UTF-8 encoding exceeds 1000 bytes (B2's documented
 * limit is 1024 bytes — see the naming-rules comment below — so this keeps
 * a margin under it).
 * @throws Error when the encoded path is longer than 1000 bytes.
 */
public async assertPathValid(path: string) {
    let bytes = Buffer.from(path, "utf8");
    if (bytes.length > 1000) {
        // Fix: report the BYTE length that was actually checked. The old message
        // printed path.length (UTF-16 code units), which understates the size
        // for multi-byte characters and made the error misleading.
        throw new Error(`Path too long: ${bytes.length} bytes > 1000 bytes. Path: ${path}`);
    }
}
|
|
708
|
+
|
|
709
|
+
/**
 * Moves (or, with copyInstead, copies) a file from this archive to a target
 * archive. Wrapped targets are unwrapped to their base archive first. When the
 * target is another Backblaze archive the server-side copyFile API is used;
 * otherwise the data is downloaded and re-uploaded via target.set().
 * On a plain move, the source is deleted only after the destination is
 * verified to exist.
 */
public async move(config: {
    path: string;
    target: Archives;
    targetPath: string;
    // When set, the source file is kept (copy semantics instead of move).
    copyInstead?: boolean;
}) {
    let { path, target, targetPath } = config;
    // Moving is NOT working. The API call works, but... the file
    // Unwrap any layered/prefixed archive wrappers down to the base archive,
    // accumulating each wrapper's parentPath onto the target path.
    while (true) {
        let targetUnwrapped = target.getBaseArchives?.();
        if (!targetUnwrapped) break;
        target = targetUnwrapped.archives;
        targetPath = targetUnwrapped.parentPath + targetPath;
    }
    // A self move should NOOP (and definitely not copy, and then delete itself!)
    if (target === this && path === targetPath) {
        this.log(`Backblaze move path to itself. Skipping move, as there is no work to do. ${path}`);
        return;
    }
    if (target instanceof Backblaze) {
        let targetBucketId = target.bucketId;
        // Separate instances can still point at the same bucket — also a NOOP.
        if (targetBucketId === this.bucketId && path === targetPath) return;
        await this.apiRetryLogic(async (api) => {
            // Ugh... listing the file name sucks, but... I guess it's still better than
            // downloading and re-uploading the entire file.
            let info = await api.listFileNames({ bucketId: this.bucketId, prefix: path, });
            let file = info.files.find(x => x.fileName === path);
            if (!file) throw new Error(`File not found to move: ${path}`);
            // Server-side copy; avoids moving the bytes through this process.
            await api.copyFile({
                sourceFileId: file.fileId,
                fileName: targetPath,
                destinationBucketId: targetBucketId,
            });
        });
    } else {
        // Non-Backblaze target: fall back to a full download + upload.
        let data = await this.get(path);
        if (!data) throw new Error(`File not found to move: ${path}`);
        await target.set(targetPath, data);
    }

    if (!config.copyInstead) {
        // NOTE(review): this verifies targetPath via THIS archive's bucket, but the
        // copy above may have landed in a different bucket (targetBucketId) or a
        // non-Backblaze target entirely — in those cases this check looks in the
        // wrong place and may wrongly report "not found", leaving both files.
        // Verify against the target archive instead — TODO confirm intent.
        let exists = await this.getInfo(targetPath);
        if (!exists) {
            debugbreak(2);
            debugger;
            console.error(`File not found after move. Leaving BOTH files. ${targetPath} was not found. Being moved from ${path}`);
        } else {
            // Destination confirmed; complete the move by deleting the source.
            await this.del(path);
        }
    }
}
|
|
760
|
+
|
|
761
|
+
public async copy(config: {
|
|
762
|
+
path: string;
|
|
763
|
+
target: Archives;
|
|
764
|
+
targetPath: string;
|
|
765
|
+
}): Promise<void> {
|
|
766
|
+
return this.move({ ...config, copyInstead: true });
|
|
767
|
+
}
|
|
768
|
+
}
|
|
769
|
+
|
|
770
|
+
/*
|
|
771
|
+
Names should be a UTF-8 string up to 1024 bytes with the following exceptions:
|
|
772
|
+
Character codes below 32 are not allowed.
|
|
773
|
+
DEL characters (127) are not allowed.
|
|
774
|
+
Backslashes are not allowed.
|
|
775
|
+
File names cannot start with /, end with /, or contain //.
|
|
776
|
+
*/
|
|
777
|
+
|
|
778
|
+
|
|
779
|
+
export const getArchivesBackblaze = cache((domain: string) => {
|
|
780
|
+
const archivesBackblaze: Archives = new Backblaze({ bucketName: domain });
|
|
781
|
+
|
|
782
|
+
return archivesBackblaze;
|
|
783
|
+
});
|
|
784
|
+
export const getArchivesBackblazePublicImmutable = cache((domain: string) => {
|
|
785
|
+
const archivesBackblaze: Archives = new Backblaze({
|
|
786
|
+
bucketName: domain + "-public-immutable",
|
|
787
|
+
public: true,
|
|
788
|
+
immutable: true
|
|
789
|
+
});
|
|
790
|
+
|
|
791
|
+
return archivesBackblaze;
|
|
792
|
+
});
|