@arke-institute/sdk 2.1.0 → 2.3.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/{crypto-CQnwqWQn.d.ts → crypto-7c990p-j.d.ts} +40 -16
- package/dist/{crypto-iYgzUi77.d.cts → crypto-El5Z3bNI.d.cts} +40 -16
- package/dist/generated/index.d.cts +389 -140
- package/dist/generated/index.d.ts +389 -140
- package/dist/index.cjs +294 -210
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +292 -210
- package/dist/index.js.map +1 -1
- package/dist/operations/index.cjs +280 -209
- package/dist/operations/index.cjs.map +1 -1
- package/dist/operations/index.d.cts +25 -9
- package/dist/operations/index.d.ts +25 -9
- package/dist/operations/index.js +280 -209
- package/dist/operations/index.js.map +1 -1
- package/openapi/spec.json +598 -227
- package/openapi/version.json +1 -1
- package/package.json +1 -1
@@ -68,6 +68,15 @@ async function verifyCid(data, expectedCid) {
 }
 
 // src/operations/upload/engine.ts
+var PHASE_COUNT = 4;
+var PHASE_INDEX = {
+  "computing-cids": 0,
+  "creating": 1,
+  "backlinking": 2,
+  "uploading": 3,
+  "complete": 4,
+  "error": -1
+};
 async function parallelLimit(items, concurrency, fn) {
   const results = [];
   let index = 0;
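The new PHASE_INDEX map gives consumers a stable ordering for the phases the progress callback reports. A minimal sketch of a consumer (hypothetical, not part of the SDK) rendering the phaseIndex/phaseCount/phasePercent fields emitted by reportProgress later in this diff:

// Hypothetical progress renderer; assumes the payload shape
// { phase, phaseIndex, phaseCount, phasePercent } shown below.
function renderProgress(p) {
  if (p.phase === "error") {
    console.error("upload failed");
    return;
  }
  // "complete" carries index 4 while PHASE_COUNT is 4, so clamp the step display.
  const step = Math.min(p.phaseIndex + 1, p.phaseCount);
  console.log(`[${step}/${p.phaseCount}] ${p.phase} ${p.phasePercent}%`);
}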
@@ -82,24 +91,83 @@ async function parallelLimit(items, concurrency, fn) {
   await Promise.all(workers);
   return results;
 }
+var TARGET_BYTES_IN_FLIGHT = 200 * 1024 * 1024;
+var BytePool = class {
+  constructor(targetBytes = TARGET_BYTES_IN_FLIGHT) {
+    this.targetBytes = targetBytes;
+    this.bytesInFlight = 0;
+    this.waitQueue = [];
+  }
+  async run(size, fn) {
+    while (this.bytesInFlight > 0 && this.bytesInFlight + size > this.targetBytes) {
+      await new Promise((resolve) => this.waitQueue.push(resolve));
+    }
+    this.bytesInFlight += size;
+    try {
+      return await fn();
+    } finally {
+      this.bytesInFlight -= size;
+      const next = this.waitQueue.shift();
+      if (next) next();
+    }
+  }
+};
 function getParentPath(relativePath) {
   const lastSlash = relativePath.lastIndexOf("/");
   if (lastSlash === -1) return null;
   return relativePath.slice(0, lastSlash);
 }
+function groupFoldersByDepth(folders) {
+  const byDepth = /* @__PURE__ */ new Map();
+  for (const folder of folders) {
+    const depth = folder.relativePath.split("/").length - 1;
+    if (!byDepth.has(depth)) byDepth.set(depth, []);
+    byDepth.get(depth).push(folder);
+  }
+  return byDepth;
+}
 async function uploadTree(client, tree, options) {
-  const { target, onProgress, concurrency = …
+  const { target, onProgress, concurrency = 10, continueOnError = false, note } = options;
   const errors = [];
   const createdFolders = [];
   const createdFiles = [];
+  const foldersByPath = /* @__PURE__ */ new Map();
+  const totalEntities = tree.files.length + tree.folders.length;
+  const totalBytes = tree.files.reduce((sum, f) => sum + f.size, 0);
+  let completedEntities = 0;
+  let bytesUploaded = 0;
   const reportProgress = (progress) => {
     if (onProgress) {
+      const phase = progress.phase || "computing-cids";
+      const phaseIndex = PHASE_INDEX[phase] ?? -1;
+      let phasePercent = 0;
+      if (phase === "computing-cids") {
+        const done = progress.completedEntities ?? completedEntities;
+        phasePercent = tree.files.length > 0 ? Math.round(done / tree.files.length * 100) : 100;
+      } else if (phase === "creating") {
+        const done = progress.completedEntities ?? completedEntities;
+        phasePercent = totalEntities > 0 ? Math.round(done / totalEntities * 100) : 100;
+      } else if (phase === "backlinking") {
+        const done = progress.completedParents ?? 0;
+        const total = progress.totalParents ?? 0;
+        phasePercent = total > 0 ? Math.round(done / total * 100) : 100;
+      } else if (phase === "uploading") {
+        const done = progress.bytesUploaded ?? bytesUploaded;
+        phasePercent = totalBytes > 0 ? Math.round(done / totalBytes * 100) : 100;
+      } else if (phase === "complete") {
+        phasePercent = 100;
+      }
       onProgress({
-        phase…
-        …
+        phase,
+        phaseIndex,
+        phaseCount: PHASE_COUNT,
+        phasePercent,
+        totalEntities,
+        completedEntities,
+        totalParents: 0,
+        completedParents: 0,
+        totalBytes,
+        bytesUploaded,
         ...progress
       });
     }
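The BytePool above replaces per-file concurrency limits for the content-upload phase with a byte budget: roughly 200 MB may be in flight at once, regardless of how many files that is. Because admission only blocks while bytesInFlight > 0, a single file larger than the budget is still admitted once the pool drains, so oversized files cannot deadlock. A minimal usage sketch (uploads and doUpload are hypothetical stand-ins, not SDK names):

// Admission is by payload size: many small uploads or one large one can
// hold the ~200 MB budget at a time.
const pool = new BytePool();
await Promise.all(
  uploads.map((u) => pool.run(u.size, () => doUpload(u)))
);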
@@ -109,7 +177,6 @@ async function uploadTree(client, tree, options) {
   let collectionCid;
   let collectionCreated = false;
   if (target.createCollection) {
-    reportProgress({ phase: "scanning", currentFolder: "Creating collection..." });
     const collectionBody = {
       label: target.createCollection.label,
       description: target.createCollection.description,
@@ -138,26 +205,19 @@ async function uploadTree(client, tree, options) {
     throw new Error("Must provide either collectionId or createCollection in target");
   }
   const rootParentId = target.parentId ?? collectionId;
-  reportProgress({
-    phase: "computing-cids",
-    totalFiles: tree.files.length,
-    completedFiles: 0
-  });
+  reportProgress({ phase: "computing-cids", completedEntities: 0 });
   const preparedFiles = [];
   let cidProgress = 0;
-  await parallelLimit(tree.files, concurrency, async (file) => {
+  await parallelLimit(tree.files, Math.max(concurrency, 20), async (file) => {
     try {
       const data = await file.getData();
       const cid = await computeCid(data);
-      preparedFiles.push({
-        ...file,
-        cid
-      });
+      preparedFiles.push({ ...file, cid });
       cidProgress++;
       reportProgress({
         phase: "computing-cids",
-        …
+        completedEntities: cidProgress,
+        currentItem: file.relativePath
       });
     } catch (err) {
       const errorMsg = err instanceof Error ? err.message : String(err);
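CID computation now runs at Math.max(concurrency, 20), i.e. at least 20 hashing workers regardless of the caller's setting. The diff shows only the tail of parallelLimit; a typical worker-pool implementation consistent with the visible fragments (results array, shared index, workers awaited together) would be:

async function parallelLimit(items, concurrency, fn) {
  const results = [];
  let index = 0;
  const workers = Array.from({ length: Math.min(concurrency, items.length) }, async () => {
    while (index < items.length) {
      const i = index++;               // claim the next item; single-threaded JS, no race
      results[i] = await fn(items[i]); // keep results aligned with input order
    }
  });
  await Promise.all(workers);
  return results;
}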
@@ -168,74 +228,74 @@ async function uploadTree(client, tree, options) {
       }
     }
   });
-  reportProgress({
-  …
+  reportProgress({ phase: "creating", completedEntities: 0 });
+  const foldersByDepth = groupFoldersByDepth(tree.folders);
+  const sortedDepths = [...foldersByDepth.keys()].sort((a, b) => a - b);
+  for (const depth of sortedDepths) {
+    const foldersAtDepth = foldersByDepth.get(depth);
+    await Promise.all(
+      foldersAtDepth.map(async (folder) => {
+        try {
+          const parentPath = getParentPath(folder.relativePath);
+          const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+          const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
+          const folderBody = {
+            label: folder.name,
+            collection: collectionId,
+            note,
+            relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
+          };
+          const { data, error } = await client.api.POST("/folders", {
+            body: folderBody
+          });
+          if (error || !data) {
+            throw new Error(JSON.stringify(error));
+          }
+          foldersByPath.set(folder.relativePath, { id: data.id, cid: data.cid });
+          createdFolders.push({
+            name: folder.name,
+            relativePath: folder.relativePath,
+            id: data.id,
+            entityCid: data.cid
+          });
+          completedEntities++;
+          reportProgress({
+            phase: "creating",
+            completedEntities,
+            currentItem: folder.relativePath
+          });
+        } catch (err) {
+          const errorMsg = err instanceof Error ? err.message : String(err);
+          if (continueOnError) {
+            errors.push({ path: folder.relativePath, error: `Folder creation failed: ${errorMsg}` });
+            completedEntities++;
+          } else {
+            throw new Error(`Failed to create folder ${folder.relativePath}: ${errorMsg}`);
+          }
+        }
+      })
+    );
   }
-  …
-    totalFiles: preparedFiles.length,
-    completedFiles: 0
-  });
-  let fileCreateProgress = 0;
-  await parallelLimit(preparedFiles, concurrency, async (file) => {
+  const FILE_CREATION_CONCURRENCY = 50;
+  await parallelLimit(preparedFiles, FILE_CREATION_CONCURRENCY, async (file) => {
     try {
+      const parentPath = getParentPath(file.relativePath);
+      const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+      const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
       const fileBody = {
         key: file.cid,
-        // Use CID as storage key (best practice)
         filename: file.name,
         content_type: file.mimeType,
         size: file.size,
         cid: file.cid,
-        collection: collectionId
+        collection: collectionId,
+        relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
       };
       const { data, error } = await client.api.POST("/files", {
         body: fileBody
       });
       if (error || !data) {
-        throw new Error(JSON.stringify(error));
+        throw new Error(`Entity creation failed: ${JSON.stringify(error)}`);
       }
       createdFiles.push({
         ...file,
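Folder creation is now ordered by path depth, so a folder's parent is always created (and recorded in foldersByPath with its server-assigned id) before the folder itself, while each depth level still runs fully in parallel. An illustration with hypothetical input:

const folders = [
  { relativePath: "photos" },      // depth 0
  { relativePath: "photos/2024" }, // depth 1
  { relativePath: "docs" }         // depth 0
];
const byDepth = groupFoldersByDepth(folders);
// byDepth.get(0) -> the "photos" and "docs" entries; byDepth.get(1) -> "photos/2024"
// uploadTree walks depths in ascending order, Promise.all within each level.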
@@ -244,166 +304,177 @@ async function uploadTree(client, tree, options) {
         uploadUrl: data.upload_url,
         uploadExpiresAt: data.upload_expires_at
       });
-      …
+      completedEntities++;
       reportProgress({
-        phase: "creating…
-        …
+        phase: "creating",
+        completedEntities,
+        currentItem: file.relativePath
       });
     } catch (err) {
       const errorMsg = err instanceof Error ? err.message : String(err);
       if (continueOnError) {
-        errors.push({ path: file.relativePath, error: …
+        errors.push({ path: file.relativePath, error: errorMsg });
+        completedEntities++;
       } else {
         throw new Error(`Failed to create file ${file.relativePath}: ${errorMsg}`);
       }
     }
   });
-  const …
-  let bytesUploaded = 0;
-  reportProgress({
-    phase: "uploading-content",
-    totalFiles: createdFiles.length,
-    completedFiles: 0,
-    totalBytes,
-    bytesUploaded: 0
-  });
-  let uploadProgress = 0;
-  await parallelLimit(createdFiles, concurrency, async (file) => {
-    try {
-      const data = await file.getData();
-      let body;
-      if (data instanceof Blob) {
-        body = data;
-      } else if (data instanceof Uint8Array) {
-        const arrayBuffer = new ArrayBuffer(data.byteLength);
-        new Uint8Array(arrayBuffer).set(data);
-        body = new Blob([arrayBuffer], { type: file.mimeType });
-      } else {
-        body = new Blob([data], { type: file.mimeType });
-      }
-      const response = await fetch(file.uploadUrl, {
-        method: "PUT",
-        body,
-        headers: {
-          "Content-Type": file.mimeType
-        }
-      });
-      if (!response.ok) {
-        throw new Error(`Upload failed with status ${response.status}`);
-      }
-      bytesUploaded += file.size;
-      uploadProgress++;
-      reportProgress({
-        phase: "uploading-content",
-        completedFiles: uploadProgress,
-        currentFile: file.relativePath,
-        bytesUploaded,
-        totalBytes
-      });
-    } catch (err) {
-      const errorMsg = err instanceof Error ? err.message : String(err);
-      if (continueOnError) {
-        errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
-      } else {
-        throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
-      }
-    }
-  });
-  reportProgress({ phase: "linking" });
-  const filePathToEntity = /* @__PURE__ */ new Map();
-  for (const file of createdFiles) {
-    filePathToEntity.set(file.relativePath, file);
-  }
-  const parentGroups = /* @__PURE__ */ new Map();
+  const childrenByParent = /* @__PURE__ */ new Map();
   for (const folder of createdFolders) {
     const parentPath = getParentPath(folder.relativePath);
-    …
-    if (…
-    } else {
-      const parentFolder = folderPathToEntity.get(parentPath);
-      if (!parentFolder) {
-        errors.push({
-          path: folder.relativePath,
-          error: `Parent folder not found: ${parentPath}`
-        });
-        continue;
-      }
-      parentId = parentFolder.id;
-    }
-    if (!parentGroups.has(parentId)) {
-      parentGroups.set(parentId, { folderId: parentId, children: [] });
-    }
-    parentGroups.get(parentId).children.push({ id: folder.id });
+    const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+    if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
+    childrenByParent.get(parentId).push({ id: folder.id, type: "folder" });
   }
   for (const file of createdFiles) {
     const parentPath = getParentPath(file.relativePath);
-    …
-    if (…
-    } else {
-      const parentFolder = folderPathToEntity.get(parentPath);
-      if (!parentFolder) {
-        errors.push({
-          path: file.relativePath,
-          error: `Parent folder not found: ${parentPath}`
-        });
-        continue;
-      }
-      parentId = parentFolder.id;
-    }
-    if (!parentGroups.has(parentId)) {
-      parentGroups.set(parentId, { folderId: parentId, children: [] });
-    }
-    parentGroups.get(parentId).children.push({ id: file.id });
+    const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+    if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
+    childrenByParent.get(parentId).push({ id: file.id, type: "file" });
   }
-  …
+  const totalParents = childrenByParent.size;
+  let completedParents = 0;
+  reportProgress({ phase: "backlinking", totalParents, completedParents: 0 });
+  const parentEntries = [...childrenByParent.entries()];
+  await parallelLimit(parentEntries, concurrency, async ([parentId, children]) => {
     try {
-      …
+      const isCollection = parentId === collectionId;
+      const relationshipsAdd = children.map((child) => ({
+        predicate: "contains",
+        peer: child.id,
+        peer_type: child.type
+      }));
+      if (isCollection) {
+        const { data: collData, error: getError } = await client.api.GET("/collections/{id}", {
+          params: { path: { id: parentId } }
+        });
+        if (getError || !collData) {
+          throw new Error(`Failed to fetch collection: ${JSON.stringify(getError)}`);
+        }
+        const updateBody = {
+          expect_tip: collData.cid,
+          relationships_add: relationshipsAdd,
+          note: note ? `${note} (backlink)` : "Upload backlink"
+        };
+        const { error } = await client.api.PUT("/collections/{id}", {
+          params: { path: { id: parentId } },
+          body: updateBody
         });
-        if (…
-          throw new Error(…
+        if (error) {
+          throw new Error(JSON.stringify(error));
         }
-        expectTip = data.cid;
       } else {
-        const { data, error: …
+        const { data: folderData, error: getError } = await client.api.GET("/folders/{id}", {
          params: { path: { id: parentId } }
        });
-        if (…
-          throw new Error(`Failed to fetch folder…
+        if (getError || !folderData) {
+          throw new Error(`Failed to fetch folder: ${JSON.stringify(getError)}`);
+        }
+        const updateBody = {
+          expect_tip: folderData.cid,
+          relationships_add: relationshipsAdd,
+          note: note ? `${note} (backlink)` : "Upload backlink"
+        };
+        const { error } = await client.api.PUT("/folders/{id}", {
+          params: { path: { id: parentId } },
+          body: updateBody
+        });
+        if (error) {
+          throw new Error(JSON.stringify(error));
        }
-        expectTip = data.cid;
      }
-      …
-        params: { path: { id: parentId } },
-        body: bulkBody
+      completedParents++;
+      reportProgress({
+        phase: "backlinking",
+        totalParents,
+        completedParents,
+        currentItem: `parent:${parentId}`
      });
-      if (error) {
-        throw new Error(JSON.stringify(error));
-      }
    } catch (err) {
      const errorMsg = err instanceof Error ? err.message : String(err);
      if (continueOnError) {
-        errors.push({
-          …
-          error: `Bulk linking failed: ${errorMsg}`
-        });
+        errors.push({ path: `parent:${parentId}`, error: `Backlink failed: ${errorMsg}` });
+        completedParents++;
      } else {
-        throw new Error(`Failed to…
+        throw new Error(`Failed to backlink parent ${parentId}: ${errorMsg}`);
      }
    }
-  }
-  reportProgress({ phase: "…
+  });
+  reportProgress({ phase: "uploading", bytesUploaded: 0 });
+  const pool = new BytePool();
+  await Promise.all(
+    createdFiles.map(async (file) => {
+      await pool.run(file.size, async () => {
+        try {
+          const fileData = await file.getData();
+          let body;
+          if (fileData instanceof Blob) {
+            body = fileData;
+          } else if (fileData instanceof Uint8Array) {
+            const arrayBuffer = new ArrayBuffer(fileData.byteLength);
+            new Uint8Array(arrayBuffer).set(fileData);
+            body = new Blob([arrayBuffer], { type: file.mimeType });
+          } else {
+            body = new Blob([fileData], { type: file.mimeType });
+          }
+          const uploadResponse = await fetch(file.uploadUrl, {
+            method: "PUT",
+            body,
+            headers: { "Content-Type": file.mimeType }
+          });
+          if (!uploadResponse.ok) {
+            throw new Error(`S3 upload failed with status ${uploadResponse.status}`);
+          }
+          let confirmTip = file.entityCid;
+          let confirmAttempts = 0;
+          const MAX_CONFIRM_ATTEMPTS = 3;
+          while (confirmAttempts < MAX_CONFIRM_ATTEMPTS) {
+            confirmAttempts++;
+            const { error: confirmError } = await client.api.POST("/files/{id}/confirm-upload", {
+              params: { path: { id: file.id } },
+              body: {
+                expect_tip: confirmTip,
+                note: note ? `${note} (confirmed)` : "Upload confirmed"
+              }
+            });
+            if (!confirmError) {
+              break;
+            }
+            const errorStr = JSON.stringify(confirmError);
+            if (errorStr.includes("409") || errorStr.includes("CAS") || errorStr.includes("conflict")) {
+              const { data: currentFile, error: fetchError } = await client.api.GET("/files/{id}", {
+                params: { path: { id: file.id } }
+              });
+              if (fetchError || !currentFile) {
+                throw new Error(`Failed to fetch file for confirm retry: ${JSON.stringify(fetchError)}`);
+              }
+              confirmTip = currentFile.cid;
+            } else {
+              throw new Error(`Confirm upload failed: ${errorStr}`);
+            }
+          }
+          if (confirmAttempts >= MAX_CONFIRM_ATTEMPTS) {
+            throw new Error(`Confirm upload failed after ${MAX_CONFIRM_ATTEMPTS} CAS retries`);
+          }
+          bytesUploaded += file.size;
+          reportProgress({
+            phase: "uploading",
+            bytesUploaded,
+            currentItem: file.relativePath
+          });
+        } catch (err) {
+          const errorMsg = err instanceof Error ? err.message : String(err);
+          if (continueOnError) {
+            errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
+          } else {
+            throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
+          }
+        }
+      });
+    })
+  );
+  reportProgress({ phase: "complete", totalParents, completedParents, bytesUploaded });
   const resultFolders = createdFolders.map((f) => ({
     id: f.id,
     cid: f.entityCid,
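The confirm-upload step added above is an optimistic-concurrency (CAS) loop: each attempt sends the expected entity tip (expect_tip), and on a conflict it refetches the file's current cid and retries, up to three times. The generic shape, with fetchTip and tryUpdate as hypothetical stand-ins for the GET /files/{id} and POST /files/{id}/confirm-upload calls:

async function casUpdate(fetchTip, tryUpdate, maxAttempts = 3) {
  let tip = await fetchTip(); // in the diff, the first tip is the cid returned at entity creation
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    const { ok, conflict } = await tryUpdate(tip);
    if (ok) return;
    if (!conflict) throw new Error("non-CAS failure");
    tip = await fetchTip(); // another writer advanced the entity; retry against the new tip
  }
  throw new Error(`CAS update failed after ${maxAttempts} attempts`);
}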
@@ -714,12 +785,12 @@ var FolderOperations = class {
       concurrency: options.concurrency,
       onProgress: options.onProgress ? (p) => {
         options.onProgress({
-          phase: p.phase === "computing-cids"…
-          totalFiles: p.…
-          completedFiles: p.…
-          totalFolders: p.…
-          completedFolders: p.…
-          currentFile: p.…
+          phase: p.phase === "computing-cids" ? "creating-folders" : p.phase === "creating" ? "uploading-files" : p.phase === "backlinking" ? "linking" : p.phase === "complete" ? "complete" : "scanning",
+          totalFiles: p.totalEntities,
+          completedFiles: p.completedEntities,
+          totalFolders: p.totalParents,
+          completedFolders: p.completedParents,
+          currentFile: p.currentItem
         });
       } : void 0
     });