@arke-institute/sdk 2.1.0 → 2.2.0
This diff shows the content of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/dist/{crypto-CQnwqWQn.d.ts → crypto-7c990p-j.d.ts} +40 -16
- package/dist/{crypto-iYgzUi77.d.cts → crypto-El5Z3bNI.d.cts} +40 -16
- package/dist/generated/index.d.cts +385 -134
- package/dist/generated/index.d.ts +385 -134
- package/dist/index.cjs +294 -210
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +292 -210
- package/dist/index.js.map +1 -1
- package/dist/operations/index.cjs +280 -209
- package/dist/operations/index.cjs.map +1 -1
- package/dist/operations/index.d.cts +25 -9
- package/dist/operations/index.d.ts +25 -9
- package/dist/operations/index.js +280 -209
- package/dist/operations/index.js.map +1 -1
- package/openapi/spec.json +588 -205
- package/openapi/version.json +1 -1
- package/package.json +1 -1
package/dist/operations/index.js
CHANGED
@@ -22,6 +22,15 @@ async function verifyCid(data, expectedCid) {
 }
 
 // src/operations/upload/engine.ts
+var PHASE_COUNT = 4;
+var PHASE_INDEX = {
+  "computing-cids": 0,
+  "creating": 1,
+  "backlinking": 2,
+  "uploading": 3,
+  "complete": 4,
+  "error": -1
+};
 async function parallelLimit(items, concurrency, fn) {
   const results = [];
   let index = 0;
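
Reviewer note: 2.2.0 replaces the old free-form progress phases with a fixed four-step model. A minimal sketch of a consumer, assuming the progress shape assembled by reportProgress later in this file (phase, phaseIndex, phaseCount, phasePercent, currentItem); renderProgress is a hypothetical helper, not part of the package:

// Hypothetical consumer of the new phase-aware progress object.
function renderProgress(p) {
  if (p.phase === "error") return;
  const step = p.phaseIndex + 1; // phaseIndex is 0-based; "error" maps to -1
  console.log(`[${step}/${p.phaseCount}] ${p.phase}: ${p.phasePercent}%${p.currentItem ? " " + p.currentItem : ""}`);
}

renderProgress({ phase: "uploading", phaseIndex: 3, phaseCount: 4, phasePercent: 42, currentItem: "docs/readme.md" });
// → [4/4] uploading: 42% docs/readme.md
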
@@ -36,24 +45,83 @@ async function parallelLimit(items, concurrency, fn) {
   await Promise.all(workers);
   return results;
 }
+var TARGET_BYTES_IN_FLIGHT = 200 * 1024 * 1024;
+var BytePool = class {
+  constructor(targetBytes = TARGET_BYTES_IN_FLIGHT) {
+    this.targetBytes = targetBytes;
+    this.bytesInFlight = 0;
+    this.waitQueue = [];
+  }
+  async run(size, fn) {
+    while (this.bytesInFlight > 0 && this.bytesInFlight + size > this.targetBytes) {
+      await new Promise((resolve) => this.waitQueue.push(resolve));
+    }
+    this.bytesInFlight += size;
+    try {
+      return await fn();
+    } finally {
+      this.bytesInFlight -= size;
+      const next = this.waitQueue.shift();
+      if (next) next();
+    }
+  }
+};
 function getParentPath(relativePath) {
   const lastSlash = relativePath.lastIndexOf("/");
   if (lastSlash === -1) return null;
   return relativePath.slice(0, lastSlash);
 }
+function groupFoldersByDepth(folders) {
+  const byDepth = /* @__PURE__ */ new Map();
+  for (const folder of folders) {
+    const depth = folder.relativePath.split("/").length - 1;
+    if (!byDepth.has(depth)) byDepth.set(depth, []);
+    byDepth.get(depth).push(folder);
+  }
+  return byDepth;
+}
 async function uploadTree(client, tree, options) {
-  const { target, onProgress, concurrency =
+  const { target, onProgress, concurrency = 10, continueOnError = false, note } = options;
   const errors = [];
   const createdFolders = [];
   const createdFiles = [];
+  const foldersByPath = /* @__PURE__ */ new Map();
+  const totalEntities = tree.files.length + tree.folders.length;
+  const totalBytes = tree.files.reduce((sum, f) => sum + f.size, 0);
+  let completedEntities = 0;
+  let bytesUploaded = 0;
   const reportProgress = (progress) => {
     if (onProgress) {
+      const phase = progress.phase || "computing-cids";
+      const phaseIndex = PHASE_INDEX[phase] ?? -1;
+      let phasePercent = 0;
+      if (phase === "computing-cids") {
+        const done = progress.completedEntities ?? completedEntities;
+        phasePercent = tree.files.length > 0 ? Math.round(done / tree.files.length * 100) : 100;
+      } else if (phase === "creating") {
+        const done = progress.completedEntities ?? completedEntities;
+        phasePercent = totalEntities > 0 ? Math.round(done / totalEntities * 100) : 100;
+      } else if (phase === "backlinking") {
+        const done = progress.completedParents ?? 0;
+        const total = progress.totalParents ?? 0;
+        phasePercent = total > 0 ? Math.round(done / total * 100) : 100;
+      } else if (phase === "uploading") {
+        const done = progress.bytesUploaded ?? bytesUploaded;
+        phasePercent = totalBytes > 0 ? Math.round(done / totalBytes * 100) : 100;
+      } else if (phase === "complete") {
+        phasePercent = 100;
+      }
       onProgress({
-        phase
-        […4 lines truncated…]
+        phase,
+        phaseIndex,
+        phaseCount: PHASE_COUNT,
+        phasePercent,
+        totalEntities,
+        completedEntities,
+        totalParents: 0,
+        completedParents: 0,
+        totalBytes,
+        bytesUploaded,
         ...progress
       });
     }
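
Reviewer note: BytePool is a size-aware admission gate — it caps bytes in flight (default 200 MiB) rather than task count, and the `bytesInFlight > 0` guard means an item larger than the entire budget still runs alone instead of deadlocking. A standalone sketch with a tiny budget, assuming the BytePool class shown in the hunk above:

const demoPool = new BytePool(100); // 100-byte budget instead of 200 MiB
await Promise.all(
  [60, 60, 150].map((size, i) =>
    demoPool.run(size, async () => {
      // the 150-byte task exceeds the whole budget, but is admitted once the pool drains
      console.log(`task ${i} started with ${demoPool.bytesInFlight} bytes in flight`);
      await new Promise((r) => setTimeout(r, 10)); // stand-in for a real PUT
    })
  )
);
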
@@ -63,7 +131,6 @@ async function uploadTree(client, tree, options) {
   let collectionCid;
   let collectionCreated = false;
   if (target.createCollection) {
-    reportProgress({ phase: "scanning", currentFolder: "Creating collection..." });
     const collectionBody = {
       label: target.createCollection.label,
       description: target.createCollection.description,
@@ -92,26 +159,19 @@ async function uploadTree(client, tree, options) {
     throw new Error("Must provide either collectionId or createCollection in target");
   }
   const rootParentId = target.parentId ?? collectionId;
-  reportProgress({
-    phase: "computing-cids",
-    totalFiles: tree.files.length,
-    completedFiles: 0
-  });
+  reportProgress({ phase: "computing-cids", completedEntities: 0 });
   const preparedFiles = [];
   let cidProgress = 0;
-  await parallelLimit(tree.files, concurrency, async (file) => {
+  await parallelLimit(tree.files, Math.max(concurrency, 20), async (file) => {
     try {
       const data = await file.getData();
       const cid = await computeCid(data);
-      preparedFiles.push({
-        ...file,
-        cid
-      });
+      preparedFiles.push({ ...file, cid });
       cidProgress++;
       reportProgress({
         phase: "computing-cids",
-        […2 lines truncated…]
+        completedEntities: cidProgress,
+        currentItem: file.relativePath
       });
     } catch (err) {
       const errorMsg = err instanceof Error ? err.message : String(err);
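
Reviewer note: the CID pass now applies a floor of 20 parallel hashing tasks regardless of the caller's concurrency option (whose default moved to 10), presumably because hashing is much cheaper than the network-bound steps. The effective worker count per the Math.max call above:

// Effective CID-hashing parallelism for a few concurrency option values.
for (const c of [1, 10, 50]) {
  console.log(`concurrency: ${c} → CID workers: ${Math.max(c, 20)}`);
}
// 1 → 20, 10 → 20, 50 → 50
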
@@ -122,74 +182,74 @@ async function uploadTree(client, tree, options) {
       }
     }
   });
-  reportProgress({
-  […44 lines truncated…]
+  reportProgress({ phase: "creating", completedEntities: 0 });
+  const foldersByDepth = groupFoldersByDepth(tree.folders);
+  const sortedDepths = [...foldersByDepth.keys()].sort((a, b) => a - b);
+  for (const depth of sortedDepths) {
+    const foldersAtDepth = foldersByDepth.get(depth);
+    await Promise.all(
+      foldersAtDepth.map(async (folder) => {
+        try {
+          const parentPath = getParentPath(folder.relativePath);
+          const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+          const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
+          const folderBody = {
+            label: folder.name,
+            collection: collectionId,
+            note,
+            relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
+          };
+          const { data, error } = await client.api.POST("/folders", {
+            body: folderBody
+          });
+          if (error || !data) {
+            throw new Error(JSON.stringify(error));
+          }
+          foldersByPath.set(folder.relativePath, { id: data.id, cid: data.cid });
+          createdFolders.push({
+            name: folder.name,
+            relativePath: folder.relativePath,
+            id: data.id,
+            entityCid: data.cid
+          });
+          completedEntities++;
+          reportProgress({
+            phase: "creating",
+            completedEntities,
+            currentItem: folder.relativePath
+          });
+        } catch (err) {
+          const errorMsg = err instanceof Error ? err.message : String(err);
+          if (continueOnError) {
+            errors.push({ path: folder.relativePath, error: `Folder creation failed: ${errorMsg}` });
+            completedEntities++;
+          } else {
+            throw new Error(`Failed to create folder ${folder.relativePath}: ${errorMsg}`);
+          }
+        }
+      })
+    );
   }
-  […2 lines truncated…]
-    totalFiles: preparedFiles.length,
-    completedFiles: 0
-  });
-  let fileCreateProgress = 0;
-  await parallelLimit(preparedFiles, concurrency, async (file) => {
+  const FILE_CREATION_CONCURRENCY = 50;
+  await parallelLimit(preparedFiles, FILE_CREATION_CONCURRENCY, async (file) => {
     try {
+      const parentPath = getParentPath(file.relativePath);
+      const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+      const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
       const fileBody = {
         key: file.cid,
-        // Use CID as storage key (best practice)
         filename: file.name,
         content_type: file.mimeType,
         size: file.size,
         cid: file.cid,
-        collection: collectionId
+        collection: collectionId,
+        relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
       };
       const { data, error } = await client.api.POST("/files", {
         body: fileBody
       });
       if (error || !data) {
-        throw new Error(JSON.stringify(error));
+        throw new Error(`Entity creation failed: ${JSON.stringify(error)}`);
       }
       createdFiles.push({
         ...file,
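
Reviewer note: folder creation is now wave-ordered by path depth, so every parent is created in an earlier wave and the foldersByPath lookup cannot miss. A small demo of the grouping, assuming folder objects shaped like the tree.folders entries used above:

const demoFolders = [
  { relativePath: "a" },
  { relativePath: "a/b" },
  { relativePath: "a/b/c" },
  { relativePath: "d" }
];
const byDepth = groupFoldersByDepth(demoFolders);
for (const depth of [...byDepth.keys()].sort((x, y) => x - y)) {
  console.log(depth, byDepth.get(depth).map((f) => f.relativePath));
}
// 0 [ 'a', 'd' ]   ← created first, in parallel
// 1 [ 'a/b' ]
// 2 [ 'a/b/c' ]
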
@@ -198,166 +258,177 @@ async function uploadTree(client, tree, options) {
         uploadUrl: data.upload_url,
         uploadExpiresAt: data.upload_expires_at
       });
-      […1 line truncated…]
+      completedEntities++;
       reportProgress({
-        phase: "creating
-        […2 lines truncated…]
+        phase: "creating",
+        completedEntities,
+        currentItem: file.relativePath
       });
     } catch (err) {
       const errorMsg = err instanceof Error ? err.message : String(err);
       if (continueOnError) {
-        errors.push({ path: file.relativePath, error:
+        errors.push({ path: file.relativePath, error: errorMsg });
+        completedEntities++;
       } else {
         throw new Error(`Failed to create file ${file.relativePath}: ${errorMsg}`);
       }
     }
   });
-  const
-  let bytesUploaded = 0;
-  reportProgress({
-    phase: "uploading-content",
-    totalFiles: createdFiles.length,
-    completedFiles: 0,
-    totalBytes,
-    bytesUploaded: 0
-  });
-  let uploadProgress = 0;
-  await parallelLimit(createdFiles, concurrency, async (file) => {
-    try {
-      const data = await file.getData();
-      let body;
-      if (data instanceof Blob) {
-        body = data;
-      } else if (data instanceof Uint8Array) {
-        const arrayBuffer = new ArrayBuffer(data.byteLength);
-        new Uint8Array(arrayBuffer).set(data);
-        body = new Blob([arrayBuffer], { type: file.mimeType });
-      } else {
-        body = new Blob([data], { type: file.mimeType });
-      }
-      const response = await fetch(file.uploadUrl, {
-        method: "PUT",
-        body,
-        headers: {
-          "Content-Type": file.mimeType
-        }
-      });
-      if (!response.ok) {
-        throw new Error(`Upload failed with status ${response.status}`);
-      }
-      bytesUploaded += file.size;
-      uploadProgress++;
-      reportProgress({
-        phase: "uploading-content",
-        completedFiles: uploadProgress,
-        currentFile: file.relativePath,
-        bytesUploaded,
-        totalBytes
-      });
-    } catch (err) {
-      const errorMsg = err instanceof Error ? err.message : String(err);
-      if (continueOnError) {
-        errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
-      } else {
-        throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
-      }
-    }
-  });
-  reportProgress({ phase: "linking" });
-  const filePathToEntity = /* @__PURE__ */ new Map();
-  for (const file of createdFiles) {
-    filePathToEntity.set(file.relativePath, file);
-  }
-  const parentGroups = /* @__PURE__ */ new Map();
+  const childrenByParent = /* @__PURE__ */ new Map();
   for (const folder of createdFolders) {
     const parentPath = getParentPath(folder.relativePath);
-    […1 line truncated…]
-    if (
-    […1 line truncated…]
-    } else {
-      const parentFolder = folderPathToEntity.get(parentPath);
-      if (!parentFolder) {
-        errors.push({
-          path: folder.relativePath,
-          error: `Parent folder not found: ${parentPath}`
-        });
-        continue;
-      }
-      parentId = parentFolder.id;
-    }
-    if (!parentGroups.has(parentId)) {
-      parentGroups.set(parentId, { folderId: parentId, children: [] });
-    }
-    parentGroups.get(parentId).children.push({ id: folder.id });
+    const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+    if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
+    childrenByParent.get(parentId).push({ id: folder.id, type: "folder" });
   }
   for (const file of createdFiles) {
     const parentPath = getParentPath(file.relativePath);
-    […1 line truncated…]
-    if (
-    […1 line truncated…]
-    } else {
-      const parentFolder = folderPathToEntity.get(parentPath);
-      if (!parentFolder) {
-        errors.push({
-          path: file.relativePath,
-          error: `Parent folder not found: ${parentPath}`
-        });
-        continue;
-      }
-      parentId = parentFolder.id;
-    }
-    if (!parentGroups.has(parentId)) {
-      parentGroups.set(parentId, { folderId: parentId, children: [] });
-    }
-    parentGroups.get(parentId).children.push({ id: file.id });
+    const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
+    if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
+    childrenByParent.get(parentId).push({ id: file.id, type: "file" });
   }
-  […2 lines truncated…]
+  const totalParents = childrenByParent.size;
+  let completedParents = 0;
+  reportProgress({ phase: "backlinking", totalParents, completedParents: 0 });
+  const parentEntries = [...childrenByParent.entries()];
+  await parallelLimit(parentEntries, concurrency, async ([parentId, children]) => {
     try {
-      […4 lines truncated…]
+      const isCollection = parentId === collectionId;
+      const relationshipsAdd = children.map((child) => ({
+        predicate: "contains",
+        peer: child.id,
+        peer_type: child.type
+      }));
+      if (isCollection) {
+        const { data: collData, error: getError } = await client.api.GET("/collections/{id}", {
+          params: { path: { id: parentId } }
+        });
+        if (getError || !collData) {
+          throw new Error(`Failed to fetch collection: ${JSON.stringify(getError)}`);
+        }
+        const updateBody = {
+          expect_tip: collData.cid,
+          relationships_add: relationshipsAdd,
+          note: note ? `${note} (backlink)` : "Upload backlink"
+        };
+        const { error } = await client.api.PUT("/collections/{id}", {
+          params: { path: { id: parentId } },
+          body: updateBody
        });
-        if (
-          throw new Error(
+        if (error) {
+          throw new Error(JSON.stringify(error));
        }
-        expectTip = data.cid;
      } else {
-        const { data, error:
+        const { data: folderData, error: getError } = await client.api.GET("/folders/{id}", {
          params: { path: { id: parentId } }
        });
-        if (
-          throw new Error(`Failed to fetch folder
+        if (getError || !folderData) {
+          throw new Error(`Failed to fetch folder: ${JSON.stringify(getError)}`);
+        }
+        const updateBody = {
+          expect_tip: folderData.cid,
+          relationships_add: relationshipsAdd,
+          note: note ? `${note} (backlink)` : "Upload backlink"
+        };
+        const { error } = await client.api.PUT("/folders/{id}", {
+          params: { path: { id: parentId } },
+          body: updateBody
+        });
+        if (error) {
+          throw new Error(JSON.stringify(error));
        }
-        expectTip = data.cid;
      }
-      […6 lines truncated…]
-        params: { path: { id: parentId } },
-        body: bulkBody
+      completedParents++;
+      reportProgress({
+        phase: "backlinking",
+        totalParents,
+        completedParents,
+        currentItem: `parent:${parentId}`
      });
-      if (error) {
-        throw new Error(JSON.stringify(error));
-      }
    } catch (err) {
      const errorMsg = err instanceof Error ? err.message : String(err);
      if (continueOnError) {
-        errors.push({
-          […1 line truncated…]
-          error: `Bulk linking failed: ${errorMsg}`
-        });
+        errors.push({ path: `parent:${parentId}`, error: `Backlink failed: ${errorMsg}` });
+        completedParents++;
      } else {
-        throw new Error(`Failed to
+        throw new Error(`Failed to backlink parent ${parentId}: ${errorMsg}`);
      }
    }
-  }
-  reportProgress({ phase: "
+  });
+  reportProgress({ phase: "uploading", bytesUploaded: 0 });
+  const pool = new BytePool();
+  await Promise.all(
+    createdFiles.map(async (file) => {
+      await pool.run(file.size, async () => {
+        try {
+          const fileData = await file.getData();
+          let body;
+          if (fileData instanceof Blob) {
+            body = fileData;
+          } else if (fileData instanceof Uint8Array) {
+            const arrayBuffer = new ArrayBuffer(fileData.byteLength);
+            new Uint8Array(arrayBuffer).set(fileData);
+            body = new Blob([arrayBuffer], { type: file.mimeType });
+          } else {
+            body = new Blob([fileData], { type: file.mimeType });
+          }
+          const uploadResponse = await fetch(file.uploadUrl, {
+            method: "PUT",
+            body,
+            headers: { "Content-Type": file.mimeType }
+          });
+          if (!uploadResponse.ok) {
+            throw new Error(`S3 upload failed with status ${uploadResponse.status}`);
+          }
+          let confirmTip = file.entityCid;
+          let confirmAttempts = 0;
+          const MAX_CONFIRM_ATTEMPTS = 3;
+          while (confirmAttempts < MAX_CONFIRM_ATTEMPTS) {
+            confirmAttempts++;
+            const { error: confirmError } = await client.api.POST("/files/{id}/confirm-upload", {
+              params: { path: { id: file.id } },
+              body: {
+                expect_tip: confirmTip,
+                note: note ? `${note} (confirmed)` : "Upload confirmed"
+              }
+            });
+            if (!confirmError) {
+              break;
+            }
+            const errorStr = JSON.stringify(confirmError);
+            if (errorStr.includes("409") || errorStr.includes("CAS") || errorStr.includes("conflict")) {
+              const { data: currentFile, error: fetchError } = await client.api.GET("/files/{id}", {
+                params: { path: { id: file.id } }
+              });
+              if (fetchError || !currentFile) {
+                throw new Error(`Failed to fetch file for confirm retry: ${JSON.stringify(fetchError)}`);
+              }
+              confirmTip = currentFile.cid;
+            } else {
+              throw new Error(`Confirm upload failed: ${errorStr}`);
+            }
+          }
+          if (confirmAttempts >= MAX_CONFIRM_ATTEMPTS) {
+            throw new Error(`Confirm upload failed after ${MAX_CONFIRM_ATTEMPTS} CAS retries`);
+          }
+          bytesUploaded += file.size;
+          reportProgress({
+            phase: "uploading",
+            bytesUploaded,
+            currentItem: file.relativePath
+          });
+        } catch (err) {
+          const errorMsg = err instanceof Error ? err.message : String(err);
+          if (continueOnError) {
+            errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
+          } else {
+            throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
+          }
+        }
+      });
+    })
+  );
+  reportProgress({ phase: "complete", totalParents, completedParents, bytesUploaded });
   const resultFolders = createdFolders.map((f) => ({
     id: f.id,
     cid: f.entityCid,
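
Reviewer note: the new confirm step is optimistic concurrency control — each request carries expect_tip (the entity CID the client believes is current), and on a 409/CAS conflict the client refetches the tip and retries, up to three attempts. The pattern reduced to its core, with fetchTip and tryConfirm as hypothetical stand-ins for the GET /files/{id} and POST /files/{id}/confirm-upload calls above:

// Generic CAS-retry sketch; result shape { ok, conflict } is assumed, not the SDK's.
async function confirmWithCasRetry(fetchTip, tryConfirm, firstTip, maxAttempts = 3) {
  let tip = firstTip;
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
    const { ok, conflict } = await tryConfirm(tip);
    if (ok) return; // expect_tip matched; the confirm landed
    if (!conflict) throw new Error("confirm failed for a non-CAS reason");
    tip = await fetchTip(); // another writer moved the tip; retry against it
  }
  throw new Error(`still conflicting after ${maxAttempts} attempts`);
}
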
@@ -668,12 +739,12 @@ var FolderOperations = class {
       concurrency: options.concurrency,
       onProgress: options.onProgress ? (p) => {
         options.onProgress({
-          phase: p.phase === "computing-cids"
-          totalFiles: p.
-          completedFiles: p.
-          totalFolders: p.
-          completedFolders: p.
-          currentFile: p.
+          phase: p.phase === "computing-cids" ? "creating-folders" : p.phase === "creating" ? "uploading-files" : p.phase === "backlinking" ? "linking" : p.phase === "complete" ? "complete" : "scanning",
+          totalFiles: p.totalEntities,
+          completedFiles: p.completedEntities,
+          totalFolders: p.totalParents,
+          completedFolders: p.completedParents,
+          currentFile: p.currentItem
         });
       } : void 0
     });