@arke-institute/sdk 2.3.7 → 2.3.8
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- package/dist/{crypto-BddSiyPC.d.cts → crypto-D7rJLGQQ.d.cts} +12 -8
- package/dist/{crypto-Dz2ktRy4.d.ts → crypto-s98kufbt.d.ts} +12 -8
- package/dist/index.cjs +6 -541
- package/dist/index.cjs.map +1 -1
- package/dist/index.d.cts +1 -1
- package/dist/index.d.ts +1 -1
- package/dist/index.js +6 -541
- package/dist/index.js.map +1 -1
- package/dist/operations/index.cjs +6 -71
- package/dist/operations/index.cjs.map +1 -1
- package/dist/operations/index.d.cts +3 -26
- package/dist/operations/index.d.ts +3 -26
- package/dist/operations/index.js +6 -70
- package/dist/operations/index.js.map +1 -1
- package/package.json +1 -1
package/dist/{crypto-BddSiyPC.d.cts → crypto-D7rJLGQQ.d.cts}
CHANGED

@@ -275,9 +275,12 @@ interface UploadResult {
  *
  * @deprecated Use the new upload module instead:
  * ```typescript
- * import { uploadTree,
+ * import { uploadTree, buildUploadTree } from '@arke-institute/sdk/operations';
  *
- * const tree =
+ * const tree = buildUploadTree([
+ *   { path: 'docs/readme.md', data: readmeBuffer },
+ *   { path: 'images/logo.png', data: logoBlob },
+ * ]);
  * const result = await uploadTree(client, tree, {
  *   target: { collectionId: '...' },
  * });
@@ -322,14 +325,15 @@ interface UploadDirectoryResult {
 /**
  * Folder operations helper
  *
- * @deprecated Use uploadTree and
+ * @deprecated Use uploadTree and buildUploadTree functions instead:
  * ```typescript
- * import { uploadTree,
+ * import { uploadTree, buildUploadTree } from '@arke-institute/sdk/operations';
  *
- * const tree =
+ * const tree = buildUploadTree([
+ *   { path: 'docs/readme.md', data: readmeBuffer },
+ * ]);
  * const result = await uploadTree(client, tree, {
  *   target: { collectionId: '...' },
- *   onProgress: (p) => console.log(`${p.completedFiles}/${p.totalFiles} files`),
  * });
  * ```
  */
@@ -339,9 +343,9 @@ declare class FolderOperations {
     /**
      * Upload a local directory to Arke
      *
-     * @deprecated Use uploadTree and
+     * @deprecated This method has been removed. Use uploadTree and buildUploadTree instead.
      */
-    uploadDirectory(
+    uploadDirectory(_localPath: string, _options: UploadDirectoryOptions): Promise<UploadDirectoryResult>;
 }
 
 /**
package/dist/{crypto-Dz2ktRy4.d.ts → crypto-s98kufbt.d.ts}
CHANGED

@@ -275,9 +275,12 @@ interface UploadResult {
  *
  * @deprecated Use the new upload module instead:
  * ```typescript
- * import { uploadTree,
+ * import { uploadTree, buildUploadTree } from '@arke-institute/sdk/operations';
  *
- * const tree =
+ * const tree = buildUploadTree([
+ *   { path: 'docs/readme.md', data: readmeBuffer },
+ *   { path: 'images/logo.png', data: logoBlob },
+ * ]);
  * const result = await uploadTree(client, tree, {
  *   target: { collectionId: '...' },
  * });
@@ -322,14 +325,15 @@ interface UploadDirectoryResult {
 /**
  * Folder operations helper
  *
- * @deprecated Use uploadTree and
+ * @deprecated Use uploadTree and buildUploadTree functions instead:
  * ```typescript
- * import { uploadTree,
+ * import { uploadTree, buildUploadTree } from '@arke-institute/sdk/operations';
  *
- * const tree =
+ * const tree = buildUploadTree([
+ *   { path: 'docs/readme.md', data: readmeBuffer },
+ * ]);
  * const result = await uploadTree(client, tree, {
  *   target: { collectionId: '...' },
- *   onProgress: (p) => console.log(`${p.completedFiles}/${p.totalFiles} files`),
  * });
  * ```
  */
@@ -339,9 +343,9 @@ declare class FolderOperations {
     /**
      * Upload a local directory to Arke
      *
-     * @deprecated Use uploadTree and
+     * @deprecated This method has been removed. Use uploadTree and buildUploadTree instead.
      */
-    uploadDirectory(
+    uploadDirectory(_localPath: string, _options: UploadDirectoryOptions): Promise<UploadDirectoryResult>;
 }
 
 /**
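The deprecation notes above point call sites at the new upload module. A minimal end-to-end migration sketch, assembled from the JSDoc examples in this diff; note that the `createArkeClient` config shape and the exact entry type accepted by `buildUploadTree` are not shown in the diff, so those parts are assumptions:

```typescript
import { createArkeClient } from '@arke-institute/sdk';
import { uploadTree, buildUploadTree } from '@arke-institute/sdk/operations';
import { readFile } from 'node:fs/promises';

async function main() {
  // createArkeClient appears as a context line in this diff; its config
  // shape is not shown here, so reuse whatever you passed on 2.3.7.
  const client = createArkeClient({ /* ...your existing config... */ });

  // buildUploadTree takes { path, data } entries per the deprecation
  // examples; data may be a Buffer/Uint8Array or a Blob (both appear there).
  const readmeBuffer = await readFile('docs/readme.md');
  const tree = buildUploadTree([
    { path: 'docs/readme.md', data: readmeBuffer },
  ]);

  const result = await uploadTree(client, tree, {
    target: { collectionId: '...' },
  });
  console.log(result);
}

main();
```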
package/dist/index.cjs
CHANGED
@@ -210,563 +210,28 @@ function createArkeClient(config) {
 }
 
 // src/operations/upload/engine.ts
-var PHASE_COUNT = 3;
-var PHASE_INDEX = {
-  creating: 0,
-  backlinking: 1,
-  uploading: 2,
-  complete: 3,
-  error: -1
-};
-async function parallelLimit(items, concurrency, fn) {
-  const results = [];
-  let index = 0;
-  async function worker() {
-    while (index < items.length) {
-      const currentIndex = index++;
-      const item = items[currentIndex];
-      results[currentIndex] = await fn(item, currentIndex);
-    }
-  }
-  const workers = Array.from({ length: Math.min(concurrency, items.length) }, () => worker());
-  await Promise.all(workers);
-  return results;
-}
 var TARGET_BYTES_IN_FLIGHT = 200 * 1024 * 1024;
-var BytePool = class {
-  constructor(targetBytes = TARGET_BYTES_IN_FLIGHT) {
-    this.targetBytes = targetBytes;
-    this.bytesInFlight = 0;
-    this.waitQueue = [];
-  }
-  async run(size, fn) {
-    while (this.bytesInFlight > 0 && this.bytesInFlight + size > this.targetBytes) {
-      await new Promise((resolve) => this.waitQueue.push(resolve));
-    }
-    this.bytesInFlight += size;
-    try {
-      return await fn();
-    } finally {
-      this.bytesInFlight -= size;
-      const next = this.waitQueue.shift();
-      if (next) next();
-    }
-  }
-};
-function getParentPath(relativePath) {
-  const lastSlash = relativePath.lastIndexOf("/");
-  if (lastSlash === -1) return null;
-  return relativePath.slice(0, lastSlash);
-}
-function groupFoldersByDepth(folders) {
-  const byDepth = /* @__PURE__ */ new Map();
-  for (const folder of folders) {
-    const depth = folder.relativePath.split("/").length - 1;
-    if (!byDepth.has(depth)) byDepth.set(depth, []);
-    byDepth.get(depth).push(folder);
-  }
-  return byDepth;
-}
-async function uploadTree(client, tree, options) {
-  const { target, onProgress, concurrency = 10, continueOnError = false, note } = options;
-  const errors = [];
-  const createdFolders = [];
-  const createdFiles = [];
-  const foldersByPath = /* @__PURE__ */ new Map();
-  const totalEntities = tree.files.length + tree.folders.length;
-  const totalBytes = tree.files.reduce((sum, f) => sum + f.size, 0);
-  let completedEntities = 0;
-  let bytesUploaded = 0;
-  const reportProgress = (progress) => {
-    if (onProgress) {
-      const phase = progress.phase || "creating";
-      const phaseIndex = PHASE_INDEX[phase] ?? -1;
-      let phasePercent = 0;
-      if (phase === "creating") {
-        const done = progress.completedEntities ?? completedEntities;
-        phasePercent = totalEntities > 0 ? Math.round(done / totalEntities * 100) : 100;
-      } else if (phase === "backlinking") {
-        const done = progress.completedParents ?? 0;
-        const total = progress.totalParents ?? 0;
-        phasePercent = total > 0 ? Math.round(done / total * 100) : 100;
-      } else if (phase === "uploading") {
-        const done = progress.bytesUploaded ?? bytesUploaded;
-        phasePercent = totalBytes > 0 ? Math.round(done / totalBytes * 100) : 100;
-      } else if (phase === "complete") {
-        phasePercent = 100;
-      }
-      onProgress({
-        phase,
-        phaseIndex,
-        phaseCount: PHASE_COUNT,
-        phasePercent,
-        totalEntities,
-        completedEntities,
-        totalParents: 0,
-        completedParents: 0,
-        totalBytes,
-        bytesUploaded,
-        ...progress
-      });
-    }
-  };
-  try {
-    let collectionId;
-    let collectionCid;
-    let collectionCreated = false;
-    if (target.createCollection) {
-      const collectionBody = {
-        label: target.createCollection.label,
-        description: target.createCollection.description,
-        roles: target.createCollection.roles,
-        note
-      };
-      const { data, error } = await client.api.POST("/collections", {
-        body: collectionBody
-      });
-      if (error || !data) {
-        throw new Error(`Failed to create collection: ${JSON.stringify(error)}`);
-      }
-      collectionId = data.id;
-      collectionCid = data.cid;
-      collectionCreated = true;
-    } else if (target.collectionId) {
-      collectionId = target.collectionId;
-      const { data, error } = await client.api.GET("/collections/{id}", {
-        params: { path: { id: collectionId } }
-      });
-      if (error || !data) {
-        throw new Error(`Failed to fetch collection: ${JSON.stringify(error)}`);
-      }
-      collectionCid = data.cid;
-    } else {
-      throw new Error("Must provide either collectionId or createCollection in target");
-    }
-    const rootParentId = target.parentId ?? collectionId;
-    reportProgress({ phase: "creating", completedEntities: 0 });
-    const foldersByDepth = groupFoldersByDepth(tree.folders);
-    const sortedDepths = [...foldersByDepth.keys()].sort((a, b) => a - b);
-    for (const depth of sortedDepths) {
-      const foldersAtDepth = foldersByDepth.get(depth);
-      await Promise.all(
-        foldersAtDepth.map(async (folder) => {
-          try {
-            const parentPath = getParentPath(folder.relativePath);
-            const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
-            const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
-            const folderBody = {
-              label: folder.name,
-              collection: collectionId,
-              note,
-              relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
-            };
-            const { data, error } = await client.api.POST("/folders", {
-              body: folderBody
-            });
-            if (error || !data) {
-              throw new Error(JSON.stringify(error));
-            }
-            foldersByPath.set(folder.relativePath, { id: data.id, cid: data.cid });
-            createdFolders.push({
-              name: folder.name,
-              relativePath: folder.relativePath,
-              id: data.id,
-              entityCid: data.cid
-            });
-            completedEntities++;
-            reportProgress({
-              phase: "creating",
-              completedEntities,
-              currentItem: folder.relativePath
-            });
-          } catch (err) {
-            const errorMsg = err instanceof Error ? err.message : String(err);
-            if (continueOnError) {
-              errors.push({ path: folder.relativePath, error: `Folder creation failed: ${errorMsg}` });
-              completedEntities++;
-            } else {
-              throw new Error(`Failed to create folder ${folder.relativePath}: ${errorMsg}`);
-            }
-          }
-        })
-      );
-    }
-    const FILE_CREATION_CONCURRENCY = 50;
-    await parallelLimit(tree.files, FILE_CREATION_CONCURRENCY, async (file) => {
-      try {
-        const parentPath = getParentPath(file.relativePath);
-        const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
-        const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
-        const fileBody = {
-          key: crypto.randomUUID(),
-          // Generate unique storage key
-          filename: file.name,
-          content_type: file.mimeType,
-          size: file.size,
-          collection: collectionId,
-          relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
-        };
-        const { data, error } = await client.api.POST("/files", {
-          body: fileBody
-        });
-        if (error || !data) {
-          throw new Error(`Entity creation failed: ${JSON.stringify(error)}`);
-        }
-        createdFiles.push({
-          ...file,
-          id: data.id,
-          entityCid: data.cid
-        });
-        completedEntities++;
-        reportProgress({
-          phase: "creating",
-          completedEntities,
-          currentItem: file.relativePath
-        });
-      } catch (err) {
-        const errorMsg = err instanceof Error ? err.message : String(err);
-        if (continueOnError) {
-          errors.push({ path: file.relativePath, error: errorMsg });
-          completedEntities++;
-        } else {
-          throw new Error(`Failed to create file ${file.relativePath}: ${errorMsg}`);
-        }
-      }
-    });
-    const childrenByParent = /* @__PURE__ */ new Map();
-    for (const folder of createdFolders) {
-      const parentPath = getParentPath(folder.relativePath);
-      const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
-      if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
-      childrenByParent.get(parentId).push({ id: folder.id, type: "folder" });
-    }
-    for (const file of createdFiles) {
-      const parentPath = getParentPath(file.relativePath);
-      const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
-      if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
-      childrenByParent.get(parentId).push({ id: file.id, type: "file" });
-    }
-    const totalParents = childrenByParent.size;
-    let completedParents = 0;
-    reportProgress({ phase: "backlinking", totalParents, completedParents: 0 });
-    const parentEntries = [...childrenByParent.entries()];
-    await parallelLimit(parentEntries, concurrency, async ([parentId, children]) => {
-      try {
-        const isCollection = parentId === collectionId;
-        const relationshipsAdd = children.map((child) => ({
-          predicate: "contains",
-          peer: child.id,
-          peer_type: child.type
-        }));
-        if (isCollection) {
-          const { data: collData, error: getError } = await client.api.GET("/collections/{id}", {
-            params: { path: { id: parentId } }
-          });
-          if (getError || !collData) {
-            throw new Error(`Failed to fetch collection: ${JSON.stringify(getError)}`);
-          }
-          const updateBody = {
-            expect_tip: collData.cid,
-            relationships_add: relationshipsAdd,
-            note: note ? `${note} (backlink)` : "Upload backlink"
-          };
-          const { error } = await client.api.PUT("/collections/{id}", {
-            params: { path: { id: parentId } },
-            body: updateBody
-          });
-          if (error) {
-            throw new Error(JSON.stringify(error));
-          }
-        } else {
-          const { data: folderData, error: getError } = await client.api.GET("/folders/{id}", {
-            params: { path: { id: parentId } }
-          });
-          if (getError || !folderData) {
-            throw new Error(`Failed to fetch folder: ${JSON.stringify(getError)}`);
-          }
-          const updateBody = {
-            expect_tip: folderData.cid,
-            relationships_add: relationshipsAdd,
-            note: note ? `${note} (backlink)` : "Upload backlink"
-          };
-          const { error } = await client.api.PUT("/folders/{id}", {
-            params: { path: { id: parentId } },
-            body: updateBody
-          });
-          if (error) {
-            throw new Error(JSON.stringify(error));
-          }
-        }
-        completedParents++;
-        reportProgress({
-          phase: "backlinking",
-          totalParents,
-          completedParents,
-          currentItem: `parent:${parentId}`
-        });
-      } catch (err) {
-        const errorMsg = err instanceof Error ? err.message : String(err);
-        if (continueOnError) {
-          errors.push({ path: `parent:${parentId}`, error: `Backlink failed: ${errorMsg}` });
-          completedParents++;
-        } else {
-          throw new Error(`Failed to backlink parent ${parentId}: ${errorMsg}`);
-        }
-      }
-    });
-    reportProgress({ phase: "uploading", bytesUploaded: 0 });
-    const pool = new BytePool();
-    await Promise.all(
-      createdFiles.map(async (file) => {
-        await pool.run(file.size, async () => {
-          try {
-            const fileData = await file.getData();
-            let body;
-            if (fileData instanceof Blob) {
-              body = fileData;
-            } else if (fileData instanceof Uint8Array) {
-              const arrayBuffer = new ArrayBuffer(fileData.byteLength);
-              new Uint8Array(arrayBuffer).set(fileData);
-              body = new Blob([arrayBuffer], { type: file.mimeType });
-            } else {
-              body = new Blob([fileData], { type: file.mimeType });
-            }
-            const { error: uploadError } = await client.api.POST("/files/{id}/content", {
-              params: { path: { id: file.id } },
-              body,
-              bodySerializer: (b) => b,
-              headers: { "Content-Type": file.mimeType }
-            });
-            if (uploadError) {
-              throw new Error(`Upload failed: ${JSON.stringify(uploadError)}`);
-            }
-            bytesUploaded += file.size;
-            reportProgress({
-              phase: "uploading",
-              bytesUploaded,
-              currentItem: file.relativePath
-            });
-          } catch (err) {
-            const errorMsg = err instanceof Error ? err.message : String(err);
-            if (continueOnError) {
-              errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
-            } else {
-              throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
-            }
-          }
-        });
-      })
-    );
-    reportProgress({ phase: "complete", totalParents, completedParents, bytesUploaded });
-    const resultFolders = createdFolders.map((f) => ({
-      id: f.id,
-      cid: f.entityCid,
-      type: "folder",
-      relativePath: f.relativePath
-    }));
-    const resultFiles = createdFiles.map((f) => ({
-      id: f.id,
-      cid: f.entityCid,
-      type: "file",
-      relativePath: f.relativePath
-    }));
-    return {
-      success: errors.length === 0,
-      collection: {
-        id: collectionId,
-        cid: collectionCid,
-        created: collectionCreated
-      },
-      folders: resultFolders,
-      files: resultFiles,
-      errors
-    };
-  } catch (err) {
-    const errorMsg = err instanceof Error ? err.message : String(err);
-    reportProgress({
-      phase: "error",
-      error: errorMsg
-    });
-    return {
-      success: false,
-      collection: {
-        id: target.collectionId ?? "",
-        cid: "",
-        created: false
-      },
-      folders: createdFolders.map((f) => ({
-        id: f.id,
-        cid: f.entityCid,
-        type: "folder",
-        relativePath: f.relativePath
-      })),
-      files: createdFiles.map((f) => ({
-        id: f.id,
-        cid: f.entityCid,
-        type: "file",
-        relativePath: f.relativePath
-      })),
-      errors: [...errors, { path: "", error: errorMsg }]
-    };
-  }
-}
 
 // src/operations/upload/cid.ts
 var import_cid = require("multiformats/cid");
 var import_sha2 = require("multiformats/hashes/sha2");
 var raw = __toESM(require("multiformats/codecs/raw"), 1);
 
-// src/operations/upload/scanners.ts
-function getMimeType(filename) {
-  const ext = filename.toLowerCase().split(".").pop() || "";
-  const mimeTypes = {
-    // Images
-    jpg: "image/jpeg",
-    jpeg: "image/jpeg",
-    png: "image/png",
-    gif: "image/gif",
-    webp: "image/webp",
-    svg: "image/svg+xml",
-    ico: "image/x-icon",
-    bmp: "image/bmp",
-    tiff: "image/tiff",
-    tif: "image/tiff",
-    // Documents
-    pdf: "application/pdf",
-    doc: "application/msword",
-    docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
-    xls: "application/vnd.ms-excel",
-    xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
-    ppt: "application/vnd.ms-powerpoint",
-    pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
-    odt: "application/vnd.oasis.opendocument.text",
-    ods: "application/vnd.oasis.opendocument.spreadsheet",
-    odp: "application/vnd.oasis.opendocument.presentation",
-    // Text
-    txt: "text/plain",
-    md: "text/markdown",
-    csv: "text/csv",
-    html: "text/html",
-    htm: "text/html",
-    css: "text/css",
-    xml: "text/xml",
-    rtf: "application/rtf",
-    // Code
-    js: "text/javascript",
-    mjs: "text/javascript",
-    ts: "text/typescript",
-    jsx: "text/javascript",
-    tsx: "text/typescript",
-    json: "application/json",
-    yaml: "text/yaml",
-    yml: "text/yaml",
-    // Archives
-    zip: "application/zip",
-    tar: "application/x-tar",
-    gz: "application/gzip",
-    rar: "application/vnd.rar",
-    "7z": "application/x-7z-compressed",
-    // Audio
-    mp3: "audio/mpeg",
-    wav: "audio/wav",
-    ogg: "audio/ogg",
-    m4a: "audio/mp4",
-    flac: "audio/flac",
-    // Video
-    mp4: "video/mp4",
-    webm: "video/webm",
-    avi: "video/x-msvideo",
-    mov: "video/quicktime",
-    mkv: "video/x-matroska",
-    // Fonts
-    woff: "font/woff",
-    woff2: "font/woff2",
-    ttf: "font/ttf",
-    otf: "font/otf",
-    // Other
-    wasm: "application/wasm"
-  };
-  return mimeTypes[ext] || "application/octet-stream";
-}
-async function scanDirectory(directoryPath, options = {}) {
-  const fs = await import("fs/promises");
-  const path = await import("path");
-  const { ignore = ["node_modules", ".git", ".DS_Store"], includeHidden = false } = options;
-  const files = [];
-  const folders = [];
-  const rootName = path.basename(directoryPath);
-  async function scanDir(dirPath, relativePath) {
-    const entries = await fs.readdir(dirPath, { withFileTypes: true });
-    for (const entry of entries) {
-      const name = entry.name;
-      if (!includeHidden && name.startsWith(".")) {
-        continue;
-      }
-      if (ignore.some((pattern) => name === pattern || name.match(pattern))) {
-        continue;
-      }
-      const fullPath = path.join(dirPath, name);
-      const entryRelativePath = relativePath ? `${relativePath}/${name}` : name;
-      if (entry.isDirectory()) {
-        folders.push({
-          name,
-          relativePath: entryRelativePath
-        });
-        await scanDir(fullPath, entryRelativePath);
-      } else if (entry.isFile()) {
-        const stat = await fs.stat(fullPath);
-        files.push({
-          name,
-          relativePath: entryRelativePath,
-          size: stat.size,
-          mimeType: getMimeType(name),
-          getData: async () => {
-            const buffer = await fs.readFile(fullPath);
-            return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
-          }
-        });
-      }
-    }
-  }
-  await scanDir(directoryPath, "");
-  folders.sort((a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length);
-  return { files, folders };
-}
-
 // src/operations/folders.ts
 var FolderOperations = class {
   constructor(client) {
     this.client = client;
+    void client;
   }
   /**
    * Upload a local directory to Arke
    *
-   * @deprecated Use uploadTree and
+   * @deprecated This method has been removed. Use uploadTree and buildUploadTree instead.
    */
-  async uploadDirectory(
-
-
-
-        collectionId: options.collectionId,
-        parentId: options.parentFolderId
-      },
-      concurrency: options.concurrency,
-      onProgress: options.onProgress ? (p) => {
-        options.onProgress({
-          phase: p.phase === "creating" ? "creating-folders" : p.phase === "uploading" ? "uploading-files" : p.phase === "backlinking" ? "linking" : p.phase === "complete" ? "complete" : "scanning",
-          totalFiles: p.totalEntities,
-          completedFiles: p.completedEntities,
-          totalFolders: p.totalParents,
-          completedFolders: p.completedParents,
-          currentFile: p.currentItem
-        });
-      } : void 0
-    });
-    return {
-      rootFolder: result.folders[0] || null,
-      folders: result.folders,
-      files: result.files
-    };
+  async uploadDirectory(_localPath, _options) {
+    throw new Error(
+      "FolderOperations.uploadDirectory has been removed. Use uploadTree() with buildUploadTree() instead. See: https://github.com/arke-institute/arke-sdk#upload-module"
+    );
  }
 };
 
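The practical effect of the index.cjs change above: the old `uploadDirectory` implementation (directory scan, tree upload, progress mapping) is gone, and the async stub now rejects unconditionally. A small sketch of what a 2.3.7-era call site observes after upgrading; how a `FolderOperations` instance is obtained from the client is not shown in this diff, so `folderOps` below is declared as a hypothetical stand-in:

```typescript
// Hypothetical stand-in for a FolderOperations instance (exposure not shown
// in this diff).
declare const folderOps: {
  uploadDirectory(localPath: string, options: unknown): Promise<unknown>;
};

async function legacyUpload() {
  try {
    // On 2.3.8 this always rejects; the method body is a throwing stub.
    await folderOps.uploadDirectory('./my-dir', { collectionId: '...' });
  } catch (err) {
    // Message: "FolderOperations.uploadDirectory has been removed. Use
    // uploadTree() with buildUploadTree() instead. ..." with a README link.
    console.error((err as Error).message);
    // Migrate to uploadTree() + buildUploadTree() as sketched earlier.
  }
}
```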