@arke-institute/sdk 2.3.7 → 2.3.8

This diff reflects the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
package/dist/index.js CHANGED
@@ -160,563 +160,28 @@ function createArkeClient(config) {
  }
 
  // src/operations/upload/engine.ts
- var PHASE_COUNT = 3;
- var PHASE_INDEX = {
- creating: 0,
- backlinking: 1,
- uploading: 2,
- complete: 3,
- error: -1
- };
- async function parallelLimit(items, concurrency, fn) {
- const results = [];
- let index = 0;
- async function worker() {
- while (index < items.length) {
- const currentIndex = index++;
- const item = items[currentIndex];
- results[currentIndex] = await fn(item, currentIndex);
- }
- }
- const workers = Array.from({ length: Math.min(concurrency, items.length) }, () => worker());
- await Promise.all(workers);
- return results;
- }
  var TARGET_BYTES_IN_FLIGHT = 200 * 1024 * 1024;
- var BytePool = class {
- constructor(targetBytes = TARGET_BYTES_IN_FLIGHT) {
- this.targetBytes = targetBytes;
- this.bytesInFlight = 0;
- this.waitQueue = [];
- }
- async run(size, fn) {
- while (this.bytesInFlight > 0 && this.bytesInFlight + size > this.targetBytes) {
- await new Promise((resolve) => this.waitQueue.push(resolve));
- }
- this.bytesInFlight += size;
- try {
- return await fn();
- } finally {
- this.bytesInFlight -= size;
- const next = this.waitQueue.shift();
- if (next) next();
- }
- }
- };
- function getParentPath(relativePath) {
- const lastSlash = relativePath.lastIndexOf("/");
- if (lastSlash === -1) return null;
- return relativePath.slice(0, lastSlash);
- }
- function groupFoldersByDepth(folders) {
- const byDepth = /* @__PURE__ */ new Map();
- for (const folder of folders) {
- const depth = folder.relativePath.split("/").length - 1;
- if (!byDepth.has(depth)) byDepth.set(depth, []);
- byDepth.get(depth).push(folder);
- }
- return byDepth;
- }
- async function uploadTree(client, tree, options) {
- const { target, onProgress, concurrency = 10, continueOnError = false, note } = options;
- const errors = [];
- const createdFolders = [];
- const createdFiles = [];
- const foldersByPath = /* @__PURE__ */ new Map();
- const totalEntities = tree.files.length + tree.folders.length;
- const totalBytes = tree.files.reduce((sum, f) => sum + f.size, 0);
- let completedEntities = 0;
- let bytesUploaded = 0;
- const reportProgress = (progress) => {
- if (onProgress) {
- const phase = progress.phase || "creating";
- const phaseIndex = PHASE_INDEX[phase] ?? -1;
- let phasePercent = 0;
- if (phase === "creating") {
- const done = progress.completedEntities ?? completedEntities;
- phasePercent = totalEntities > 0 ? Math.round(done / totalEntities * 100) : 100;
- } else if (phase === "backlinking") {
- const done = progress.completedParents ?? 0;
- const total = progress.totalParents ?? 0;
- phasePercent = total > 0 ? Math.round(done / total * 100) : 100;
- } else if (phase === "uploading") {
- const done = progress.bytesUploaded ?? bytesUploaded;
- phasePercent = totalBytes > 0 ? Math.round(done / totalBytes * 100) : 100;
- } else if (phase === "complete") {
- phasePercent = 100;
- }
- onProgress({
- phase,
- phaseIndex,
- phaseCount: PHASE_COUNT,
- phasePercent,
- totalEntities,
- completedEntities,
- totalParents: 0,
- completedParents: 0,
- totalBytes,
- bytesUploaded,
- ...progress
- });
- }
- };
- try {
- let collectionId;
- let collectionCid;
- let collectionCreated = false;
- if (target.createCollection) {
- const collectionBody = {
- label: target.createCollection.label,
- description: target.createCollection.description,
- roles: target.createCollection.roles,
- note
- };
- const { data, error } = await client.api.POST("/collections", {
- body: collectionBody
- });
- if (error || !data) {
- throw new Error(`Failed to create collection: ${JSON.stringify(error)}`);
- }
- collectionId = data.id;
- collectionCid = data.cid;
- collectionCreated = true;
- } else if (target.collectionId) {
- collectionId = target.collectionId;
- const { data, error } = await client.api.GET("/collections/{id}", {
- params: { path: { id: collectionId } }
- });
- if (error || !data) {
- throw new Error(`Failed to fetch collection: ${JSON.stringify(error)}`);
- }
- collectionCid = data.cid;
- } else {
- throw new Error("Must provide either collectionId or createCollection in target");
- }
- const rootParentId = target.parentId ?? collectionId;
- reportProgress({ phase: "creating", completedEntities: 0 });
- const foldersByDepth = groupFoldersByDepth(tree.folders);
- const sortedDepths = [...foldersByDepth.keys()].sort((a, b) => a - b);
- for (const depth of sortedDepths) {
- const foldersAtDepth = foldersByDepth.get(depth);
- await Promise.all(
- foldersAtDepth.map(async (folder) => {
- try {
- const parentPath = getParentPath(folder.relativePath);
- const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
- const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
- const folderBody = {
- label: folder.name,
- collection: collectionId,
- note,
- relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
- };
- const { data, error } = await client.api.POST("/folders", {
- body: folderBody
- });
- if (error || !data) {
- throw new Error(JSON.stringify(error));
- }
- foldersByPath.set(folder.relativePath, { id: data.id, cid: data.cid });
- createdFolders.push({
- name: folder.name,
- relativePath: folder.relativePath,
- id: data.id,
- entityCid: data.cid
- });
- completedEntities++;
- reportProgress({
- phase: "creating",
- completedEntities,
- currentItem: folder.relativePath
- });
- } catch (err) {
- const errorMsg = err instanceof Error ? err.message : String(err);
- if (continueOnError) {
- errors.push({ path: folder.relativePath, error: `Folder creation failed: ${errorMsg}` });
- completedEntities++;
- } else {
- throw new Error(`Failed to create folder ${folder.relativePath}: ${errorMsg}`);
- }
- }
- })
- );
- }
- const FILE_CREATION_CONCURRENCY = 50;
- await parallelLimit(tree.files, FILE_CREATION_CONCURRENCY, async (file) => {
- try {
- const parentPath = getParentPath(file.relativePath);
- const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
- const parentType = parentPath ? "folder" : parentId === collectionId ? "collection" : "folder";
- const fileBody = {
- key: crypto.randomUUID(),
- // Generate unique storage key
- filename: file.name,
- content_type: file.mimeType,
- size: file.size,
- collection: collectionId,
- relationships: [{ predicate: "in", peer: parentId, peer_type: parentType }]
- };
- const { data, error } = await client.api.POST("/files", {
- body: fileBody
- });
- if (error || !data) {
- throw new Error(`Entity creation failed: ${JSON.stringify(error)}`);
- }
- createdFiles.push({
- ...file,
- id: data.id,
- entityCid: data.cid
- });
- completedEntities++;
- reportProgress({
- phase: "creating",
- completedEntities,
- currentItem: file.relativePath
- });
- } catch (err) {
- const errorMsg = err instanceof Error ? err.message : String(err);
- if (continueOnError) {
- errors.push({ path: file.relativePath, error: errorMsg });
- completedEntities++;
- } else {
- throw new Error(`Failed to create file ${file.relativePath}: ${errorMsg}`);
- }
- }
- });
- const childrenByParent = /* @__PURE__ */ new Map();
- for (const folder of createdFolders) {
- const parentPath = getParentPath(folder.relativePath);
- const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
- if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
- childrenByParent.get(parentId).push({ id: folder.id, type: "folder" });
- }
- for (const file of createdFiles) {
- const parentPath = getParentPath(file.relativePath);
- const parentId = parentPath ? foldersByPath.get(parentPath).id : rootParentId;
- if (!childrenByParent.has(parentId)) childrenByParent.set(parentId, []);
- childrenByParent.get(parentId).push({ id: file.id, type: "file" });
- }
- const totalParents = childrenByParent.size;
- let completedParents = 0;
- reportProgress({ phase: "backlinking", totalParents, completedParents: 0 });
- const parentEntries = [...childrenByParent.entries()];
- await parallelLimit(parentEntries, concurrency, async ([parentId, children]) => {
- try {
- const isCollection = parentId === collectionId;
- const relationshipsAdd = children.map((child) => ({
- predicate: "contains",
- peer: child.id,
- peer_type: child.type
- }));
- if (isCollection) {
- const { data: collData, error: getError } = await client.api.GET("/collections/{id}", {
- params: { path: { id: parentId } }
- });
- if (getError || !collData) {
- throw new Error(`Failed to fetch collection: ${JSON.stringify(getError)}`);
- }
- const updateBody = {
- expect_tip: collData.cid,
- relationships_add: relationshipsAdd,
- note: note ? `${note} (backlink)` : "Upload backlink"
- };
- const { error } = await client.api.PUT("/collections/{id}", {
- params: { path: { id: parentId } },
- body: updateBody
- });
- if (error) {
- throw new Error(JSON.stringify(error));
- }
- } else {
- const { data: folderData, error: getError } = await client.api.GET("/folders/{id}", {
- params: { path: { id: parentId } }
- });
- if (getError || !folderData) {
- throw new Error(`Failed to fetch folder: ${JSON.stringify(getError)}`);
- }
- const updateBody = {
- expect_tip: folderData.cid,
- relationships_add: relationshipsAdd,
- note: note ? `${note} (backlink)` : "Upload backlink"
- };
- const { error } = await client.api.PUT("/folders/{id}", {
- params: { path: { id: parentId } },
- body: updateBody
- });
- if (error) {
- throw new Error(JSON.stringify(error));
- }
- }
- completedParents++;
- reportProgress({
- phase: "backlinking",
- totalParents,
- completedParents,
- currentItem: `parent:${parentId}`
- });
- } catch (err) {
- const errorMsg = err instanceof Error ? err.message : String(err);
- if (continueOnError) {
- errors.push({ path: `parent:${parentId}`, error: `Backlink failed: ${errorMsg}` });
- completedParents++;
- } else {
- throw new Error(`Failed to backlink parent ${parentId}: ${errorMsg}`);
- }
- }
- });
- reportProgress({ phase: "uploading", bytesUploaded: 0 });
- const pool = new BytePool();
- await Promise.all(
- createdFiles.map(async (file) => {
- await pool.run(file.size, async () => {
- try {
- const fileData = await file.getData();
- let body;
- if (fileData instanceof Blob) {
- body = fileData;
- } else if (fileData instanceof Uint8Array) {
- const arrayBuffer = new ArrayBuffer(fileData.byteLength);
- new Uint8Array(arrayBuffer).set(fileData);
- body = new Blob([arrayBuffer], { type: file.mimeType });
- } else {
- body = new Blob([fileData], { type: file.mimeType });
- }
- const { error: uploadError } = await client.api.POST("/files/{id}/content", {
- params: { path: { id: file.id } },
- body,
- bodySerializer: (b) => b,
- headers: { "Content-Type": file.mimeType }
- });
- if (uploadError) {
- throw new Error(`Upload failed: ${JSON.stringify(uploadError)}`);
- }
- bytesUploaded += file.size;
- reportProgress({
- phase: "uploading",
- bytesUploaded,
- currentItem: file.relativePath
- });
- } catch (err) {
- const errorMsg = err instanceof Error ? err.message : String(err);
- if (continueOnError) {
- errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
- } else {
- throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
- }
- }
- });
- })
- );
- reportProgress({ phase: "complete", totalParents, completedParents, bytesUploaded });
- const resultFolders = createdFolders.map((f) => ({
- id: f.id,
- cid: f.entityCid,
- type: "folder",
- relativePath: f.relativePath
- }));
- const resultFiles = createdFiles.map((f) => ({
- id: f.id,
- cid: f.entityCid,
- type: "file",
- relativePath: f.relativePath
- }));
- return {
- success: errors.length === 0,
- collection: {
- id: collectionId,
- cid: collectionCid,
- created: collectionCreated
- },
- folders: resultFolders,
- files: resultFiles,
- errors
- };
- } catch (err) {
- const errorMsg = err instanceof Error ? err.message : String(err);
- reportProgress({
- phase: "error",
- error: errorMsg
- });
- return {
- success: false,
- collection: {
- id: target.collectionId ?? "",
- cid: "",
- created: false
- },
- folders: createdFolders.map((f) => ({
- id: f.id,
- cid: f.entityCid,
- type: "folder",
- relativePath: f.relativePath
- })),
- files: createdFiles.map((f) => ({
- id: f.id,
- cid: f.entityCid,
- type: "file",
- relativePath: f.relativePath
- })),
- errors: [...errors, { path: "", error: errorMsg }]
- };
- }
- }
 
  // src/operations/upload/cid.ts
  import { CID } from "multiformats/cid";
  import { sha256 } from "multiformats/hashes/sha2";
  import * as raw from "multiformats/codecs/raw";
 
- // src/operations/upload/scanners.ts
- function getMimeType(filename) {
- const ext = filename.toLowerCase().split(".").pop() || "";
- const mimeTypes = {
- // Images
- jpg: "image/jpeg",
- jpeg: "image/jpeg",
- png: "image/png",
- gif: "image/gif",
- webp: "image/webp",
- svg: "image/svg+xml",
- ico: "image/x-icon",
- bmp: "image/bmp",
- tiff: "image/tiff",
- tif: "image/tiff",
- // Documents
- pdf: "application/pdf",
- doc: "application/msword",
- docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
- xls: "application/vnd.ms-excel",
- xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
- ppt: "application/vnd.ms-powerpoint",
- pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
- odt: "application/vnd.oasis.opendocument.text",
- ods: "application/vnd.oasis.opendocument.spreadsheet",
- odp: "application/vnd.oasis.opendocument.presentation",
- // Text
- txt: "text/plain",
- md: "text/markdown",
- csv: "text/csv",
- html: "text/html",
- htm: "text/html",
- css: "text/css",
- xml: "text/xml",
- rtf: "application/rtf",
- // Code
- js: "text/javascript",
- mjs: "text/javascript",
- ts: "text/typescript",
- jsx: "text/javascript",
- tsx: "text/typescript",
- json: "application/json",
- yaml: "text/yaml",
- yml: "text/yaml",
- // Archives
- zip: "application/zip",
- tar: "application/x-tar",
- gz: "application/gzip",
- rar: "application/vnd.rar",
- "7z": "application/x-7z-compressed",
- // Audio
- mp3: "audio/mpeg",
- wav: "audio/wav",
- ogg: "audio/ogg",
- m4a: "audio/mp4",
- flac: "audio/flac",
- // Video
- mp4: "video/mp4",
- webm: "video/webm",
- avi: "video/x-msvideo",
- mov: "video/quicktime",
- mkv: "video/x-matroska",
- // Fonts
- woff: "font/woff",
- woff2: "font/woff2",
- ttf: "font/ttf",
- otf: "font/otf",
- // Other
- wasm: "application/wasm"
- };
- return mimeTypes[ext] || "application/octet-stream";
- }
- async function scanDirectory(directoryPath, options = {}) {
- const fs = await import("fs/promises");
- const path = await import("path");
- const { ignore = ["node_modules", ".git", ".DS_Store"], includeHidden = false } = options;
- const files = [];
- const folders = [];
- const rootName = path.basename(directoryPath);
- async function scanDir(dirPath, relativePath) {
- const entries = await fs.readdir(dirPath, { withFileTypes: true });
- for (const entry of entries) {
- const name = entry.name;
- if (!includeHidden && name.startsWith(".")) {
- continue;
- }
- if (ignore.some((pattern) => name === pattern || name.match(pattern))) {
- continue;
- }
- const fullPath = path.join(dirPath, name);
- const entryRelativePath = relativePath ? `${relativePath}/${name}` : name;
- if (entry.isDirectory()) {
- folders.push({
- name,
- relativePath: entryRelativePath
- });
- await scanDir(fullPath, entryRelativePath);
- } else if (entry.isFile()) {
- const stat = await fs.stat(fullPath);
- files.push({
- name,
- relativePath: entryRelativePath,
- size: stat.size,
- mimeType: getMimeType(name),
- getData: async () => {
- const buffer = await fs.readFile(fullPath);
- return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
- }
- });
- }
- }
- }
- await scanDir(directoryPath, "");
- folders.sort((a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length);
- return { files, folders };
- }
-
  // src/operations/folders.ts
  var FolderOperations = class {
  constructor(client) {
  this.client = client;
+ void client;
  }
  /**
  * Upload a local directory to Arke
  *
- * @deprecated Use uploadTree and scanDirectory instead
+ * @deprecated This method has been removed. Use uploadTree and buildUploadTree instead.
  */
- async uploadDirectory(localPath, options) {
- const tree = await scanDirectory(localPath);
- const result = await uploadTree(this.client, tree, {
- target: {
- collectionId: options.collectionId,
- parentId: options.parentFolderId
- },
- concurrency: options.concurrency,
- onProgress: options.onProgress ? (p) => {
- options.onProgress({
- phase: p.phase === "creating" ? "creating-folders" : p.phase === "uploading" ? "uploading-files" : p.phase === "backlinking" ? "linking" : p.phase === "complete" ? "complete" : "scanning",
- totalFiles: p.totalEntities,
- completedFiles: p.completedEntities,
- totalFolders: p.totalParents,
- completedFolders: p.completedParents,
- currentFile: p.currentItem
- });
- } : void 0
- });
- return {
- rootFolder: result.folders[0] || null,
- folders: result.folders,
- files: result.files
- };
+ async uploadDirectory(_localPath, _options) {
+ throw new Error(
+ "FolderOperations.uploadDirectory has been removed. Use uploadTree() with buildUploadTree() instead. See: https://github.com/arke-institute/arke-sdk#upload-module"
+ );
  }
  };
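
Migration sketch: the replacement uploadDirectory body now throws and points callers at uploadTree() and buildUploadTree(). A minimal, hypothetical example of the new call path is below. It assumes both helpers are exported from the package root and that buildUploadTree(localPath) returns the { files, folders } tree shape consumed by the uploadTree implementation removed above; the import path, the buildUploadTree signature, and the placeholder IDs are illustrative assumptions, not taken from this diff.

// Hypothetical migration from the removed FolderOperations.uploadDirectory().
// Assumption: uploadTree and buildUploadTree are exported from "@arke-institute/sdk"
// and buildUploadTree(localPath) returns { files, folders } as scanDirectory did.
import { createArkeClient, uploadTree, buildUploadTree } from "@arke-institute/sdk";

const client = createArkeClient({ /* ...auth/config... */ });

// Before (2.3.7): await client.folders.uploadDirectory("./data", { collectionId, parentFolderId });
const tree = await buildUploadTree("./data");
const result = await uploadTree(client, tree, {
  target: { collectionId: "...", parentId: "..." }, // or { createCollection: { label: "..." } }
  concurrency: 10,
  continueOnError: false,
  // Progress fields mirror the removed dist implementation above.
  onProgress: (p) => console.log(p.phase, p.phasePercent),
});
if (!result.success) console.error(result.errors);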