@arke-institute/sdk 0.1.3 → 2.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (66) hide show
  1. package/README.md +222 -176
  2. package/dist/crypto-CQnwqWQn.d.ts +459 -0
  3. package/dist/crypto-iYgzUi77.d.cts +459 -0
  4. package/dist/generated/index.cjs +19 -0
  5. package/dist/generated/index.cjs.map +1 -0
  6. package/dist/generated/index.d.cts +6545 -0
  7. package/dist/generated/index.d.ts +6545 -0
  8. package/dist/generated/index.js +1 -0
  9. package/dist/generated/index.js.map +1 -0
  10. package/dist/index.cjs +725 -4248
  11. package/dist/index.cjs.map +1 -1
  12. package/dist/index.d.cts +62 -7
  13. package/dist/index.d.ts +62 -7
  14. package/dist/index.js +706 -4221
  15. package/dist/index.js.map +1 -1
  16. package/dist/operations/index.cjs +806 -0
  17. package/dist/operations/index.cjs.map +1 -0
  18. package/dist/operations/index.d.cts +157 -0
  19. package/dist/operations/index.d.ts +157 -0
  20. package/dist/operations/index.js +759 -0
  21. package/dist/operations/index.js.map +1 -0
  22. package/openapi/spec.json +8648 -0
  23. package/openapi/version.json +7 -0
  24. package/package.json +51 -52
  25. package/dist/client-dAk3E64p.d.cts +0 -183
  26. package/dist/client-dAk3E64p.d.ts +0 -183
  27. package/dist/collections/index.cjs +0 -233
  28. package/dist/collections/index.cjs.map +0 -1
  29. package/dist/collections/index.d.cts +0 -9
  30. package/dist/collections/index.d.ts +0 -9
  31. package/dist/collections/index.js +0 -205
  32. package/dist/collections/index.js.map +0 -1
  33. package/dist/content/index.cjs +0 -591
  34. package/dist/content/index.cjs.map +0 -1
  35. package/dist/content/index.d.cts +0 -516
  36. package/dist/content/index.d.ts +0 -516
  37. package/dist/content/index.js +0 -558
  38. package/dist/content/index.js.map +0 -1
  39. package/dist/edit/index.cjs +0 -1503
  40. package/dist/edit/index.cjs.map +0 -1
  41. package/dist/edit/index.d.cts +0 -78
  42. package/dist/edit/index.d.ts +0 -78
  43. package/dist/edit/index.js +0 -1447
  44. package/dist/edit/index.js.map +0 -1
  45. package/dist/errors-3L7IiHcr.d.cts +0 -480
  46. package/dist/errors-BTe8GKRQ.d.ts +0 -480
  47. package/dist/errors-CT7yzKkU.d.cts +0 -874
  48. package/dist/errors-CT7yzKkU.d.ts +0 -874
  49. package/dist/graph/index.cjs +0 -427
  50. package/dist/graph/index.cjs.map +0 -1
  51. package/dist/graph/index.d.cts +0 -485
  52. package/dist/graph/index.d.ts +0 -485
  53. package/dist/graph/index.js +0 -396
  54. package/dist/graph/index.js.map +0 -1
  55. package/dist/query/index.cjs +0 -356
  56. package/dist/query/index.cjs.map +0 -1
  57. package/dist/query/index.d.cts +0 -636
  58. package/dist/query/index.d.ts +0 -636
  59. package/dist/query/index.js +0 -328
  60. package/dist/query/index.js.map +0 -1
  61. package/dist/upload/index.cjs +0 -1634
  62. package/dist/upload/index.cjs.map +0 -1
  63. package/dist/upload/index.d.cts +0 -150
  64. package/dist/upload/index.d.ts +0 -150
  65. package/dist/upload/index.js +0 -1597
  66. package/dist/upload/index.js.map +0 -1
@@ -0,0 +1,759 @@
1
+ // src/operations/upload/cid.ts
2
+ import { CID } from "multiformats/cid";
3
+ import { sha256 } from "multiformats/hashes/sha2";
4
+ import * as raw from "multiformats/codecs/raw";
5
/**
 * Compute a CIDv1 string (raw codec, sha2-256) for a piece of content.
 *
 * Accepts a Blob, an ArrayBuffer, or a Uint8Array; the first two are
 * normalized to a Uint8Array before hashing.
 */
async function computeCid(data) {
  const bytes =
    data instanceof Blob
      ? new Uint8Array(await data.arrayBuffer())
      : data instanceof ArrayBuffer
        ? new Uint8Array(data)
        : data;
  const digest = await sha256.digest(bytes);
  return CID.create(1, raw.code, digest).toString();
}
19
/**
 * Verify that `data` hashes to `expectedCid`.
 * @returns true when the freshly computed CID string matches exactly.
 */
async function verifyCid(data, expectedCid) {
  return (await computeCid(data)) === expectedCid;
}
23
+
24
+ // src/operations/upload/engine.ts
25
// src/operations/upload/engine.ts
/**
 * Map `fn` over `items` with at most `concurrency` calls in flight.
 *
 * Each result lands at the index of its input item, so the returned array
 * is ordered like `items`. Rejects on the first error (workers that are
 * mid-item still finish their current call).
 */
async function parallelLimit(items, concurrency, fn) {
  const results = [];
  let next = 0;
  // Each lane pulls the next unclaimed index until the list is exhausted.
  // Claiming is safe because the read + increment happen synchronously.
  const lane = async () => {
    while (next < items.length) {
      const i = next;
      next += 1;
      results[i] = await fn(items[i], i);
    }
  };
  const laneCount = Math.min(concurrency, items.length);
  await Promise.all(Array.from({ length: laneCount }, () => lane()));
  return results;
}
39
/**
 * Return the parent portion of a slash-separated relative path,
 * or null when the path has no parent (no "/" present).
 */
function getParentPath(relativePath) {
  const cut = relativePath.lastIndexOf("/");
  return cut === -1 ? null : relativePath.slice(0, cut);
}
44
/**
 * Upload a scanned tree of files and folders into an Arke collection.
 *
 * Phases (each reported through `options.onProgress` when provided):
 *   1. resolve or create the target collection
 *   2. compute a content CID for every file (parallel, bounded)
 *   3. create folder entities, shallowest paths first
 *   4. create file entities (server returns presigned upload URLs)
 *   5. PUT each file's bytes to its presigned URL
 *   6. bulk-link every created entity under its parent
 *
 * With `continueOnError` set, per-item failures are accumulated in the
 * returned `errors` array instead of aborting. Otherwise the first failure
 * throws, is caught by the outer try/catch, and an error-shaped result is
 * returned — this function itself never rejects.
 */
async function uploadTree(client, tree, options) {
  const { target, onProgress, concurrency = 5, continueOnError = false, note } = options;
  const errors = [];
  const createdFolders = [];
  const createdFiles = [];
  // Wraps onProgress with defaults; the spread lets each call override
  // only the fields that changed for its phase.
  const reportProgress = (progress) => {
    if (onProgress) {
      onProgress({
        phase: "scanning",
        totalFiles: tree.files.length,
        completedFiles: 0,
        totalFolders: tree.folders.length,
        completedFolders: 0,
        ...progress
      });
    }
  };
  try {
    // --- Phase 1: resolve or create the destination collection ---
    let collectionId;
    let collectionCid;
    let collectionCreated = false;
    if (target.createCollection) {
      reportProgress({ phase: "scanning", currentFolder: "Creating collection..." });
      const collectionBody = {
        label: target.createCollection.label,
        description: target.createCollection.description,
        roles: target.createCollection.roles,
        note
      };
      const { data, error } = await client.api.POST("/collections", {
        body: collectionBody
      });
      if (error || !data) {
        throw new Error(`Failed to create collection: ${JSON.stringify(error)}`);
      }
      collectionId = data.id;
      collectionCid = data.cid;
      collectionCreated = true;
    } else if (target.collectionId) {
      collectionId = target.collectionId;
      // Fetch the existing collection to capture its current CID.
      const { data, error } = await client.api.GET("/collections/{id}", {
        params: { path: { id: collectionId } }
      });
      if (error || !data) {
        throw new Error(`Failed to fetch collection: ${JSON.stringify(error)}`);
      }
      collectionCid = data.cid;
    } else {
      throw new Error("Must provide either collectionId or createCollection in target");
    }
    // Top-level entries attach here: an explicit parent folder if given,
    // otherwise directly under the collection.
    const rootParentId = target.parentId ?? collectionId;
    // --- Phase 2: compute content CIDs for all files ---
    reportProgress({
      phase: "computing-cids",
      totalFiles: tree.files.length,
      completedFiles: 0
    });
    const preparedFiles = [];
    let cidProgress = 0;
    // NOTE: preparedFiles is filled in completion order, not input order.
    await parallelLimit(tree.files, concurrency, async (file) => {
      try {
        const data = await file.getData();
        const cid = await computeCid(data);
        preparedFiles.push({
          ...file,
          cid
        });
        cidProgress++;
        reportProgress({
          phase: "computing-cids",
          completedFiles: cidProgress,
          currentFile: file.relativePath
        });
      } catch (err) {
        const errorMsg = err instanceof Error ? err.message : String(err);
        if (continueOnError) {
          errors.push({ path: file.relativePath, error: `CID computation failed: ${errorMsg}` });
        } else {
          throw new Error(`Failed to compute CID for ${file.relativePath}: ${errorMsg}`);
        }
      }
    });
    // --- Phase 3: create folder entities, parents before children ---
    reportProgress({
      phase: "creating-folders",
      totalFolders: tree.folders.length,
      completedFolders: 0
    });
    // Sort by path depth so a parent folder exists before its children.
    const sortedFolders = [...tree.folders].sort(
      (a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length
    );
    for (let i = 0; i < sortedFolders.length; i++) {
      const folder = sortedFolders[i];
      try {
        const folderBody = {
          label: folder.name,
          collection: collectionId,
          note
        };
        const { data, error } = await client.api.POST("/folders", {
          body: folderBody
        });
        if (error || !data) {
          throw new Error(JSON.stringify(error));
        }
        createdFolders.push({
          name: folder.name,
          relativePath: folder.relativePath,
          id: data.id,
          entityCid: data.cid
        });
        reportProgress({
          phase: "creating-folders",
          completedFolders: i + 1,
          currentFolder: folder.relativePath
        });
      } catch (err) {
        const errorMsg = err instanceof Error ? err.message : String(err);
        if (continueOnError) {
          errors.push({ path: folder.relativePath, error: `Folder creation failed: ${errorMsg}` });
        } else {
          throw new Error(`Failed to create folder ${folder.relativePath}: ${errorMsg}`);
        }
      }
    }
    // Path → created-folder lookup, used for parent resolution when linking.
    const folderPathToEntity = /* @__PURE__ */ new Map();
    for (const folder of createdFolders) {
      folderPathToEntity.set(folder.relativePath, folder);
    }
    // --- Phase 4: create file entities (yields presigned upload URLs) ---
    reportProgress({
      phase: "creating-files",
      totalFiles: preparedFiles.length,
      completedFiles: 0
    });
    let fileCreateProgress = 0;
    await parallelLimit(preparedFiles, concurrency, async (file) => {
      try {
        const fileBody = {
          key: file.cid,
          // Use CID as storage key (best practice)
          filename: file.name,
          content_type: file.mimeType,
          size: file.size,
          cid: file.cid,
          collection: collectionId
        };
        const { data, error } = await client.api.POST("/files", {
          body: fileBody
        });
        if (error || !data) {
          throw new Error(JSON.stringify(error));
        }
        createdFiles.push({
          ...file,
          id: data.id,
          entityCid: data.cid,
          uploadUrl: data.upload_url,
          uploadExpiresAt: data.upload_expires_at
        });
        fileCreateProgress++;
        reportProgress({
          phase: "creating-files",
          completedFiles: fileCreateProgress,
          currentFile: file.relativePath
        });
      } catch (err) {
        const errorMsg = err instanceof Error ? err.message : String(err);
        if (continueOnError) {
          errors.push({ path: file.relativePath, error: `File creation failed: ${errorMsg}` });
        } else {
          throw new Error(`Failed to create file ${file.relativePath}: ${errorMsg}`);
        }
      }
    });
    // --- Phase 5: upload the bytes to each presigned URL ---
    const totalBytes = createdFiles.reduce((sum, f) => sum + f.size, 0);
    let bytesUploaded = 0;
    reportProgress({
      phase: "uploading-content",
      totalFiles: createdFiles.length,
      completedFiles: 0,
      totalBytes,
      bytesUploaded: 0
    });
    let uploadProgress = 0;
    await parallelLimit(createdFiles, concurrency, async (file) => {
      try {
        const data = await file.getData();
        let body;
        if (data instanceof Blob) {
          body = data;
        } else if (data instanceof Uint8Array) {
          // Copy into a fresh ArrayBuffer: the Uint8Array may be a view
          // into a larger buffer, and Blob wraps whole buffers.
          const arrayBuffer = new ArrayBuffer(data.byteLength);
          new Uint8Array(arrayBuffer).set(data);
          body = new Blob([arrayBuffer], { type: file.mimeType });
        } else {
          body = new Blob([data], { type: file.mimeType });
        }
        const response = await fetch(file.uploadUrl, {
          method: "PUT",
          body,
          headers: {
            "Content-Type": file.mimeType
          }
        });
        if (!response.ok) {
          throw new Error(`Upload failed with status ${response.status}`);
        }
        bytesUploaded += file.size;
        uploadProgress++;
        reportProgress({
          phase: "uploading-content",
          completedFiles: uploadProgress,
          currentFile: file.relativePath,
          bytesUploaded,
          totalBytes
        });
      } catch (err) {
        const errorMsg = err instanceof Error ? err.message : String(err);
        if (continueOnError) {
          errors.push({ path: file.relativePath, error: `Upload failed: ${errorMsg}` });
        } else {
          throw new Error(`Failed to upload ${file.relativePath}: ${errorMsg}`);
        }
      }
    });
    // --- Phase 6: link all created entities under their parents ---
    reportProgress({ phase: "linking" });
    // NOTE(review): filePathToEntity is built but never read below — looks
    // like dead code left over from an earlier linking approach.
    const filePathToEntity = /* @__PURE__ */ new Map();
    for (const file of createdFiles) {
      filePathToEntity.set(file.relativePath, file);
    }
    // Group children by parent so each parent gets one bulk-link call.
    const parentGroups = /* @__PURE__ */ new Map();
    for (const folder of createdFolders) {
      const parentPath = getParentPath(folder.relativePath);
      let parentId;
      if (parentPath === null) {
        parentId = rootParentId;
      } else {
        const parentFolder = folderPathToEntity.get(parentPath);
        if (!parentFolder) {
          // Parent was never created (e.g. failed earlier with
          // continueOnError); record and skip this child.
          errors.push({
            path: folder.relativePath,
            error: `Parent folder not found: ${parentPath}`
          });
          continue;
        }
        parentId = parentFolder.id;
      }
      if (!parentGroups.has(parentId)) {
        parentGroups.set(parentId, { folderId: parentId, children: [] });
      }
      parentGroups.get(parentId).children.push({ id: folder.id });
    }
    for (const file of createdFiles) {
      const parentPath = getParentPath(file.relativePath);
      let parentId;
      if (parentPath === null) {
        parentId = rootParentId;
      } else {
        const parentFolder = folderPathToEntity.get(parentPath);
        if (!parentFolder) {
          errors.push({
            path: file.relativePath,
            error: `Parent folder not found: ${parentPath}`
          });
          continue;
        }
        parentId = parentFolder.id;
      }
      if (!parentGroups.has(parentId)) {
        parentGroups.set(parentId, { folderId: parentId, children: [] });
      }
      parentGroups.get(parentId).children.push({ id: file.id });
    }
    for (const [parentId, group] of parentGroups) {
      if (group.children.length === 0) continue;
      try {
        // Re-fetch the parent's current CID as an optimistic-concurrency
        // token (expect_tip) right before the bulk link.
        let expectTip;
        if (parentId === collectionId) {
          const { data, error: error2 } = await client.api.GET("/collections/{id}", {
            params: { path: { id: collectionId } }
          });
          if (error2 || !data) {
            throw new Error(`Failed to fetch collection CID: ${JSON.stringify(error2)}`);
          }
          expectTip = data.cid;
        } else {
          const { data, error: error2 } = await client.api.GET("/folders/{id}", {
            params: { path: { id: parentId } }
          });
          if (error2 || !data) {
            throw new Error(`Failed to fetch folder CID: ${JSON.stringify(error2)}`);
          }
          expectTip = data.cid;
        }
        const bulkBody = {
          expect_tip: expectTip,
          children: group.children,
          note
        };
        // NOTE(review): the folders bulk endpoint is also used when the
        // parent is the collection itself — presumably the API accepts a
        // collection id here; confirm against the OpenAPI spec.
        const { error } = await client.api.POST("/folders/{id}/children/bulk", {
          params: { path: { id: parentId } },
          body: bulkBody
        });
        if (error) {
          throw new Error(JSON.stringify(error));
        }
      } catch (err) {
        const errorMsg = err instanceof Error ? err.message : String(err);
        if (continueOnError) {
          errors.push({
            path: `parent:${parentId}`,
            error: `Bulk linking failed: ${errorMsg}`
          });
        } else {
          throw new Error(`Failed to link children to ${parentId}: ${errorMsg}`);
        }
      }
    }
    reportProgress({ phase: "complete" });
    // Shape the created entities into the public result format.
    const resultFolders = createdFolders.map((f) => ({
      id: f.id,
      cid: f.entityCid,
      type: "folder",
      relativePath: f.relativePath
    }));
    const resultFiles = createdFiles.map((f) => ({
      id: f.id,
      cid: f.entityCid,
      type: "file",
      relativePath: f.relativePath
    }));
    return {
      success: errors.length === 0,
      collection: {
        id: collectionId,
        cid: collectionCid,
        created: collectionCreated
      },
      folders: resultFolders,
      files: resultFiles,
      errors
    };
  } catch (err) {
    // Fatal path: report the error phase and return whatever was created
    // before the failure, with the fatal error appended.
    const errorMsg = err instanceof Error ? err.message : String(err);
    reportProgress({
      phase: "error",
      error: errorMsg
    });
    return {
      success: false,
      collection: {
        id: target.collectionId ?? "",
        cid: "",
        created: false
      },
      folders: createdFolders.map((f) => ({
        id: f.id,
        cid: f.entityCid,
        type: "folder",
        relativePath: f.relativePath
      })),
      files: createdFiles.map((f) => ({
        id: f.id,
        cid: f.entityCid,
        type: "file",
        relativePath: f.relativePath
      })),
      errors: [...errors, { path: "", error: errorMsg }]
    };
  }
}
413
+
414
+ // src/operations/upload/scanners.ts
415
// Extension → MIME type table, hoisted to module scope so the ~60-entry
// object literal is built once instead of on every getMimeType() call.
const MIME_TYPES = {
  // Images
  jpg: "image/jpeg",
  jpeg: "image/jpeg",
  png: "image/png",
  gif: "image/gif",
  webp: "image/webp",
  svg: "image/svg+xml",
  ico: "image/x-icon",
  bmp: "image/bmp",
  tiff: "image/tiff",
  tif: "image/tiff",
  // Documents
  pdf: "application/pdf",
  doc: "application/msword",
  docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
  xls: "application/vnd.ms-excel",
  xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
  ppt: "application/vnd.ms-powerpoint",
  pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
  odt: "application/vnd.oasis.opendocument.text",
  ods: "application/vnd.oasis.opendocument.spreadsheet",
  odp: "application/vnd.oasis.opendocument.presentation",
  // Text
  txt: "text/plain",
  md: "text/markdown",
  csv: "text/csv",
  html: "text/html",
  htm: "text/html",
  css: "text/css",
  xml: "text/xml",
  rtf: "application/rtf",
  // Code
  js: "text/javascript",
  mjs: "text/javascript",
  ts: "text/typescript",
  jsx: "text/javascript",
  tsx: "text/typescript",
  json: "application/json",
  yaml: "text/yaml",
  yml: "text/yaml",
  // Archives
  zip: "application/zip",
  tar: "application/x-tar",
  gz: "application/gzip",
  rar: "application/vnd.rar",
  "7z": "application/x-7z-compressed",
  // Audio
  mp3: "audio/mpeg",
  wav: "audio/wav",
  ogg: "audio/ogg",
  m4a: "audio/mp4",
  flac: "audio/flac",
  // Video
  mp4: "video/mp4",
  webm: "video/webm",
  avi: "video/x-msvideo",
  mov: "video/quicktime",
  mkv: "video/x-matroska",
  // Fonts
  woff: "font/woff",
  woff2: "font/woff2",
  ttf: "font/ttf",
  otf: "font/otf",
  // Other
  wasm: "application/wasm"
};
/**
 * Best-effort MIME type from a filename's extension (case-insensitive).
 *
 * @param filename - File name or path; only the text after the last "."
 *   is considered.
 * @returns The mapped MIME type, or "application/octet-stream" when the
 *   extension is missing or unknown.
 */
function getMimeType(filename) {
  // pop() on a no-dot name returns the whole name, which simply misses
  // the table and falls through to octet-stream.
  const ext = filename.toLowerCase().split(".").pop() || "";
  return MIME_TYPES[ext] || "application/octet-stream";
}
486
/**
 * Recursively scan a local directory (Node.js) into an upload tree.
 *
 * @param directoryPath - Absolute or relative directory to scan.
 * @param options.ignore - Entry names to skip (exact match);
 *   defaults to ["node_modules", ".git", ".DS_Store"].
 * @param options.includeHidden - When false (default), dotfiles are skipped.
 * @returns { files, folders } where folders are sorted shallowest-first
 *   and each file's getData() lazily reads the file as an ArrayBuffer.
 *
 * Fix: ignore entries were previously also run through `name.match(pattern)`,
 * which treated them as regular expressions — the default ".git" ("." = any
 * character) unintentionally skipped names like "digit". Matching is now
 * exact-name only, consistent with scanFileSystemEntries and scanFileList.
 */
async function scanDirectory(directoryPath, options = {}) {
  // Imported lazily so this module stays loadable in browsers, where these
  // Node built-ins do not exist.
  const fs = await import("fs/promises");
  const path = await import("path");
  const { ignore = ["node_modules", ".git", ".DS_Store"], includeHidden = false } = options;
  const files = [];
  const folders = [];
  async function scanDir(dirPath, relativePath) {
    const entries = await fs.readdir(dirPath, { withFileTypes: true });
    for (const entry of entries) {
      const name = entry.name;
      // Skip dotfiles unless explicitly included.
      if (!includeHidden && name.startsWith(".")) {
        continue;
      }
      // Exact-name ignore check (see fix note in the doc comment above).
      if (ignore.includes(name)) {
        continue;
      }
      const fullPath = path.join(dirPath, name);
      const entryRelativePath = relativePath ? `${relativePath}/${name}` : name;
      if (entry.isDirectory()) {
        folders.push({
          name,
          relativePath: entryRelativePath
        });
        await scanDir(fullPath, entryRelativePath);
      } else if (entry.isFile()) {
        const stat = await fs.stat(fullPath);
        files.push({
          name,
          relativePath: entryRelativePath,
          size: stat.size,
          mimeType: getMimeType(name),
          // Read lazily; slice to a tight ArrayBuffer because a Node Buffer
          // may be a view into a larger pooled allocation.
          getData: async () => {
            const buffer = await fs.readFile(fullPath);
            return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength);
          }
        });
      }
    }
  }
  await scanDir(directoryPath, "");
  // Parents before children so folder creation can proceed top-down.
  folders.sort((a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length);
  return { files, folders };
}
530
/**
 * Build an upload tree from browser FileSystemEntry objects (the legacy
 * drag-and-drop API, e.g. DataTransferItem.webkitGetAsEntry()).
 *
 * @param entries - Top-level FileSystemEntry objects; falsy entries skipped.
 * @param options.ignore - Entry names to skip by exact match;
 *   defaults to ["node_modules", ".git", ".DS_Store"].
 * @returns { files, folders } with folders sorted shallowest-first and each
 *   file's getData() resolving the content as an ArrayBuffer.
 */
async function scanFileSystemEntries(entries, options = {}) {
  const { ignore = ["node_modules", ".git", ".DS_Store"] } = options;
  const files = [];
  const folders = [];
  // Depth-first walk of one entry; recurses into directories.
  async function processEntry(entry, parentPath) {
    const name = entry.name;
    // Exact-name ignore check; an ignored directory prunes its whole subtree.
    if (ignore.some((pattern) => name === pattern)) {
      return;
    }
    const relativePath = parentPath ? `${parentPath}/${name}` : name;
    if (entry.isFile) {
      const fileEntry = entry;
      // FileSystemFileEntry.file() is callback-based; promisify it.
      const file = await new Promise((resolve, reject) => {
        fileEntry.file(resolve, reject);
      });
      files.push({
        name,
        relativePath,
        size: file.size,
        // Prefer the browser-reported type; fall back to the extension map.
        mimeType: file.type || getMimeType(name),
        getData: async () => file.arrayBuffer()
      });
    } else if (entry.isDirectory) {
      const dirEntry = entry;
      folders.push({
        name,
        relativePath
      });
      const reader = dirEntry.createReader();
      // readEntries() returns results in batches and must be called
      // repeatedly until it yields an empty array — a single call may
      // return only the first ~100 entries. Accumulate every batch.
      const childEntries = await new Promise((resolve, reject) => {
        const allEntries = [];
        function readEntries() {
          reader.readEntries((entries2) => {
            if (entries2.length === 0) {
              resolve(allEntries);
            } else {
              allEntries.push(...entries2);
              readEntries();
            }
          }, reject);
        }
        readEntries();
      });
      for (const childEntry of childEntries) {
        await processEntry(childEntry, relativePath);
      }
    }
  }
  for (const entry of entries) {
    if (entry) {
      await processEntry(entry, "");
    }
  }
  // Parents before children so folder creation can proceed top-down.
  folders.sort((a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length);
  return { files, folders };
}
586
/**
 * Build an upload tree from a FileList (e.g. <input webkitdirectory>).
 *
 * Folder entries are inferred from each file's webkitRelativePath; a file
 * is skipped entirely when any of its path segments is an ignored name.
 */
async function scanFileList(fileList, options = {}) {
  const { ignore = ["node_modules", ".git", ".DS_Store"] } = options;
  const files = [];
  const folderPaths = new Set();
  for (let idx = 0; idx < fileList.length; idx += 1) {
    const entry = fileList[idx];
    if (!entry) continue;
    // Plain file pickers have no webkitRelativePath; use the bare name.
    const relativePath = entry.webkitRelativePath || entry.name;
    const segments = relativePath.split("/");
    if (segments.some((segment) => ignore.includes(segment))) {
      continue;
    }
    // Register every ancestor directory of this file (all prefixes of the
    // path except the filename itself).
    for (let depth = 1; depth < segments.length; depth += 1) {
      folderPaths.add(segments.slice(0, depth).join("/"));
    }
    files.push({
      name: entry.name,
      relativePath,
      size: entry.size,
      mimeType: entry.type || getMimeType(entry.name),
      getData: async () => entry.arrayBuffer()
    });
  }
  const folders = [...folderPaths]
    .map((folderPath) => ({
      name: folderPath.split("/").pop(),
      relativePath: folderPath
    }))
    .sort((a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length);
  return { files, folders };
}
619
/**
 * Build an upload tree from in-memory items.
 *
 * Each item is { path, data, mimeType? } where data is a Blob, an
 * ArrayBuffer, or a Uint8Array/array-like. Folder entries are derived
 * from every ancestor directory appearing in the item paths.
 */
function buildUploadTree(items) {
  const files = [];
  const folderPaths = new Set();
  for (const item of items) {
    const segments = item.path.split("/");
    const name = segments.pop();
    // Every prefix of the remaining segments is an ancestor folder.
    for (let depth = 1; depth <= segments.length; depth += 1) {
      folderPaths.add(segments.slice(0, depth).join("/"));
    }
    const size =
      item.data instanceof Blob
        ? item.data.size
        : item.data instanceof ArrayBuffer
          ? item.data.byteLength
          : item.data.length;
    files.push({
      name,
      relativePath: item.path,
      size,
      mimeType: item.mimeType || getMimeType(name),
      getData: async () => item.data
    });
  }
  const folders = [...folderPaths]
    .map((folderPath) => ({
      name: folderPath.split("/").pop(),
      relativePath: folderPath
    }))
    .sort((a, b) => a.relativePath.split("/").length - b.relativePath.split("/").length);
  return { files, folders };
}
650
+
651
+ // src/operations/folders.ts
652
var FolderOperations = class {
  constructor(client) {
    this.client = client;
  }
  /**
   * Upload a local directory to Arke.
   *
   * Thin legacy wrapper over scanDirectory + uploadTree that converts the
   * engine's progress phases back to the old phase names.
   *
   * @deprecated Use uploadTree and scanDirectory instead
   */
  async uploadDirectory(localPath, options) {
    const tree = await scanDirectory(localPath);
    // Map new-engine phases onto the legacy phase vocabulary.
    const legacyPhase = (phase) => {
      switch (phase) {
        case "computing-cids":
        case "creating-folders":
          return "creating-folders";
        case "creating-files":
        case "uploading-content":
          return "uploading-files";
        case "linking":
          return "linking";
        case "complete":
          return "complete";
        default:
          // "scanning", "error", and anything unrecognized.
          return "scanning";
      }
    };
    const result = await uploadTree(this.client, tree, {
      target: {
        collectionId: options.collectionId,
        parentId: options.parentFolderId
      },
      concurrency: options.concurrency,
      onProgress: options.onProgress
        ? (p) => {
            options.onProgress({
              phase: legacyPhase(p.phase),
              totalFiles: p.totalFiles,
              completedFiles: p.completedFiles,
              totalFolders: p.totalFolders,
              completedFolders: p.completedFolders,
              currentFile: p.currentFile
            });
          }
        : void 0
    });
    return {
      rootFolder: result.folders[0] || null,
      folders: result.folders,
      files: result.files
    };
  }
};
687
+
688
+ // src/operations/batch.ts
689
var BatchOperations = class {
  /** @param client - Arke API client, kept for the future implementations. */
  constructor(client) {
    this.client = client;
  }
  /**
   * Create multiple entities in parallel.
   *
   * Not implemented yet — always rejects. TODO: Implement this method.
   */
  async createEntities(_entities, _options) {
    throw new Error("BatchOperations.createEntities is not yet implemented");
  }
  /**
   * Create multiple relationships in parallel.
   *
   * Not implemented yet — always rejects. TODO: Implement this method.
   */
  async createRelationships(_relationships, _options) {
    throw new Error("BatchOperations.createRelationships is not yet implemented");
  }
};
710
+
711
+ // src/operations/crypto.ts
712
var CryptoOperations = class {
  /**
   * Generate an Ed25519 key pair for agent authentication.
   *
   * Not implemented yet — always rejects.
   * TODO: Implement using Node.js crypto or Web Crypto API.
   */
  static async generateKeyPair() {
    throw new Error("CryptoOperations.generateKeyPair is not yet implemented");
  }
  /**
   * Sign a payload with an Ed25519 private key.
   *
   * Not implemented yet — always rejects. TODO: Implement signature generation.
   */
  static async signPayload(_privateKey, _payload) {
    throw new Error("CryptoOperations.signPayload is not yet implemented");
  }
  /**
   * Verify an Ed25519 signature.
   *
   * Not implemented yet — always rejects. TODO: Implement signature verification.
   */
  static async verifySignature(_publicKey, _payload, _signature) {
    throw new Error("CryptoOperations.verifySignature is not yet implemented");
  }
  /**
   * Compute an IPFS CID for content.
   *
   * Not implemented yet — always rejects. TODO: Implement using multiformats.
   */
  static async computeCID(_content) {
    throw new Error("CryptoOperations.computeCID is not yet implemented");
  }
};
746
+ export {
747
+ BatchOperations,
748
+ CryptoOperations,
749
+ FolderOperations,
750
+ buildUploadTree,
751
+ computeCid,
752
+ getMimeType,
753
+ scanDirectory,
754
+ scanFileList,
755
+ scanFileSystemEntries,
756
+ uploadTree,
757
+ verifyCid
758
+ };
759
+ //# sourceMappingURL=index.js.map