@lumeweb/pinner 0.0.1 → 0.1.0
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- package/LICENSE +9 -0
- package/README.md +690 -28
- package/dist/cjs/_virtual/rolldown_runtime.cjs +29 -0
- package/dist/cjs/adapters/pinata/adapter.cjs +88 -0
- package/dist/cjs/adapters/pinata/adapter.cjs.map +1 -0
- package/dist/cjs/adapters/pinata/adapter.d.cts +35 -0
- package/dist/cjs/adapters/pinata/builder.cjs +194 -0
- package/dist/cjs/adapters/pinata/builder.cjs.map +1 -0
- package/dist/cjs/adapters/pinata/index.cjs +3 -0
- package/dist/cjs/adapters/pinata/list-builder.cjs +52 -0
- package/dist/cjs/adapters/pinata/list-builder.cjs.map +1 -0
- package/dist/cjs/blockstore/index.cjs +2 -0
- package/dist/cjs/blockstore/unstorage-base.cjs +240 -0
- package/dist/cjs/blockstore/unstorage-base.cjs.map +1 -0
- package/dist/cjs/blockstore/unstorage-base.d.cts +23 -0
- package/dist/cjs/blockstore/unstorage.cjs +39 -0
- package/dist/cjs/blockstore/unstorage.cjs.map +1 -0
- package/dist/cjs/blockstore/unstorage.d.cts +36 -0
- package/dist/cjs/config.d.cts +51 -0
- package/dist/cjs/encoder/base64.cjs +38 -0
- package/dist/cjs/encoder/base64.cjs.map +1 -0
- package/dist/cjs/encoder/csv/csv-formatter.cjs +81 -0
- package/dist/cjs/encoder/csv/csv-formatter.cjs.map +1 -0
- package/dist/cjs/encoder/csv/field-formatter.cjs +76 -0
- package/dist/cjs/encoder/csv/field-formatter.cjs.map +1 -0
- package/dist/cjs/encoder/csv/row-formatter.cjs +159 -0
- package/dist/cjs/encoder/csv/row-formatter.cjs.map +1 -0
- package/dist/cjs/encoder/csv.cjs +44 -0
- package/dist/cjs/encoder/csv.cjs.map +1 -0
- package/dist/cjs/encoder/error.cjs +19 -0
- package/dist/cjs/encoder/error.cjs.map +1 -0
- package/dist/cjs/encoder/index.cjs +6 -0
- package/dist/cjs/encoder/json.cjs +36 -0
- package/dist/cjs/encoder/json.cjs.map +1 -0
- package/dist/cjs/encoder/text.cjs +35 -0
- package/dist/cjs/encoder/text.cjs.map +1 -0
- package/dist/cjs/encoder/url.cjs +39 -0
- package/dist/cjs/encoder/url.cjs.map +1 -0
- package/dist/cjs/errors/index.cjs +104 -0
- package/dist/cjs/errors/index.cjs.map +1 -0
- package/dist/cjs/errors/index.d.cts +47 -0
- package/dist/cjs/index.cjs +42 -0
- package/dist/cjs/index.d.cts +14 -0
- package/dist/cjs/pin/client.cjs +96 -0
- package/dist/cjs/pin/client.cjs.map +1 -0
- package/dist/cjs/pin/index.cjs +1 -0
- package/dist/cjs/pinner.cjs +126 -0
- package/dist/cjs/pinner.cjs.map +1 -0
- package/dist/cjs/pinner.d.cts +77 -0
- package/dist/cjs/types/constants.cjs +34 -0
- package/dist/cjs/types/constants.cjs.map +1 -0
- package/dist/cjs/types/mime-types.cjs +11 -0
- package/dist/cjs/types/mime-types.cjs.map +1 -0
- package/dist/cjs/types/mime-types.d.cts +7 -0
- package/dist/cjs/types/pin.d.cts +74 -0
- package/dist/cjs/types/pinata.d.cts +99 -0
- package/dist/cjs/types/type-guards.cjs +20 -0
- package/dist/cjs/types/type-guards.cjs.map +1 -0
- package/dist/cjs/types/type-guards.d.cts +15 -0
- package/dist/cjs/types/upload.cjs +18 -0
- package/dist/cjs/types/upload.cjs.map +1 -0
- package/dist/cjs/types/upload.d.cts +189 -0
- package/dist/cjs/upload/base-upload.cjs +135 -0
- package/dist/cjs/upload/base-upload.cjs.map +1 -0
- package/dist/cjs/upload/builder.cjs +174 -0
- package/dist/cjs/upload/builder.cjs.map +1 -0
- package/dist/cjs/upload/builder.d.cts +60 -0
- package/dist/cjs/upload/car.cjs +129 -0
- package/dist/cjs/upload/car.cjs.map +1 -0
- package/dist/cjs/upload/car.d.cts +19 -0
- package/dist/cjs/upload/constants.cjs +9 -0
- package/dist/cjs/upload/constants.cjs.map +1 -0
- package/dist/cjs/upload/index.cjs +8 -0
- package/dist/cjs/upload/manager.cjs +249 -0
- package/dist/cjs/upload/manager.cjs.map +1 -0
- package/dist/cjs/upload/manager.d.cts +35 -0
- package/dist/cjs/upload/normalize.cjs +28 -0
- package/dist/cjs/upload/normalize.cjs.map +1 -0
- package/dist/cjs/upload/tus-upload.cjs +74 -0
- package/dist/cjs/upload/tus-upload.cjs.map +1 -0
- package/dist/cjs/upload/xhr-upload.cjs +41 -0
- package/dist/cjs/upload/xhr-upload.cjs.map +1 -0
- package/dist/cjs/utils/env.cjs +12 -0
- package/dist/cjs/utils/env.cjs.map +1 -0
- package/dist/cjs/utils/stream.cjs +141 -0
- package/dist/cjs/utils/stream.cjs.map +1 -0
- package/dist/cjs/utils/stream.d.cts +23 -0
- package/dist/cjs/utils/tus-patch.cjs +50 -0
- package/dist/cjs/utils/tus-patch.cjs.map +1 -0
- package/dist/cjs/utils/validation.cjs +62 -0
- package/dist/cjs/utils/validation.cjs.map +1 -0
- package/dist/esm/_virtual/rolldown_runtime.js +8 -0
- package/dist/esm/adapters/pinata/adapter.d.ts +35 -0
- package/dist/esm/adapters/pinata/adapter.js +87 -0
- package/dist/esm/adapters/pinata/adapter.js.map +1 -0
- package/dist/esm/adapters/pinata/builder.d.ts +1 -0
- package/dist/esm/adapters/pinata/builder.js +187 -0
- package/dist/esm/adapters/pinata/builder.js.map +1 -0
- package/dist/esm/adapters/pinata/index.d.ts +4 -0
- package/dist/esm/adapters/pinata/index.js +3 -0
- package/dist/esm/adapters/pinata/list-builder.d.ts +1 -0
- package/dist/esm/adapters/pinata/list-builder.js +51 -0
- package/dist/esm/adapters/pinata/list-builder.js.map +1 -0
- package/dist/esm/blockstore/index.d.ts +2 -0
- package/dist/esm/blockstore/index.js +2 -0
- package/dist/esm/blockstore/unstorage-base.d.ts +23 -0
- package/dist/esm/blockstore/unstorage-base.js +231 -0
- package/dist/esm/blockstore/unstorage-base.js.map +1 -0
- package/dist/esm/blockstore/unstorage.d.ts +36 -0
- package/dist/esm/blockstore/unstorage.js +38 -0
- package/dist/esm/blockstore/unstorage.js.map +1 -0
- package/dist/esm/config.d.ts +51 -0
- package/dist/esm/encoder/base64.js +37 -0
- package/dist/esm/encoder/base64.js.map +1 -0
- package/dist/esm/encoder/csv/csv-formatter.js +81 -0
- package/dist/esm/encoder/csv/csv-formatter.js.map +1 -0
- package/dist/esm/encoder/csv/field-formatter.js +75 -0
- package/dist/esm/encoder/csv/field-formatter.js.map +1 -0
- package/dist/esm/encoder/csv/row-formatter.js +159 -0
- package/dist/esm/encoder/csv/row-formatter.js.map +1 -0
- package/dist/esm/encoder/csv.js +43 -0
- package/dist/esm/encoder/csv.js.map +1 -0
- package/dist/esm/encoder/error.js +18 -0
- package/dist/esm/encoder/error.js.map +1 -0
- package/dist/esm/encoder/index.js +6 -0
- package/dist/esm/encoder/json.js +35 -0
- package/dist/esm/encoder/json.js.map +1 -0
- package/dist/esm/encoder/text.js +34 -0
- package/dist/esm/encoder/text.js.map +1 -0
- package/dist/esm/encoder/url.js +36 -0
- package/dist/esm/encoder/url.js.map +1 -0
- package/dist/esm/errors/index.d.ts +47 -0
- package/dist/esm/errors/index.js +93 -0
- package/dist/esm/errors/index.js.map +1 -0
- package/dist/esm/index.d.ts +16 -0
- package/dist/esm/index.js +14 -0
- package/dist/esm/pin/client.js +95 -0
- package/dist/esm/pin/client.js.map +1 -0
- package/dist/esm/pin/index.js +1 -0
- package/dist/esm/pinner.d.ts +77 -0
- package/dist/esm/pinner.js +125 -0
- package/dist/esm/pinner.js.map +1 -0
- package/dist/esm/types/constants.js +29 -0
- package/dist/esm/types/constants.js.map +1 -0
- package/dist/esm/types/mime-types.d.ts +7 -0
- package/dist/esm/types/mime-types.js +8 -0
- package/dist/esm/types/mime-types.js.map +1 -0
- package/dist/esm/types/pin.d.ts +74 -0
- package/dist/esm/types/pinata.d.ts +99 -0
- package/dist/esm/types/type-guards.d.ts +15 -0
- package/dist/esm/types/type-guards.js +19 -0
- package/dist/esm/types/type-guards.js.map +1 -0
- package/dist/esm/types/upload.d.ts +189 -0
- package/dist/esm/types/upload.js +16 -0
- package/dist/esm/types/upload.js.map +1 -0
- package/dist/esm/upload/base-upload.js +132 -0
- package/dist/esm/upload/base-upload.js.map +1 -0
- package/dist/esm/upload/builder.d.ts +60 -0
- package/dist/esm/upload/builder.js +173 -0
- package/dist/esm/upload/builder.js.map +1 -0
- package/dist/esm/upload/car.d.ts +19 -0
- package/dist/esm/upload/car.js +125 -0
- package/dist/esm/upload/car.js.map +1 -0
- package/dist/esm/upload/constants.js +7 -0
- package/dist/esm/upload/constants.js.map +1 -0
- package/dist/esm/upload/index.js +8 -0
- package/dist/esm/upload/manager.d.ts +35 -0
- package/dist/esm/upload/manager.js +248 -0
- package/dist/esm/upload/manager.js.map +1 -0
- package/dist/esm/upload/normalize.js +28 -0
- package/dist/esm/upload/normalize.js.map +1 -0
- package/dist/esm/upload/tus-upload.js +72 -0
- package/dist/esm/upload/tus-upload.js.map +1 -0
- package/dist/esm/upload/xhr-upload.js +39 -0
- package/dist/esm/upload/xhr-upload.js.map +1 -0
- package/dist/esm/utils/env.js +11 -0
- package/dist/esm/utils/env.js.map +1 -0
- package/dist/esm/utils/stream.d.ts +23 -0
- package/dist/esm/utils/stream.js +134 -0
- package/dist/esm/utils/stream.js.map +1 -0
- package/dist/esm/utils/tus-patch.js +51 -0
- package/dist/esm/utils/tus-patch.js.map +1 -0
- package/dist/esm/utils/validation.js +60 -0
- package/dist/esm/utils/validation.js.map +1 -0
- package/package.json +95 -8
- package/public/mockServiceWorker.js +349 -0

package/dist/cjs/upload/car.cjs
@@ -0,0 +1,129 @@
+const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
+const require_mime_types = require('../types/mime-types.cjs');
+const require_stream = require('../utils/stream.cjs');
+const require_unstorage = require('../blockstore/unstorage.cjs');
+require('../blockstore/index.cjs');
+let _helia_car = require("@helia/car");
+let _helia_http = require("@helia/http");
+let _helia_unixfs = require("@helia/unixfs");
+let _ipld_car = require("@ipld/car");
+
+//#region src/upload/car.ts
+let helia = null;
+let blockstore = null;
+let datastore = null;
+let config = {};
+function configureCar(carConfig) {
+	config = carConfig;
+}
+async function getHelia() {
+	if (helia) return helia;
+	const BlockstoreClass = require_unstorage.createBlockstore();
+	const DatastoreClass = require_unstorage.createDatastore();
+	blockstore = new BlockstoreClass({
+		prefix: "pinner-helia-blocks",
+		base: config.datastoreName
+	});
+	datastore = config.datastore || new DatastoreClass({
+		prefix: "pinner-helia-data",
+		base: config.datastoreName
+	});
+	helia = await (0, _helia_http.createHeliaHTTP)({
+		blockstore,
+		datastore
+	});
+	return helia;
+}
+async function cleanupHelia() {
+	if (datastore?.close) await datastore.close();
+	helia = null;
+	blockstore = null;
+	datastore = null;
+}
+async function* fileSource(files, onProgress, signal) {
+	const seenDirs = /* @__PURE__ */ new Set();
+	let totalBytes = 0n;
+	let processedBytes = 0n;
+	for (const file of files) totalBytes += BigInt(file.size);
+	for (const file of files) {
+		if (signal?.aborted) throw new Error("Aborted");
+		const fullPath = file.webkitRelativePath ?? file.name;
+		if (fullPath.includes("/.")) continue;
+		const parts = fullPath.split("/").filter((part) => part.length > 0);
+		for (let i = 1; i < parts.length; i++) {
+			if (signal?.aborted) throw new Error("Aborted");
+			const dirPath = parts.slice(0, i).join("/");
+			if (!seenDirs.has(dirPath)) {
+				seenDirs.add(dirPath);
+				yield {
+					content: (async function* () {})(),
+					path: dirPath
+				};
+			}
+		}
+		yield {
+			content: require_stream.readableStreamToAsyncIterable(file.stream()),
+			path: fullPath
+		};
+		if (onProgress && totalBytes > 0n) {
+			processedBytes += BigInt(file.size);
+			onProgress(Number(processedBytes * 100n / totalBytes));
+		}
+	}
+}
+async function preprocessToCar(input, options) {
+	let files;
+	if (input instanceof ReadableStream) {
+		const [streamForSize$1, streamForFile] = input.tee();
+		await require_stream.calculateStreamSize(streamForSize$1, options?.signal);
+		const streamBlob = await require_stream.streamToBlob(streamForFile, "application/octet-stream");
+		files = [new File([streamBlob], options?.name || "upload", { type: streamBlob.type })];
+	} else if (Array.isArray(input)) files = input;
+	else files = [input];
+	const heliaInstance = await getHelia();
+	const fs = (0, _helia_unixfs.unixfs)(heliaInstance);
+	const c = (0, _helia_car.car)(heliaInstance);
+	let rootCid;
+	let blocksCount = 0n;
+	const src = fileSource(files, options?.onProgress, options?.signal);
+	let hasFiles = false;
+	for await (const result of fs.addAll(src, {
+		cidVersion: 1,
+		rawLeaves: false,
+		signal: options?.signal,
+		onProgress(event) {
+			if (event.type === "blocks:put:blockstore:put") blocksCount++;
+		}
+	})) {
+		if (options?.signal?.aborted) throw new Error("Aborted");
+		rootCid = result.cid;
+		hasFiles = true;
+	}
+	if (!hasFiles || !rootCid) throw new Error("No files to process");
+	const [streamForSize, streamForProcessing] = require_stream.asyncGeneratorToReadableStream(c.export(rootCid, { signal: options?.signal })).tee();
+	const size = await require_stream.calculateStreamSize(streamForSize, options?.signal);
+	return {
+		carStream: streamForProcessing,
+		rootCid: rootCid.toString(),
+		size
+	};
+}
+async function isCarFile(file) {
+	if (file.type !== require_mime_types.MIME_TYPE_CAR && !file.name.endsWith(require_mime_types.FILE_EXTENSION_CAR)) return false;
+	try {
+		const iterable = require_stream.readableStreamToAsyncIterable(file.stream());
+		return (await (await _ipld_car.CarReader.fromIterable(iterable)).getRoots()).length > 0;
+	} catch {
+		return false;
+	}
+}
+async function destroyCarPreprocessor() {
+	await cleanupHelia();
+}
+
+//#endregion
+exports.configureCar = configureCar;
+exports.destroyCarPreprocessor = destroyCarPreprocessor;
+exports.isCarFile = isCarFile;
+exports.preprocessToCar = preprocessToCar;
+//# sourceMappingURL=car.cjs.map
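Two stream helpers from `utils/stream.cjs` carry this hunk: `preprocessToCar` tees its input `ReadableStream` so one branch can be measured while the other is packed, and `calculateStreamSize` is expected to drain its branch and report a byte count. That helper is not part of this hunk, so the sketch below is only an assumption about the pattern, not the package's implementation; `measureStreamSize` is a hypothetical name.

```ts
// Hypothetical sketch of the tee-and-measure pattern used by preprocessToCar.
// The real helper is calculateStreamSize in utils/stream.cjs (not shown here).
async function measureStreamSize(
  stream: ReadableStream<Uint8Array>,
  signal?: AbortSignal,
): Promise<bigint> {
  const reader = stream.getReader();
  let total = 0n;
  for (;;) {
    if (signal?.aborted) throw new Error("Aborted");
    const { done, value } = await reader.read();
    if (done || !value) return total;
    total += BigInt(value.byteLength); // count bytes as this branch drains
  }
}

// tee() yields two identical branches: measure one, keep the other for upload.
declare const carStream: ReadableStream<Uint8Array>;
const [forSize, forUpload] = carStream.tee();
const size = await measureStreamSize(forSize);
```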

package/dist/cjs/upload/car.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"car.cjs","names":["createBlockstore","createUnstorageDatastore","readableStreamToAsyncIterable","streamForSize","calculateStreamSize","streamToBlob","asyncGeneratorToReadableStream","MIME_TYPE_CAR","FILE_EXTENSION_CAR","CarReader"],"sources":["../../../src/upload/car.ts"],"sourcesContent":["import { car } from \"@helia/car\";\nimport { createHeliaHTTP } from \"@helia/http\";\nimport { unixfs } from \"@helia/unixfs\";\nimport {\n createBlockstore,\n createDatastore as createUnstorageDatastore,\n} from \"@/blockstore\";\nimport type { CID } from \"multiformats/cid\";\nimport { CarReader } from \"@ipld/car\";\nimport type { Datastore } from \"interface-datastore\";\n\nimport {\n asyncGeneratorToReadableStream,\n calculateStreamSize,\n readableStreamToAsyncIterable,\n streamToBlob,\n} from \"@/utils/stream\";\nimport { FILE_EXTENSION_CAR, MIME_TYPE_CAR } from \"@/types/mime-types\";\n\nexport interface CarPreprocessOptions {\n name?: string;\n onProgress?: (percentage: number) => void;\n signal?: AbortSignal;\n}\n\nexport interface CarPreprocessResult {\n carStream: ReadableStream<Uint8Array>;\n rootCid: string;\n size: bigint;\n}\n\nexport interface CarConfig {\n /**\n * Custom datastore instance for Helia.\n * If provided, this datastore will be used directly without creating one from storage.\n * Highest priority - takes precedence over storage and datastoreName.\n */\n datastore?: Datastore;\n\n /**\n * Custom base name for Helia storage.\n * Passed as the `base` option to both blockstore and datastore storage instances.\n * Only used when datastore is not provided.\n * @default \"pinner-helia-data\"\n */\n datastoreName?: string;\n}\n\nlet helia: any = null;\nlet blockstore: any = null;\nlet datastore: any = null;\nlet config: CarConfig = {};\n\nexport function configureCar(carConfig: CarConfig) {\n config = carConfig;\n}\n\nasync function getHelia() {\n if (helia) return helia;\n\n const BlockstoreClass = createBlockstore();\n const DatastoreClass = createUnstorageDatastore();\n\n blockstore = new BlockstoreClass({\n prefix: \"pinner-helia-blocks\",\n base: config.datastoreName,\n });\n datastore =\n config.datastore ||\n new DatastoreClass({\n prefix: \"pinner-helia-data\",\n base: config.datastoreName,\n });\n\n helia = await createHeliaHTTP({\n blockstore,\n datastore,\n });\n\n return helia;\n}\n\nasync function cleanupHelia() {\n if (datastore?.close) {\n await datastore.close();\n }\n helia = null;\n blockstore = null;\n datastore = null;\n}\n\nasync function* fileSource(\n files: File[],\n onProgress?: (percentage: number) => void,\n signal?: AbortSignal,\n): AsyncGenerator<{\n content: AsyncIterable<Uint8Array> | undefined;\n path: string;\n}> {\n const seenDirs = new Set<string>();\n let totalBytes = 0n;\n let processedBytes = 0n;\n\n for (const file of files) {\n totalBytes += BigInt(file.size);\n }\n\n for (const file of files) {\n if (signal?.aborted) {\n throw new Error(\"Aborted\");\n }\n\n const fullPath = (file as any).webkitRelativePath ?? 
file.name;\n\n if (fullPath.includes(\"/.\")) {\n continue;\n }\n\n const parts = fullPath.split(\"/\").filter((part: string) => part.length > 0);\n\n for (let i = 1; i < parts.length; i++) {\n if (signal?.aborted) {\n throw new Error(\"Aborted\");\n }\n\n const dirPath = parts.slice(0, i).join(\"/\");\n\n if (!seenDirs.has(dirPath)) {\n seenDirs.add(dirPath);\n yield {\n content: (async function* () {})(),\n path: dirPath,\n };\n }\n }\n\n yield {\n content: readableStreamToAsyncIterable(file.stream()),\n path: fullPath,\n };\n\n if (onProgress && totalBytes > 0n) {\n processedBytes += BigInt(file.size);\n const progressPercent = Number((processedBytes * 100n) / totalBytes);\n onProgress(progressPercent);\n }\n }\n}\n\nexport async function preprocessToCar(\n input: File | ReadableStream<Uint8Array> | File[],\n options?: CarPreprocessOptions,\n): Promise<CarPreprocessResult> {\n let files: File[];\n\n if (input instanceof ReadableStream) {\n const [streamForSize, streamForFile] = input.tee();\n const size = await calculateStreamSize(streamForSize, options?.signal);\n const streamBlob = await streamToBlob(\n streamForFile,\n \"application/octet-stream\",\n );\n files = [\n new File([streamBlob], options?.name || \"upload\", {\n type: streamBlob.type,\n }),\n ];\n } else if (Array.isArray(input)) {\n files = input;\n } else {\n files = [input];\n }\n\n const heliaInstance = await getHelia();\n const fs = unixfs(heliaInstance);\n const c = car(heliaInstance);\n\n let rootCid: CID | undefined;\n let blocksCount = 0n;\n\n const src = fileSource(files, options?.onProgress, options?.signal);\n\n let hasFiles = false;\n for await (const result of fs.addAll(src, {\n cidVersion: 1,\n rawLeaves: false,\n signal: options?.signal,\n onProgress(event) {\n if (event.type === \"blocks:put:blockstore:put\") {\n blocksCount++;\n }\n },\n })) {\n if (options?.signal?.aborted) {\n throw new Error(\"Aborted\");\n }\n rootCid = result.cid;\n hasFiles = true;\n }\n\n if (!hasFiles || !rootCid) {\n throw new Error(\"No files to process\");\n }\n\n // c.export() now returns an async generator directly (was renamed from 'stream')\n const carAsyncGenerator = c.export(rootCid!, { signal: options?.signal });\n const carStream = asyncGeneratorToReadableStream(carAsyncGenerator);\n\n // Use stream tee to create two identical streams - one for size calculation, one for processing\n const [streamForSize, streamForProcessing] = carStream.tee();\n\n const size = await calculateStreamSize(streamForSize, options?.signal);\n\n return {\n carStream: streamForProcessing,\n rootCid: rootCid!.toString(),\n size,\n };\n}\n\nexport async function isCarFile(file: File): Promise<boolean> {\n if (file.type !== MIME_TYPE_CAR && !file.name.endsWith(FILE_EXTENSION_CAR)) {\n return false;\n }\n\n try {\n const iterable = readableStreamToAsyncIterable(file.stream());\n const reader = await CarReader.fromIterable(iterable);\n const roots = await reader.getRoots();\n return roots.length > 0;\n } catch {\n return false;\n }\n}\n\nexport async function destroyCarPreprocessor() {\n await 
cleanupHelia();\n}\n"],"mappings":";;;;;;;;;;;AAgDA,IAAI,QAAa;AACjB,IAAI,aAAkB;AACtB,IAAI,YAAiB;AACrB,IAAI,SAAoB,EAAE;AAE1B,SAAgB,aAAa,WAAsB;AACjD,UAAS;;AAGX,eAAe,WAAW;AACxB,KAAI,MAAO,QAAO;CAElB,MAAM,kBAAkBA,oCAAkB;CAC1C,MAAM,iBAAiBC,mCAA0B;AAEjD,cAAa,IAAI,gBAAgB;EAC/B,QAAQ;EACR,MAAM,OAAO;EACd,CAAC;AACF,aACE,OAAO,aACP,IAAI,eAAe;EACjB,QAAQ;EACR,MAAM,OAAO;EACd,CAAC;AAEJ,SAAQ,uCAAsB;EAC5B;EACA;EACD,CAAC;AAEF,QAAO;;AAGT,eAAe,eAAe;AAC5B,KAAI,WAAW,MACb,OAAM,UAAU,OAAO;AAEzB,SAAQ;AACR,cAAa;AACb,aAAY;;AAGd,gBAAgB,WACd,OACA,YACA,QAIC;CACD,MAAM,2BAAW,IAAI,KAAa;CAClC,IAAI,aAAa;CACjB,IAAI,iBAAiB;AAErB,MAAK,MAAM,QAAQ,MACjB,eAAc,OAAO,KAAK,KAAK;AAGjC,MAAK,MAAM,QAAQ,OAAO;AACxB,MAAI,QAAQ,QACV,OAAM,IAAI,MAAM,UAAU;EAG5B,MAAM,WAAY,KAAa,sBAAsB,KAAK;AAE1D,MAAI,SAAS,SAAS,KAAK,CACzB;EAGF,MAAM,QAAQ,SAAS,MAAM,IAAI,CAAC,QAAQ,SAAiB,KAAK,SAAS,EAAE;AAE3E,OAAK,IAAI,IAAI,GAAG,IAAI,MAAM,QAAQ,KAAK;AACrC,OAAI,QAAQ,QACV,OAAM,IAAI,MAAM,UAAU;GAG5B,MAAM,UAAU,MAAM,MAAM,GAAG,EAAE,CAAC,KAAK,IAAI;AAE3C,OAAI,CAAC,SAAS,IAAI,QAAQ,EAAE;AAC1B,aAAS,IAAI,QAAQ;AACrB,UAAM;KACJ,UAAU,mBAAmB,KAAK;KAClC,MAAM;KACP;;;AAIL,QAAM;GACJ,SAASC,6CAA8B,KAAK,QAAQ,CAAC;GACrD,MAAM;GACP;AAED,MAAI,cAAc,aAAa,IAAI;AACjC,qBAAkB,OAAO,KAAK,KAAK;AAEnC,cADwB,OAAQ,iBAAiB,OAAQ,WAAW,CACzC;;;;AAKjC,eAAsB,gBACpB,OACA,SAC8B;CAC9B,IAAI;AAEJ,KAAI,iBAAiB,gBAAgB;EACnC,MAAM,CAACC,iBAAe,iBAAiB,MAAM,KAAK;AACrC,QAAMC,mCAAoBD,iBAAe,SAAS,OAAO;EACtE,MAAM,aAAa,MAAME,4BACvB,eACA,2BACD;AACD,UAAQ,CACN,IAAI,KAAK,CAAC,WAAW,EAAE,SAAS,QAAQ,UAAU,EAChD,MAAM,WAAW,MAClB,CAAC,CACH;YACQ,MAAM,QAAQ,MAAM,CAC7B,SAAQ;KAER,SAAQ,CAAC,MAAM;CAGjB,MAAM,gBAAgB,MAAM,UAAU;CACtC,MAAM,+BAAY,cAAc;CAChC,MAAM,wBAAQ,cAAc;CAE5B,IAAI;CACJ,IAAI,cAAc;CAElB,MAAM,MAAM,WAAW,OAAO,SAAS,YAAY,SAAS,OAAO;CAEnE,IAAI,WAAW;AACf,YAAW,MAAM,UAAU,GAAG,OAAO,KAAK;EACxC,YAAY;EACZ,WAAW;EACX,QAAQ,SAAS;EACjB,WAAW,OAAO;AAChB,OAAI,MAAM,SAAS,4BACjB;;EAGL,CAAC,EAAE;AACF,MAAI,SAAS,QAAQ,QACnB,OAAM,IAAI,MAAM,UAAU;AAE5B,YAAU,OAAO;AACjB,aAAW;;AAGb,KAAI,CAAC,YAAY,CAAC,QAChB,OAAM,IAAI,MAAM,sBAAsB;CAQxC,MAAM,CAAC,eAAe,uBAHJC,8CADQ,EAAE,OAAO,SAAU,EAAE,QAAQ,SAAS,QAAQ,CAAC,CACN,CAGZ,KAAK;CAE5D,MAAM,OAAO,MAAMF,mCAAoB,eAAe,SAAS,OAAO;AAEtE,QAAO;EACL,WAAW;EACX,SAAS,QAAS,UAAU;EAC5B;EACD;;AAGH,eAAsB,UAAU,MAA8B;AAC5D,KAAI,KAAK,SAASG,oCAAiB,CAAC,KAAK,KAAK,SAASC,sCAAmB,CACxE,QAAO;AAGT,KAAI;EACF,MAAM,WAAWN,6CAA8B,KAAK,QAAQ,CAAC;AAG7D,UADc,OADC,MAAMO,oBAAU,aAAa,SAAS,EAC1B,UAAU,EACxB,SAAS;SAChB;AACN,SAAO;;;AAIX,eAAsB,yBAAyB;AAC7C,OAAM,cAAc"}

package/dist/cjs/upload/car.d.cts
@@ -0,0 +1,19 @@
+import { Datastore } from "interface-datastore";
+
+//#region src/upload/car.d.ts
+interface CarPreprocessOptions {
+	name?: string;
+	onProgress?: (percentage: number) => void;
+	signal?: AbortSignal;
+}
+interface CarPreprocessResult {
+	carStream: ReadableStream<Uint8Array>;
+	rootCid: string;
+	size: bigint;
+}
+declare function preprocessToCar(input: File | ReadableStream<Uint8Array> | File[], options?: CarPreprocessOptions): Promise<CarPreprocessResult>;
+declare function isCarFile(file: File): Promise<boolean>;
+declare function destroyCarPreprocessor(): Promise<void>;
+//#endregion
+export { CarPreprocessOptions, CarPreprocessResult, destroyCarPreprocessor, isCarFile, preprocessToCar };
+//# sourceMappingURL=car.d.cts.map
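Taken together, these declarations describe the CAR preprocessing surface. A hedged usage sketch follows; the import specifier and the option values are illustrative assumptions, not documented examples.

```ts
import { preprocessToCar, isCarFile, destroyCarPreprocessor } from "@lumeweb/pinner";

// Pack a set of files into a CAR archive and obtain its root CID and size.
const files: File[] = []; // e.g. collected from a directory picker
const { carStream, rootCid, size } = await preprocessToCar(files, {
  name: "my-site",
  onProgress: (pct) => console.log(`packing: ${pct}%`),
});
console.log(rootCid, size);

// isCarFile checks the MIME type / .car extension, then tries to read roots.
const valid = await isCarFile(new File([], "archive.car"));

// Tear down the shared Helia instance when finished.
await destroyCarPreprocessor();
```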

package/dist/cjs/upload/constants.cjs
@@ -0,0 +1,9 @@
+
+//#region src/upload/constants.ts
+const UPLOAD_SOURCE_XHR = "xhr-upload";
+const UPLOAD_SOURCE_TUS = "tus-upload";
+
+//#endregion
+exports.UPLOAD_SOURCE_TUS = UPLOAD_SOURCE_TUS;
+exports.UPLOAD_SOURCE_XHR = UPLOAD_SOURCE_XHR;
+//# sourceMappingURL=constants.cjs.map

package/dist/cjs/upload/constants.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"constants.cjs","names":[],"sources":["../../../src/upload/constants.ts"],"sourcesContent":["export const UPLOAD_SOURCE_XHR = \"xhr-upload\" as const;\nexport const UPLOAD_SOURCE_TUS = \"tus-upload\" as const;\n"],"mappings":";;AAAA,MAAa,oBAAoB;AACjC,MAAa,oBAAoB"}

package/dist/cjs/upload/index.cjs
@@ -0,0 +1,8 @@
+const require_upload = require('../types/upload.cjs');
+const require_normalize = require('./normalize.cjs');
+const require_base_upload = require('./base-upload.cjs');
+const require_xhr_upload = require('./xhr-upload.cjs');
+const require_tus_upload = require('./tus-upload.cjs');
+const require_car = require('./car.cjs');
+const require_manager = require('./manager.cjs');
+const require_builder = require('./builder.cjs');

package/dist/cjs/upload/manager.cjs
@@ -0,0 +1,249 @@
+const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
+const require_upload = require('../types/upload.cjs');
+const require_mime_types = require('../types/mime-types.cjs');
+const require_stream = require('../utils/stream.cjs');
+const require_xhr_upload = require('./xhr-upload.cjs');
+const require_tus_upload = require('./tus-upload.cjs');
+const require_constants = require('../types/constants.cjs');
+const require_car = require('./car.cjs');
+const require_index = require('../errors/index.cjs');
+let _lumeweb_portal_sdk = require("@lumeweb/portal-sdk");
+let _lumeweb_query_builder = require("@lumeweb/query-builder");
+
+//#region src/upload/manager.ts
+var UploadManager = class {
+	xhrHandler;
+	tusHandler;
+	portalSdk;
+	uploadLimit = require_constants.TUS_SIZE_THRESHOLD;
+	limitFetched = false;
+	constructor(config) {
+		this.xhrHandler = new require_xhr_upload.XHRUploadHandler(config);
+		this.tusHandler = new require_tus_upload.TUSUploadHandler(config);
+		this.portalSdk = new _lumeweb_portal_sdk.Sdk(config.endpoint || require_constants.DEFAULT_ENDPOINT);
+		require_car.configureCar({
+			datastoreName: config.datastoreName,
+			datastore: config.datastore
+		});
+	}
+	async fetchUploadLimit() {
+		if (this.limitFetched) return this.uploadLimit;
+		try {
+			const result = await this.portalSdk.account().uploadLimit();
+			if (result.success && result.data?.limit) this.uploadLimit = result.data.limit;
+		} catch {
+			this.uploadLimit = require_constants.TUS_SIZE_THRESHOLD;
+		}
+		this.limitFetched = true;
+		return this.uploadLimit;
+	}
+	getUploadLimit() {
+		return this.uploadLimit;
+	}
+	async upload(input, options) {
+		this.#validateInput(input, options);
+		return this.#uploadInput(input, options);
+	}
+	async uploadCar(input, options) {
+		this.#validateInput(input, options);
+		return this.#uploadCarFile(input, options);
+	}
+	/**
+	* Wait for an operation to complete or reach a settled state.
+	*
+	* Handles two scenarios:
+	* 1. If an operationId is provided (in UploadResult), uses it directly
+	* 2. If only CID is available, lists operations filtered by CID and polls the first result
+	*
+	* @param input Either an operation ID (number) or an UploadResult
+	* @param options Polling options (interval, timeout, settledStates)
+	* @returns UploadResult with operation status merged in
+	*/
+	async waitForOperation(input, options) {
+		if (require_upload.isUploadResult(input) && input.operationId) {
+			const result = await this.portalSdk.account().waitForOperation(input.operationId, options);
+			if (!result.success) throw new Error(result.error?.message || `Operation ${input.operationId} failed`);
+			const operation = result.data;
+			if (!operation.cid) throw new Error(`Operation ${input.operationId} completed without CID`);
+			if (operation.status?.toLowerCase() === _lumeweb_portal_sdk.OPERATION_STATUS.FAILED || operation.status?.toLowerCase() === _lumeweb_portal_sdk.OPERATION_STATUS.ERROR) throw new Error(`Operation ${input.operationId} failed: ${operation.error || operation.status_message || "Unknown error"}`);
+			return {
+				...input,
+				cid: operation.cid,
+				operationId: operation.id
+			};
+		}
+		if (require_upload.isUploadResult(input) && input.cid) return await this.#waitForOperationByCid(input, options);
+		const operationId = typeof input === "number" ? input : void 0;
+		if (operationId) {
+			const result = await this.portalSdk.account().waitForOperation(operationId, options);
+			if (!result.success) throw new Error(result.error?.message || `Operation ${operationId} failed`);
+			const operation = result.data;
+			if (!operation.cid) throw new Error(`Operation ${operationId} completed without CID`);
+			if (operation.status?.toLowerCase() === _lumeweb_portal_sdk.OPERATION_STATUS.FAILED || operation.status?.toLowerCase() === _lumeweb_portal_sdk.OPERATION_STATUS.ERROR) throw new Error(`Operation ${operationId} failed: ${operation.error || operation.status_message || "Unknown error"}`);
+			return {
+				id: operationId.toString(),
+				cid: operation.cid,
+				name: operation.operation_display_name || "Unknown",
+				size: 0,
+				mimeType: "",
+				createdAt: new Date(operation.started_at),
+				numberOfFiles: 1,
+				operationId: operation.id
+			};
+		}
+		throw new Error("No operation ID or CID provided, cannot wait for operation");
+	}
+	/**
+	* Wait for an operation by CID.
+	*
+	* This is used when we have a CID from an upload but no operation ID.
+	* We list operations filtered by CID to find the operation ID,
+	* then use the SDK's waitForOperation for polling.
+	*
+	* @param uploadResult UploadResult with CID
+	* @param options Polling options (interval, timeout, settledStates)
+	* @returns UploadResult with operation status merged in
+	*/
+	async #waitForOperationByCid(uploadResult, options) {
+		const params = {
+			filters: [(0, _lumeweb_query_builder.createEqFilter)("cid", uploadResult.cid)],
+			pagination: {
+				start: 0,
+				end: 1
+			}
+		};
+		const result = await this.portalSdk.account().listOperations(params);
+		if (!result.success) throw new Error(`Failed to find operation with CID ${uploadResult.cid}`);
+		const operation = result.data.data?.[0];
+		if (!operation) throw new Error(`Failed to find operation with CID ${uploadResult.cid}`);
+		const operationId = operation.id;
+		const waitResult = await this.portalSdk.account().waitForOperation(operationId, options);
+		if (!waitResult.success) throw new Error(waitResult.error?.message || `Operation ${operationId} failed`);
+		const finalOperation = waitResult.data;
+		if (!finalOperation.cid) throw new Error(`Operation ${operationId} completed without CID`);
+		if (finalOperation.status?.toLowerCase() === _lumeweb_portal_sdk.OPERATION_STATUS.FAILED || finalOperation.status?.toLowerCase() === _lumeweb_portal_sdk.OPERATION_STATUS.ERROR) throw new Error(`Operation ${operationId} failed: ${finalOperation.error || finalOperation.status_message || "Unknown error"}`);
+		return {
+			...uploadResult,
+			cid: finalOperation.cid,
+			operationId: finalOperation.id
+		};
+	}
+	#validateInput(input, options) {
+		if (input instanceof File) {
+			if (input.size === 0) throw new require_index.EmptyFileError(`Cannot upload empty file: ${input.name}`);
+		} else if (input instanceof ReadableStream) {
+			if (options?.size !== void 0 && options.size === 0) throw new require_index.EmptyFileError("Cannot upload empty stream");
+		}
+	}
+	async uploadDirectory(files, options) {
+		const carResult = await require_car.preprocessToCar(files, {
+			onProgress: options?.onProgress ? (p) => options.onProgress({
+				percentage: p,
+				bytesUploaded: 0,
+				bytesTotal: 0
+			}) : void 0,
+			signal: options?.signal
+		});
+		const operation = await this.#uploadCarResult(carResult, options?.name || "directory", options);
+		if (options?.waitForOperation && operation.result) {
+			const uploadResult = await operation.result;
+			operation.result = this.waitForOperation({
+				...uploadResult,
+				isDirectory: true,
+				numberOfFiles: files.length
+			}, options.operationPollingOptions);
+		}
+		return operation;
+	}
+	async #uploadInput(input, options) {
+		if (await this.#isCarFileUpload(input, options)) return this.#uploadCarFile(input, options);
+		const limit = await this.fetchUploadLimit();
+		if (input instanceof ReadableStream) {
+			const [streamForSize, streamForUpload] = input.tee();
+			let size;
+			if (options?.size !== void 0) size = BigInt(options.size);
+			else size = await require_stream.calculateStreamSize(streamForSize, options?.signal);
+			if (size >= BigInt(limit)) return this.#uploadFile({
+				data: streamForUpload,
+				name: options?.name || "upload",
+				type: options?.name?.endsWith(require_mime_types.FILE_EXTENSION_CAR) || options?.isDirectory ? require_mime_types.MIME_TYPE_CAR : require_mime_types.MIME_TYPE_OCTET_STREAM,
+				size: Number(size)
+			}, options);
+			else {
+				const blob = await require_stream.streamToBlob(streamForUpload, "application/octet-stream");
+				const file = new File([blob], options?.name || "upload", { type: blob.type });
+				return this.#uploadFile(file, options);
+			}
+		}
+		return this.#uploadFile(input, options);
+	}
+	async #isCarFileUpload(input, options) {
+		if (options?.isCarFile === true) return true;
+		if (options?.isCarFile === false) return false;
+		if (input instanceof File) {
+			if (input.type === require_mime_types.MIME_TYPE_CAR || input.name.endsWith(require_mime_types.FILE_EXTENSION_CAR)) return await require_car.isCarFile(input);
+		}
+		if (input instanceof ReadableStream && options?.name?.endsWith(require_mime_types.FILE_EXTENSION_CAR)) return options?.isCarFile !== false;
+		return false;
+	}
+	async #uploadCarResult(carResult, name, options) {
+		const limit = await this.fetchUploadLimit();
+		if (carResult.size >= BigInt(limit)) return this.#uploadFile({
+			data: carResult.carStream,
+			name: `${name}${require_mime_types.FILE_EXTENSION_CAR}`,
+			type: require_mime_types.MIME_TYPE_CAR,
+			size: Number(carResult.size)
+		}, options);
+		else {
+			const blob = await require_stream.streamToBlob(carResult.carStream, require_mime_types.MIME_TYPE_CAR);
+			const file = new File([blob], `${name}${require_mime_types.FILE_EXTENSION_CAR}`, { type: require_mime_types.MIME_TYPE_CAR });
+			return this.#uploadFile(file, options);
+		}
+	}
+	async #uploadCarFile(input, options) {
+		const limit = await this.fetchUploadLimit();
+		if (input instanceof ReadableStream) {
+			const [streamForSize, streamForUpload] = input.tee();
+			let size;
+			if (options?.size !== void 0) size = BigInt(options.size);
+			else size = await require_stream.calculateStreamSize(streamForSize, options?.signal);
+			if (size >= BigInt(limit)) return this.#uploadFile({
+				data: streamForUpload,
+				name: options?.name || "upload.car",
+				type: require_mime_types.MIME_TYPE_CAR,
+				size: Number(size)
+			}, options);
+			else {
+				const blob = await require_stream.streamToBlob(streamForUpload, require_mime_types.MIME_TYPE_CAR);
+				const file = new File([blob], options?.name || "upload.car", { type: require_mime_types.MIME_TYPE_CAR });
+				return this.#uploadFile(file, options);
+			}
+		}
+		if (input.type !== require_mime_types.MIME_TYPE_CAR) input = new File([input], input.name, {
+			type: require_mime_types.MIME_TYPE_CAR,
+			lastModified: input.lastModified
+		});
+		return this.#uploadFile(input, options);
+	}
+	async #uploadFile(input, options) {
+		const limit = await this.fetchUploadLimit();
+		let isLargeFile = false;
+		if (input instanceof File) isLargeFile = input.size > limit;
+		else isLargeFile = true;
+		const operation = await (isLargeFile ? this.tusHandler.upload(input, options) : this.xhrHandler.upload(input, options));
+		if (options?.waitForOperation && operation.result) {
+			const uploadResult = await operation.result;
+			operation.result = this.waitForOperation(uploadResult, options.operationPollingOptions);
+		}
+		return operation;
+	}
+	destroy() {
+		this.xhrHandler.destroy();
+		this.tusHandler.destroy();
+		require_car.destroyCarPreprocessor();
+	}
+};
+
+//#endregion
+exports.UploadManager = UploadManager;
+//# sourceMappingURL=manager.cjs.map
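The routing rule inside `#uploadFile` above is simple: a `File` at or under the account's upload limit (fetched once via `portalSdk.account().uploadLimit()`, falling back to `TUS_SIZE_THRESHOLD`, described as 100 MB in the bundled source comments) goes over XHR, while larger files and streams of unknown size go over resumable TUS. A distilled sketch of that decision, for orientation only; `pickTransport` is not a function in the package.

```ts
// Distilled from #uploadFile: streams are always treated as "large" because
// their size may be unknown; Files are compared against the fetched limit.
function pickTransport(
  input: File | ReadableStream<Uint8Array>,
  limitBytes: number,
): "tus" | "xhr" {
  const isLarge = input instanceof File ? input.size > limitBytes : true;
  return isLarge ? "tus" : "xhr";
}
```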

package/dist/cjs/upload/manager.cjs.map
@@ -0,0 +1 @@
{"version":3,"file":"manager.cjs","names":["TUS_SIZE_THRESHOLD","XHRUploadHandler","TUSUploadHandler","Sdk","DEFAULT_ENDPOINT","#validateInput","#uploadInput","#uploadCarFile","isUploadResult","OPERATION_STATUS","#waitForOperationByCid","EmptyFileError","preprocessToCar","#uploadCarResult","#isCarFileUpload","calculateStreamSize","#uploadFile","FILE_EXTENSION_CAR","MIME_TYPE_CAR","MIME_TYPE_OCTET_STREAM","streamToBlob","isCarFile"],"sources":["../../../src/upload/manager.ts"],"sourcesContent":["import type { OperationPollingOptions } from \"@lumeweb/portal-sdk\";\nimport {\n OPERATION_STATUS,\n type OperationsQueryParams,\n Sdk,\n} from \"@lumeweb/portal-sdk\";\nimport { createEqFilter } from \"@lumeweb/query-builder\";\n\nimport type { PinnerConfig } from \"../config\";\nimport type {\n UploadInput,\n UploadOperation,\n UploadOptions,\n UploadResult,\n} from \"@/types/upload\";\nimport { isUploadResult } from \"@/types/upload\";\nimport { XHRUploadHandler } from \"./xhr-upload\";\nimport { TUSUploadHandler } from \"./tus-upload\";\nimport { DEFAULT_ENDPOINT, TUS_SIZE_THRESHOLD } from \"@/types/constants\";\nimport {\n FILE_EXTENSION_CAR,\n MIME_TYPE_CAR,\n MIME_TYPE_OCTET_STREAM,\n} from \"@/types/mime-types\";\nimport {\n type CarPreprocessResult,\n configureCar,\n destroyCarPreprocessor,\n isCarFile,\n preprocessToCar,\n} from \"./car\";\nimport { calculateStreamSize, streamToBlob } from \"../utils/stream\";\nimport { EmptyFileError } from \"../errors\";\nimport { type UploadInputObject } from \"./normalize\";\n\nexport class UploadManager {\n private xhrHandler: XHRUploadHandler;\n private tusHandler: TUSUploadHandler;\n private portalSdk: Sdk;\n private uploadLimit: number = TUS_SIZE_THRESHOLD; // Default to 100 MB\n private limitFetched: boolean = false;\n\n constructor(config: PinnerConfig) {\n this.xhrHandler = new XHRUploadHandler(config);\n this.tusHandler = new TUSUploadHandler(config);\n this.portalSdk = new Sdk(config.endpoint || DEFAULT_ENDPOINT);\n configureCar({\n datastoreName: config.datastoreName,\n datastore: config.datastore,\n });\n }\n\n async fetchUploadLimit(): Promise<number> {\n if (this.limitFetched) {\n return this.uploadLimit;\n }\n\n try {\n const result = await this.portalSdk.account().uploadLimit();\n if (result.success && result.data?.limit) {\n this.uploadLimit = result.data.limit;\n }\n } catch {\n // Fallback to default 100 MB if API fails\n this.uploadLimit = TUS_SIZE_THRESHOLD;\n }\n\n this.limitFetched = true;\n return this.uploadLimit;\n }\n\n getUploadLimit(): number {\n return this.uploadLimit;\n }\n\n async upload(\n input: UploadInput,\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n this.#validateInput(input, options);\n return this.#uploadInput(input, options);\n }\n\n async uploadCar(\n input: File | ReadableStream<Uint8Array>,\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n this.#validateInput(input, options);\n return this.#uploadCarFile(input, options);\n }\n\n /**\n * Wait for an operation to complete or reach a settled state.\n *\n * Handles two scenarios:\n * 1. If an operationId is provided (in UploadResult), uses it directly\n * 2. 
If only CID is available, lists operations filtered by CID and polls the first result\n *\n * @param input Either an operation ID (number) or an UploadResult\n * @param options Polling options (interval, timeout, settledStates)\n * @returns UploadResult with operation status merged in\n */\n async waitForOperation(\n input: number | UploadResult,\n options?: OperationPollingOptions,\n ): Promise<UploadResult> {\n // Scenario 1: UploadResult with operationId - use it directly\n if (isUploadResult(input) && input.operationId) {\n const result = await this.portalSdk\n .account()\n .waitForOperation(input.operationId, options);\n\n if (!result.success) {\n throw new Error(\n result.error?.message || `Operation ${input.operationId} failed`,\n );\n }\n\n const operation = result.data;\n if (!operation.cid) {\n throw new Error(`Operation ${input.operationId} completed without CID`);\n }\n\n if (\n operation.status?.toLowerCase() === OPERATION_STATUS.FAILED ||\n operation.status?.toLowerCase() === OPERATION_STATUS.ERROR\n ) {\n throw new Error(\n `Operation ${input.operationId} failed: ${operation.error || operation.status_message || \"Unknown error\"}`,\n );\n }\n\n // Merge operation data into the original upload result\n return {\n ...input,\n cid: operation.cid,\n operationId: operation.id,\n };\n }\n\n // Scenario 2: UploadResult with CID but no operationId - find by CID\n if (isUploadResult(input) && input.cid) {\n return await this.#waitForOperationByCid(input, options);\n }\n\n // Scenario 3: Only operation ID provided\n const operationId = typeof input === \"number\" ? input : undefined;\n if (operationId) {\n const result = await this.portalSdk\n .account()\n .waitForOperation(operationId, options);\n\n if (!result.success) {\n throw new Error(\n result.error?.message || `Operation ${operationId} failed`,\n );\n }\n\n const operation = result.data;\n if (!operation.cid) {\n throw new Error(`Operation ${operationId} completed without CID`);\n }\n\n if (\n operation.status?.toLowerCase() === OPERATION_STATUS.FAILED ||\n operation.status?.toLowerCase() === OPERATION_STATUS.ERROR\n ) {\n throw new Error(\n `Operation ${operationId} failed: ${operation.error || operation.status_message || \"Unknown error\"}`,\n );\n }\n\n return {\n id: operationId.toString(),\n cid: operation.cid,\n name: operation.operation_display_name || \"Unknown\",\n size: 0,\n mimeType: \"\",\n createdAt: new Date(operation.started_at),\n numberOfFiles: 1,\n operationId: operation.id,\n };\n }\n\n throw new Error(\n \"No operation ID or CID provided, cannot wait for operation\",\n );\n }\n\n /**\n * Wait for an operation by CID.\n *\n * This is used when we have a CID from an upload but no operation ID.\n * We list operations filtered by CID to find the operation ID,\n * then use the SDK's waitForOperation for polling.\n *\n * @param uploadResult UploadResult with CID\n * @param options Polling options (interval, timeout, settledStates)\n * @returns UploadResult with operation status merged in\n */\n async #waitForOperationByCid(\n uploadResult: UploadResult,\n options?: OperationPollingOptions,\n ): Promise<UploadResult> {\n // List operations filtered by CID\n const params: OperationsQueryParams = {\n filters: [createEqFilter(\"cid\", uploadResult.cid)],\n pagination: { start: 0, end: 1 },\n };\n\n const result = await this.portalSdk.account().listOperations(params);\n\n if (!result.success) {\n throw new Error(`Failed to find operation with CID ${uploadResult.cid}`);\n }\n\n const operation = 
result.data.data?.[0];\n if (!operation) {\n throw new Error(`Failed to find operation with CID ${uploadResult.cid}`);\n }\n\n const operationId = operation.id;\n\n // Use SDK's waitForOperation for polling\n const waitResult = await this.portalSdk\n .account()\n .waitForOperation(operationId, options);\n\n if (!waitResult.success) {\n throw new Error(\n waitResult.error?.message || `Operation ${operationId} failed`,\n );\n }\n\n const finalOperation = waitResult.data;\n if (!finalOperation.cid) {\n throw new Error(`Operation ${operationId} completed without CID`);\n }\n\n if (\n finalOperation.status?.toLowerCase() === OPERATION_STATUS.FAILED ||\n finalOperation.status?.toLowerCase() === OPERATION_STATUS.ERROR\n ) {\n throw new Error(\n `Operation ${operationId} failed: ${finalOperation.error || finalOperation.status_message || \"Unknown error\"}`,\n );\n }\n\n // Return merged result\n return {\n ...uploadResult,\n cid: finalOperation.cid,\n operationId: finalOperation.id,\n };\n }\n\n #validateInput(input: UploadInput, options?: UploadOptions): void {\n if (input instanceof File) {\n if (input.size === 0) {\n throw new EmptyFileError(`Cannot upload empty file: ${input.name}`);\n }\n } else if (input instanceof ReadableStream) {\n // For ReadableStream, we can only validate if size is provided\n // Otherwise we need to calculate the size which consumes the stream\n if (options?.size !== undefined && options.size === 0) {\n throw new EmptyFileError(\"Cannot upload empty stream\");\n }\n }\n }\n\n async uploadDirectory(\n files: File[],\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n const carResult = await preprocessToCar(files, {\n onProgress: options?.onProgress\n ? (p) =>\n options.onProgress!({\n percentage: p,\n bytesUploaded: 0,\n bytesTotal: 0,\n })\n : undefined,\n signal: options?.signal,\n });\n\n const operation = await this.#uploadCarResult(\n carResult,\n options?.name || \"directory\",\n options,\n );\n\n if (options?.waitForOperation && operation.result) {\n const uploadResult = await operation.result;\n operation.result = this.waitForOperation(\n { ...uploadResult, isDirectory: true, numberOfFiles: files.length },\n options.operationPollingOptions,\n );\n }\n\n return operation;\n }\n\n async #uploadInput(\n input: UploadInput,\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n // Check if this is a CAR file that should be uploaded without preprocessing\n const isCarUpload = await this.#isCarFileUpload(input, options);\n if (isCarUpload) {\n return this.#uploadCarFile(input, options);\n }\n\n const limit = await this.fetchUploadLimit();\n\n if (input instanceof ReadableStream) {\n const [streamForSize, streamForUpload] = input.tee();\n let size: bigint;\n if (options?.size !== undefined) {\n size = BigInt(options.size);\n } else {\n size = await calculateStreamSize(streamForSize, options?.signal);\n }\n\n if (size >= BigInt(limit)) {\n return this.#uploadFile(\n {\n data: streamForUpload,\n name: options?.name || \"upload\",\n type:\n options?.name?.endsWith(FILE_EXTENSION_CAR) ||\n options?.isDirectory\n ? 
MIME_TYPE_CAR\n : MIME_TYPE_OCTET_STREAM,\n size: Number(size),\n },\n options,\n );\n } else {\n const blob = await streamToBlob(\n streamForUpload,\n \"application/octet-stream\",\n );\n const file = new File([blob], options?.name || \"upload\", {\n type: blob.type,\n });\n return this.#uploadFile(file, options);\n }\n }\n\n return this.#uploadFile(input, options);\n }\n\n async #isCarFileUpload(\n input: UploadInput,\n options?: UploadOptions,\n ): Promise<boolean> {\n // Explicit option takes precedence\n if (options?.isCarFile === true) {\n return true;\n }\n if (options?.isCarFile === false) {\n return false;\n }\n\n // Check if File input is a valid CAR file\n if (input instanceof File) {\n // Quick check: MIME type or extension\n if (\n input.type === MIME_TYPE_CAR ||\n input.name.endsWith(FILE_EXTENSION_CAR)\n ) {\n // Verify it's actually a valid CAR file\n return await isCarFile(input);\n }\n }\n\n // For ReadableStream, rely on explicit isCarFile option or name extension\n if (\n input instanceof ReadableStream &&\n options?.name?.endsWith(FILE_EXTENSION_CAR)\n ) {\n // We can't verify stream content without consuming it,\n // so we trust the explicit isCarFile option or extension\n return options?.isCarFile !== false;\n }\n\n return false;\n }\n\n async #uploadCarResult(\n carResult: CarPreprocessResult,\n name: string,\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n const limit = await this.fetchUploadLimit();\n\n if (carResult.size >= BigInt(limit)) {\n return this.#uploadFile(\n {\n data: carResult.carStream,\n name: `${name}${FILE_EXTENSION_CAR}`,\n type: MIME_TYPE_CAR,\n size: Number(carResult.size),\n },\n options,\n );\n } else {\n const blob = await streamToBlob(carResult.carStream, MIME_TYPE_CAR);\n const file = new File([blob], `${name}${FILE_EXTENSION_CAR}`, {\n type: MIME_TYPE_CAR,\n });\n return this.#uploadFile(file, options);\n }\n }\n\n async #uploadCarFile(\n input: File | ReadableStream<Uint8Array>,\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n const limit = await this.fetchUploadLimit();\n\n if (input instanceof ReadableStream) {\n const [streamForSize, streamForUpload] = input.tee();\n let size: bigint;\n if (options?.size !== undefined) {\n size = BigInt(options.size);\n } else {\n size = await calculateStreamSize(streamForSize, options?.signal);\n }\n\n if (size >= BigInt(limit)) {\n return this.#uploadFile(\n {\n data: streamForUpload,\n name: options?.name || \"upload.car\",\n type: MIME_TYPE_CAR,\n size: Number(size),\n },\n options,\n );\n } else {\n const blob = await streamToBlob(streamForUpload, MIME_TYPE_CAR);\n const file = new File([blob], options?.name || \"upload.car\", {\n type: MIME_TYPE_CAR,\n });\n return this.#uploadFile(file, options);\n }\n }\n\n // File input - ensure it has correct CAR MIME type\n if (input.type !== MIME_TYPE_CAR) {\n // Create a new File with correct CAR MIME type\n input = new File([input], input.name, {\n type: MIME_TYPE_CAR,\n lastModified: input.lastModified,\n });\n }\n\n return this.#uploadFile(input, options);\n }\n\n async #uploadFile(\n input: UploadInput | UploadInputObject,\n options?: UploadOptions,\n ): Promise<UploadOperation> {\n const limit = await this.fetchUploadLimit();\n\n let isLargeFile = false;\n\n if (input instanceof File) {\n isLargeFile = input.size > limit;\n } else {\n isLargeFile = true;\n }\n\n const operation = await (isLargeFile\n ? 
this.tusHandler.upload(input, options)\n : this.xhrHandler.upload(input, options));\n\n if (options?.waitForOperation && operation.result) {\n const uploadResult = await operation.result;\n operation.result = this.waitForOperation(\n uploadResult,\n options.operationPollingOptions,\n );\n }\n\n return operation;\n }\n\n destroy(): void {\n this.xhrHandler.destroy();\n this.tusHandler.destroy();\n destroyCarPreprocessor();\n }\n}\n"],"mappings":";;;;;;;;;;;;;AAmCA,IAAa,gBAAb,MAA2B;CACzB,AAAQ;CACR,AAAQ;CACR,AAAQ;CACR,AAAQ,cAAsBA;CAC9B,AAAQ,eAAwB;CAEhC,YAAY,QAAsB;AAChC,OAAK,aAAa,IAAIC,oCAAiB,OAAO;AAC9C,OAAK,aAAa,IAAIC,oCAAiB,OAAO;AAC9C,OAAK,YAAY,IAAIC,wBAAI,OAAO,YAAYC,mCAAiB;AAC7D,2BAAa;GACX,eAAe,OAAO;GACtB,WAAW,OAAO;GACnB,CAAC;;CAGJ,MAAM,mBAAoC;AACxC,MAAI,KAAK,aACP,QAAO,KAAK;AAGd,MAAI;GACF,MAAM,SAAS,MAAM,KAAK,UAAU,SAAS,CAAC,aAAa;AAC3D,OAAI,OAAO,WAAW,OAAO,MAAM,MACjC,MAAK,cAAc,OAAO,KAAK;UAE3B;AAEN,QAAK,cAAcJ;;AAGrB,OAAK,eAAe;AACpB,SAAO,KAAK;;CAGd,iBAAyB;AACvB,SAAO,KAAK;;CAGd,MAAM,OACJ,OACA,SAC0B;AAC1B,QAAKK,cAAe,OAAO,QAAQ;AACnC,SAAO,MAAKC,YAAa,OAAO,QAAQ;;CAG1C,MAAM,UACJ,OACA,SAC0B;AAC1B,QAAKD,cAAe,OAAO,QAAQ;AACnC,SAAO,MAAKE,cAAe,OAAO,QAAQ;;;;;;;;;;;;;CAc5C,MAAM,iBACJ,OACA,SACuB;AAEvB,MAAIC,8BAAe,MAAM,IAAI,MAAM,aAAa;GAC9C,MAAM,SAAS,MAAM,KAAK,UACvB,SAAS,CACT,iBAAiB,MAAM,aAAa,QAAQ;AAE/C,OAAI,CAAC,OAAO,QACV,OAAM,IAAI,MACR,OAAO,OAAO,WAAW,aAAa,MAAM,YAAY,SACzD;GAGH,MAAM,YAAY,OAAO;AACzB,OAAI,CAAC,UAAU,IACb,OAAM,IAAI,MAAM,aAAa,MAAM,YAAY,wBAAwB;AAGzE,OACE,UAAU,QAAQ,aAAa,KAAKC,qCAAiB,UACrD,UAAU,QAAQ,aAAa,KAAKA,qCAAiB,MAErD,OAAM,IAAI,MACR,aAAa,MAAM,YAAY,WAAW,UAAU,SAAS,UAAU,kBAAkB,kBAC1F;AAIH,UAAO;IACL,GAAG;IACH,KAAK,UAAU;IACf,aAAa,UAAU;IACxB;;AAIH,MAAID,8BAAe,MAAM,IAAI,MAAM,IACjC,QAAO,MAAM,MAAKE,sBAAuB,OAAO,QAAQ;EAI1D,MAAM,cAAc,OAAO,UAAU,WAAW,QAAQ;AACxD,MAAI,aAAa;GACf,MAAM,SAAS,MAAM,KAAK,UACvB,SAAS,CACT,iBAAiB,aAAa,QAAQ;AAEzC,OAAI,CAAC,OAAO,QACV,OAAM,IAAI,MACR,OAAO,OAAO,WAAW,aAAa,YAAY,SACnD;GAGH,MAAM,YAAY,OAAO;AACzB,OAAI,CAAC,UAAU,IACb,OAAM,IAAI,MAAM,aAAa,YAAY,wBAAwB;AAGnE,OACE,UAAU,QAAQ,aAAa,KAAKD,qCAAiB,UACrD,UAAU,QAAQ,aAAa,KAAKA,qCAAiB,MAErD,OAAM,IAAI,MACR,aAAa,YAAY,WAAW,UAAU,SAAS,UAAU,kBAAkB,kBACpF;AAGH,UAAO;IACL,IAAI,YAAY,UAAU;IAC1B,KAAK,UAAU;IACf,MAAM,UAAU,0BAA0B;IAC1C,MAAM;IACN,UAAU;IACV,WAAW,IAAI,KAAK,UAAU,WAAW;IACzC,eAAe;IACf,aAAa,UAAU;IACxB;;AAGH,QAAM,IAAI,MACR,6DACD;;;;;;;;;;;;;CAcH,OAAMC,sBACJ,cACA,SACuB;EAEvB,MAAM,SAAgC;GACpC,SAAS,4CAAgB,OAAO,aAAa,IAAI,CAAC;GAClD,YAAY;IAAE,OAAO;IAAG,KAAK;IAAG;GACjC;EAED,MAAM,SAAS,MAAM,KAAK,UAAU,SAAS,CAAC,eAAe,OAAO;AAEpE,MAAI,CAAC,OAAO,QACV,OAAM,IAAI,MAAM,qCAAqC,aAAa,MAAM;EAG1E,MAAM,YAAY,OAAO,KAAK,OAAO;AACrC,MAAI,CAAC,UACH,OAAM,IAAI,MAAM,qCAAqC,aAAa,MAAM;EAG1E,MAAM,cAAc,UAAU;EAG9B,MAAM,aAAa,MAAM,KAAK,UAC3B,SAAS,CACT,iBAAiB,aAAa,QAAQ;AAEzC,MAAI,CAAC,WAAW,QACd,OAAM,IAAI,MACR,WAAW,OAAO,WAAW,aAAa,YAAY,SACvD;EAGH,MAAM,iBAAiB,WAAW;AAClC,MAAI,CAAC,eAAe,IAClB,OAAM,IAAI,MAAM,aAAa,YAAY,wBAAwB;AAGnE,MACE,eAAe,QAAQ,aAAa,KAAKD,qCAAiB,UAC1D,eAAe,QAAQ,aAAa,KAAKA,qCAAiB,MAE1D,OAAM,IAAI,MACR,aAAa,YAAY,WAAW,eAAe,SAAS,eAAe,kBAAkB,kBAC9F;AAIH,SAAO;GACL,GAAG;GACH,KAAK,eAAe;GACpB,aAAa,eAAe;GAC7B;;CAGH,eAAe,OAAoB,SAA+B;AAChE,MAAI,iBAAiB,MACnB;OAAI,MAAM,SAAS,EACjB,OAAM,IAAIE,6BAAe,6BAA6B,MAAM,OAAO;aAE5D,iBAAiB,gBAG1B;OAAI,SAAS,SAAS,UAAa,QAAQ,SAAS,EAClD,OAAM,IAAIA,6BAAe,6BAA6B;;;CAK5D,MAAM,gBACJ,OACA,SAC0B;EAC1B,MAAM,YAAY,MAAMC,4BAAgB,OAAO;GAC7C,YAAY,SAAS,cAChB,MACC,QAAQ,WAAY;IAClB,YAAY;IACZ,eAAe;IACf,YAAY;IACb,CAAC,GACJ;GACJ,QAAQ,SAAS;GAClB,CAAC;EAEF,MAAM,YAAY,MAAM,MAAKC,gBAC3B,WACA,SAAS,QAAQ,aACjB,QACD;AAED,MAAI,SAAS,oBAAoB,UAAU,QAAQ;GACjD,MAAM,eAAe,MAAM,UAAU;AACrC,aAAU,SAAS,KAAK,iBA
CtB;IAAE,GAAG;IAAc,aAAa;IAAM,eAAe,MAAM;IAAQ,EACnE,QAAQ,wBACT;;AAGH,SAAO;;CAGT,OAAMP,YACJ,OACA,SAC0B;AAG1B,MADoB,MAAM,MAAKQ,gBAAiB,OAAO,QAAQ,CAE7D,QAAO,MAAKP,cAAe,OAAO,QAAQ;EAG5C,MAAM,QAAQ,MAAM,KAAK,kBAAkB;AAE3C,MAAI,iBAAiB,gBAAgB;GACnC,MAAM,CAAC,eAAe,mBAAmB,MAAM,KAAK;GACpD,IAAI;AACJ,OAAI,SAAS,SAAS,OACpB,QAAO,OAAO,QAAQ,KAAK;OAE3B,QAAO,MAAMQ,mCAAoB,eAAe,SAAS,OAAO;AAGlE,OAAI,QAAQ,OAAO,MAAM,CACvB,QAAO,MAAKC,WACV;IACE,MAAM;IACN,MAAM,SAAS,QAAQ;IACvB,MACE,SAAS,MAAM,SAASC,sCAAmB,IAC3C,SAAS,cACLC,mCACAC;IACN,MAAM,OAAO,KAAK;IACnB,EACD,QACD;QACI;IACL,MAAM,OAAO,MAAMC,4BACjB,iBACA,2BACD;IACD,MAAM,OAAO,IAAI,KAAK,CAAC,KAAK,EAAE,SAAS,QAAQ,UAAU,EACvD,MAAM,KAAK,MACZ,CAAC;AACF,WAAO,MAAKJ,WAAY,MAAM,QAAQ;;;AAI1C,SAAO,MAAKA,WAAY,OAAO,QAAQ;;CAGzC,OAAMF,gBACJ,OACA,SACkB;AAElB,MAAI,SAAS,cAAc,KACzB,QAAO;AAET,MAAI,SAAS,cAAc,MACzB,QAAO;AAIT,MAAI,iBAAiB,MAEnB;OACE,MAAM,SAASI,oCACf,MAAM,KAAK,SAASD,sCAAmB,CAGvC,QAAO,MAAMI,sBAAU,MAAM;;AAKjC,MACE,iBAAiB,kBACjB,SAAS,MAAM,SAASJ,sCAAmB,CAI3C,QAAO,SAAS,cAAc;AAGhC,SAAO;;CAGT,OAAMJ,gBACJ,WACA,MACA,SAC0B;EAC1B,MAAM,QAAQ,MAAM,KAAK,kBAAkB;AAE3C,MAAI,UAAU,QAAQ,OAAO,MAAM,CACjC,QAAO,MAAKG,WACV;GACE,MAAM,UAAU;GAChB,MAAM,GAAG,OAAOC;GAChB,MAAMC;GACN,MAAM,OAAO,UAAU,KAAK;GAC7B,EACD,QACD;OACI;GACL,MAAM,OAAO,MAAME,4BAAa,UAAU,WAAWF,iCAAc;GACnE,MAAM,OAAO,IAAI,KAAK,CAAC,KAAK,EAAE,GAAG,OAAOD,yCAAsB,EAC5D,MAAMC,kCACP,CAAC;AACF,UAAO,MAAKF,WAAY,MAAM,QAAQ;;;CAI1C,OAAMT,cACJ,OACA,SAC0B;EAC1B,MAAM,QAAQ,MAAM,KAAK,kBAAkB;AAE3C,MAAI,iBAAiB,gBAAgB;GACnC,MAAM,CAAC,eAAe,mBAAmB,MAAM,KAAK;GACpD,IAAI;AACJ,OAAI,SAAS,SAAS,OACpB,QAAO,OAAO,QAAQ,KAAK;OAE3B,QAAO,MAAMQ,mCAAoB,eAAe,SAAS,OAAO;AAGlE,OAAI,QAAQ,OAAO,MAAM,CACvB,QAAO,MAAKC,WACV;IACE,MAAM;IACN,MAAM,SAAS,QAAQ;IACvB,MAAME;IACN,MAAM,OAAO,KAAK;IACnB,EACD,QACD;QACI;IACL,MAAM,OAAO,MAAME,4BAAa,iBAAiBF,iCAAc;IAC/D,MAAM,OAAO,IAAI,KAAK,CAAC,KAAK,EAAE,SAAS,QAAQ,cAAc,EAC3D,MAAMA,kCACP,CAAC;AACF,WAAO,MAAKF,WAAY,MAAM,QAAQ;;;AAK1C,MAAI,MAAM,SAASE,iCAEjB,SAAQ,IAAI,KAAK,CAAC,MAAM,EAAE,MAAM,MAAM;GACpC,MAAMA;GACN,cAAc,MAAM;GACrB,CAAC;AAGJ,SAAO,MAAKF,WAAY,OAAO,QAAQ;;CAGzC,OAAMA,WACJ,OACA,SAC0B;EAC1B,MAAM,QAAQ,MAAM,KAAK,kBAAkB;EAE3C,IAAI,cAAc;AAElB,MAAI,iBAAiB,KACnB,eAAc,MAAM,OAAO;MAE3B,eAAc;EAGhB,MAAM,YAAY,OAAO,cACrB,KAAK,WAAW,OAAO,OAAO,QAAQ,GACtC,KAAK,WAAW,OAAO,OAAO,QAAQ;AAE1C,MAAI,SAAS,oBAAoB,UAAU,QAAQ;GACjD,MAAM,eAAe,MAAM,UAAU;AACrC,aAAU,SAAS,KAAK,iBACtB,cACA,QAAQ,wBACT;;AAGH,SAAO;;CAGT,UAAgB;AACd,OAAK,WAAW,SAAS;AACzB,OAAK,WAAW,SAAS;AACzB,sCAAwB"}
@@ -0,0 +1,35 @@
+import { PinnerConfig } from "../config.cjs";
+import { UploadInput, UploadOperation, UploadOptions, UploadResult } from "../types/upload.cjs";
+import { OperationPollingOptions } from "@lumeweb/portal-sdk";
+
+//#region src/upload/manager.d.ts
+declare class UploadManager {
+	#private;
+	private xhrHandler;
+	private tusHandler;
+	private portalSdk;
+	private uploadLimit;
+	private limitFetched;
+	constructor(config: PinnerConfig);
+	fetchUploadLimit(): Promise<number>;
+	getUploadLimit(): number;
+	upload(input: UploadInput, options?: UploadOptions): Promise<UploadOperation>;
+	uploadCar(input: File | ReadableStream<Uint8Array>, options?: UploadOptions): Promise<UploadOperation>;
+	/**
+	 * Wait for an operation to complete or reach a settled state.
+	 *
+	 * Handles two scenarios:
+	 * 1. If an operationId is provided (in UploadResult), uses it directly
+	 * 2. If only CID is available, lists operations filtered by CID and polls the first result
+	 *
+	 * @param input Either an operation ID (number) or an UploadResult
+	 * @param options Polling options (interval, timeout, settledStates)
+	 * @returns UploadResult with operation status merged in
+	 */
+	waitForOperation(input: number | UploadResult, options?: OperationPollingOptions): Promise<UploadResult>;
+	uploadDirectory(files: File[], options?: UploadOptions): Promise<UploadOperation>;
+	destroy(): void;
+}
+//#endregion
+export { UploadManager };
+//# sourceMappingURL=manager.d.cts.map
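
The declaration above is the new UploadManager surface. For orientation, here is a minimal usage sketch, not part of the package diff: it assumes UploadManager is re-exported from the package root and that PinnerConfig accepts `endpoint` and `jwt`, which the built handlers reference as `this.config.endpoint` and `this.config.jwt`; everything else follows the signatures declared above.

```ts
// Illustrative sketch only; import path and config shape are assumptions,
// not confirmed package exports.
import { UploadManager } from "@lumeweb/pinner";

async function pinFile(file: File) {
  const manager = new UploadManager({
    endpoint: "https://portal.example.com", // assumed PinnerConfig field
    jwt: "<portal JWT>",                    // assumed PinnerConfig field
  } as any); // cast hedges the unknown full PinnerConfig shape

  // upload() returns an UploadOperation; with waitForOperation the manager
  // re-resolves operation.result once the portal-side operation settles.
  const operation = await manager.upload(file, { waitForOperation: true });
  const result = operation.result ? await operation.result : undefined;

  manager.destroy();
  return result?.cid;
}
```
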
@@ -0,0 +1,28 @@
+const require_mime_types = require('../types/mime-types.cjs');
+
+//#region src/upload/normalize.ts
+function normalizeUploadInput(input, options) {
+	if (input instanceof File) return {
+		data: input,
+		name: input.name,
+		type: input.type,
+		size: input.size
+	};
+	if (input instanceof ReadableStream) return {
+		data: input,
+		name: options?.name || "upload",
+		type: options?.name?.endsWith(require_mime_types.FILE_EXTENSION_CAR) ? require_mime_types.MIME_TYPE_CAR : require_mime_types.MIME_TYPE_OCTET_STREAM,
+		size: 0
+	};
+	const objectInput = input;
+	return {
+		data: objectInput.data,
+		name: objectInput.name,
+		type: objectInput.type,
+		size: objectInput.size || 0
+	};
+}
+
+//#endregion
+exports.normalizeUploadInput = normalizeUploadInput;
+//# sourceMappingURL=normalize.cjs.map
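
To make the three branches above concrete, the sketch below shows what each accepted input shape normalizes to. This is illustrative only; the deep import path is a guess at the built file's location and may not be an exposed entry point, and the `MIME_TYPE_*` values in the comments stand for the constants from mime-types.cjs.

```ts
// Hypothetical import path pointing at the built file added above.
import { normalizeUploadInput } from "@lumeweb/pinner/dist/cjs/upload/normalize.cjs";

const stream = new ReadableStream<Uint8Array>();

// A File keeps its own name, type, and size.
normalizeUploadInput(new File(["hello"], "notes.txt", { type: "text/plain" }));
// -> { data: <File>, name: "notes.txt", type: "text/plain", size: 5 }

// A bare ReadableStream falls back to options.name ("upload" if absent); a name
// ending in the CAR extension gets MIME_TYPE_CAR, anything else the octet-stream type.
normalizeUploadInput(stream, { name: "bundle.car" });
// -> { data: stream, name: "bundle.car", type: MIME_TYPE_CAR, size: 0 }

// An object input passes through, with size defaulting to 0 when omitted.
normalizeUploadInput({ data: stream, name: "data.bin", type: "application/octet-stream" });
// -> { data: stream, name: "data.bin", type: "application/octet-stream", size: 0 }
```
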
@@ -0,0 +1 @@
{"version":3,"file":"normalize.cjs","names":["FILE_EXTENSION_CAR","MIME_TYPE_CAR","MIME_TYPE_OCTET_STREAM"],"sources":["../../../src/upload/normalize.ts"],"sourcesContent":["import type { UploadInput, UploadOptions } from \"@/types/upload\";\nimport {\n FILE_EXTENSION_CAR,\n MIME_TYPE_CAR,\n MIME_TYPE_OCTET_STREAM,\n} from \"@/types/mime-types\";\n\nexport interface NormalizedUploadInput {\n data: File | ReadableStream<Uint8Array>;\n name: string;\n type: string;\n size: number;\n}\n\nexport interface UploadInputObject {\n data: ReadableStream<Uint8Array>;\n name: string;\n type: string;\n size?: number;\n}\n\nexport function normalizeUploadInput(\n input: UploadInput | UploadInputObject,\n options?: UploadOptions,\n): NormalizedUploadInput {\n if (input instanceof File) {\n return {\n data: input,\n name: input.name,\n type: input.type,\n size: input.size,\n };\n }\n\n if (input instanceof ReadableStream) {\n return {\n data: input,\n name: options?.name || \"upload\",\n type: options?.name?.endsWith(FILE_EXTENSION_CAR)\n ? MIME_TYPE_CAR\n : MIME_TYPE_OCTET_STREAM,\n size: 0,\n };\n }\n\n const objectInput = input as UploadInputObject;\n return {\n data: objectInput.data,\n name: objectInput.name,\n type: objectInput.type,\n size: objectInput.size || 0,\n };\n}\n"],"mappings":";;;AAqBA,SAAgB,qBACd,OACA,SACuB;AACvB,KAAI,iBAAiB,KACnB,QAAO;EACL,MAAM;EACN,MAAM,MAAM;EACZ,MAAM,MAAM;EACZ,MAAM,MAAM;EACb;AAGH,KAAI,iBAAiB,eACnB,QAAO;EACL,MAAM;EACN,MAAM,SAAS,QAAQ;EACvB,MAAM,SAAS,MAAM,SAASA,sCAAmB,GAC7CC,mCACAC;EACJ,MAAM;EACP;CAGH,MAAM,cAAc;AACpB,QAAO;EACL,MAAM,YAAY;EAClB,MAAM,YAAY;EAClB,MAAM,YAAY;EAClB,MAAM,YAAY,QAAQ;EAC3B"}
@@ -0,0 +1,74 @@
+const require_rolldown_runtime = require('../_virtual/rolldown_runtime.cjs');
+const require_upload = require('../types/upload.cjs');
+const require_constants = require('./constants.cjs');
+const require_base_upload = require('./base-upload.cjs');
+const require_tus_patch = require('../utils/tus-patch.cjs');
+let _uppy_tus = require("@uppy/tus");
+_uppy_tus = require_rolldown_runtime.__toESM(_uppy_tus);
+
+//#region src/upload/tus-upload.ts
+var TUSUploadHandler = class extends require_base_upload.BaseUploadHandler {
+	constructor(config) {
+		require_tus_patch.patchTusNodeHttpStack();
+		super(config);
+	}
+	configurePlugin(uppy) {
+		uppy.use(_uppy_tus.default, {
+			endpoint: `${this.config.endpoint}/api/upload/tus`,
+			headers: { Authorization: `Bearer ${this.config.jwt}` },
+			chunkSize: 10 * 1024 * 1024,
+			retryDelays: [
+				0,
+				1e3,
+				3e3,
+				5e3
+			]
+		});
+	}
+	parseResult(result) {
+		const uppyResponse = result;
+		if (!uppyResponse) return {
+			id: "",
+			cid: "",
+			name: "",
+			size: 0,
+			mimeType: "",
+			createdAt: /* @__PURE__ */ new Date(),
+			numberOfFiles: 1,
+			isDirectory: false,
+			[require_upload.UploadResultSymbol]: true
+		};
+		const response = uppyResponse.body;
+		if (response && response.cid) return {
+			id: response.id,
+			cid: response.cid,
+			name: response.name,
+			size: response.size,
+			mimeType: response.mimeType,
+			createdAt: new Date(response.createdAt),
+			numberOfFiles: response.numberOfFiles,
+			isDirectory: response.isDirectory ?? false,
+			keyvalues: response.keyvalues,
+			operationId: response.operationId,
+			[require_upload.UploadResultSymbol]: true
+		};
+		return {
+			id: uppyResponse.uploadURL?.split("/").pop() || "",
+			cid: "",
+			name: "",
+			size: 0,
+			mimeType: "",
+			createdAt: /* @__PURE__ */ new Date(),
+			numberOfFiles: 1,
+			isDirectory: false,
+			[require_upload.UploadResultSymbol]: true
+		};
+	}
+	getUploadSource() {
+		return require_constants.UPLOAD_SOURCE_TUS;
+	}
+};
+
+//#endregion
+exports.TUSUploadHandler = TUSUploadHandler;
+//# sourceMappingURL=tus-upload.cjs.map
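
The handler above wires @uppy/tus with a fixed endpoint path, bearer auth, 10 MiB chunks, and a short retry schedule. The sketch below reproduces that configuration with Uppy directly, for illustration only; the endpoint and JWT values are placeholders, and real usage goes through UploadManager rather than a hand-built Uppy instance.

```ts
import Uppy from "@uppy/core";
import Tus from "@uppy/tus";

// Same plugin options TUSUploadHandler applies in configurePlugin().
const uppy = new Uppy().use(Tus, {
  endpoint: "https://portal.example.com/api/upload/tus", // `${config.endpoint}/api/upload/tus`
  headers: { Authorization: "Bearer <portal JWT>" },
  chunkSize: 10 * 1024 * 1024,        // 10 MiB per chunk
  retryDelays: [0, 1000, 3000, 5000], // retry schedule used by the handler
});
```
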
@@ -0,0 +1 @@
{"version":3,"file":"tus-upload.cjs","names":["BaseUploadHandler","TusPlugin","UploadResultSymbol","UPLOAD_SOURCE_TUS"],"sources":["../../../src/upload/tus-upload.ts"],"sourcesContent":["import Uppy from \"@uppy/core\";\nimport TusPlugin from \"@uppy/tus\";\nimport type { UploadResult } from \"@/types/upload\";\nimport { UploadResultSymbol } from \"@/types/upload\";\nimport { BaseUploadHandler } from \"./base-upload\";\nimport { patchTusNodeHttpStack } from \"@/utils/tus-patch\";\nimport type { PinnerConfig } from \"@/config\";\nimport { UPLOAD_SOURCE_TUS } from \"./constants\";\n\nexport class TUSUploadHandler extends BaseUploadHandler {\n constructor(config: PinnerConfig) {\n // Apply runtime patch for tus-js-client to handle abort() before send()\n patchTusNodeHttpStack();\n super(config);\n }\n protected configurePlugin(uppy: Uppy): void {\n uppy.use(TusPlugin, {\n endpoint: `${this.config.endpoint}/api/upload/tus`,\n headers: {\n Authorization: `Bearer ${this.config.jwt}`,\n },\n chunkSize: 10 * 1024 * 1024, // 10MB chunks\n retryDelays: [0, 1000, 3000, 5000],\n });\n }\n\n protected parseResult(result: unknown): UploadResult {\n const uppyResponse = result as\n | {\n uploadURL: string;\n body?: UploadResult;\n }\n | undefined;\n\n if (!uppyResponse) {\n return {\n id: \"\",\n cid: \"\",\n name: \"\",\n size: 0,\n mimeType: \"\",\n createdAt: new Date(),\n numberOfFiles: 1,\n isDirectory: false,\n [UploadResultSymbol]: true,\n };\n }\n\n const response = uppyResponse.body;\n\n if (response && response.cid) {\n return {\n id: response.id,\n cid: response.cid,\n name: response.name,\n size: response.size,\n mimeType: response.mimeType,\n createdAt: new Date(response.createdAt),\n numberOfFiles: response.numberOfFiles,\n isDirectory: response.isDirectory ?? false,\n keyvalues: response.keyvalues,\n operationId: response.operationId,\n [UploadResultSymbol]: true,\n };\n }\n\n const uploadId = uppyResponse.uploadURL?.split(\"/\").pop() || \"\";\n\n return {\n id: uploadId,\n cid: \"\",\n name: \"\",\n size: 0,\n mimeType: \"\",\n createdAt: new Date(),\n numberOfFiles: 1,\n isDirectory: false,\n [UploadResultSymbol]: true,\n };\n }\n\n protected getUploadSource(): string {\n return UPLOAD_SOURCE_TUS;\n }\n}\n"],"mappings":";;;;;;;;;AASA,IAAa,mBAAb,cAAsCA,sCAAkB;CACtD,YAAY,QAAsB;AAEhC,2CAAuB;AACvB,QAAM,OAAO;;CAEf,AAAU,gBAAgB,MAAkB;AAC1C,OAAK,IAAIC,mBAAW;GAClB,UAAU,GAAG,KAAK,OAAO,SAAS;GAClC,SAAS,EACP,eAAe,UAAU,KAAK,OAAO,OACtC;GACD,WAAW,KAAK,OAAO;GACvB,aAAa;IAAC;IAAG;IAAM;IAAM;IAAK;GACnC,CAAC;;CAGJ,AAAU,YAAY,QAA+B;EACnD,MAAM,eAAe;AAOrB,MAAI,CAAC,aACH,QAAO;GACL,IAAI;GACJ,KAAK;GACL,MAAM;GACN,MAAM;GACN,UAAU;GACV,2BAAW,IAAI,MAAM;GACrB,eAAe;GACf,aAAa;IACZC,oCAAqB;GACvB;EAGH,MAAM,WAAW,aAAa;AAE9B,MAAI,YAAY,SAAS,IACvB,QAAO;GACL,IAAI,SAAS;GACb,KAAK,SAAS;GACd,MAAM,SAAS;GACf,MAAM,SAAS;GACf,UAAU,SAAS;GACnB,WAAW,IAAI,KAAK,SAAS,UAAU;GACvC,eAAe,SAAS;GACxB,aAAa,SAAS,eAAe;GACrC,WAAW,SAAS;GACpB,aAAa,SAAS;IACrBA,oCAAqB;GACvB;AAKH,SAAO;GACL,IAHe,aAAa,WAAW,MAAM,IAAI,CAAC,KAAK,IAAI;GAI3D,KAAK;GACL,MAAM;GACN,MAAM;GACN,UAAU;GACV,2BAAW,IAAI,MAAM;GACrB,eAAe;GACf,aAAa;IACZA,oCAAqB;GACvB;;CAGH,AAAU,kBAA0B;AAClC,SAAOC"}