@better-upload/client 0.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +15 -0
- package/dist/error-DFL8oJkp.js +12 -0
- package/dist/helpers/index.d.ts +27 -0
- package/dist/helpers/index.js +37 -0
- package/dist/index.d.ts +77 -0
- package/dist/index.js +589 -0
- package/dist/internal-export.d.ts +15 -0
- package/dist/internal-export.js +3 -0
- package/dist/internal-gLVx5GpR.d.ts +360 -0
- package/package.json +57 -0
package/README.md
ADDED
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
# Better Upload
|
|
2
|
+
|
|
3
|
+
Simple and easy file uploads for React. Upload directly to any S3-compatible service with minimal setup.
|
|
4
|
+
|
|
5
|
+
Documentation is available [here](https://better-upload.com).
|
|
6
|
+
|
|
7
|
+
## Getting Started
|
|
8
|
+
|
|
9
|
+
You can have file uploads in your React app in a few minutes with Better Upload. To get started, follow the [quickstart guide](https://better-upload.com/docs/quickstart).
|
|
10
|
+
|
|
11
|
+
## License
|
|
12
|
+
|
|
13
|
+
Better Upload is licensed under the MIT License. You are free to do whatever you want with it.
|
|
14
|
+
|
|
15
|
+
Feel free to contribute to the project.
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
//#region src/types/error.ts
/**
 * Error raised by the client upload helpers.
 * Carries a machine-readable `type` discriminator alongside the
 * human-readable `message` inherited from Error.
 */
const ClientUploadErrorClass = class extends Error {
	type;
	constructor(params) {
		super(params.message);
		this.type = params.type;
		this.message = params.message;
	}
};

//#endregion
export { ClientUploadErrorClass as t };
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
//#region src/helpers/format-bytes.d.ts
/**
 * Convert bytes to a human-readable format.
 *
 * @param bytes - Raw byte count to format.
 * @param options - Optional formatting configuration.
 * @returns The formatted string, e.g. `"1 kB"`.
 *
 * @example
 * ```ts
 * formatBytes(1000) // "1 kB"
 * formatBytes(1000, { decimalPlaces: 2 }) // "1.00 kB"
 * formatBytes(1024, { si: false }) // "1 KiB"
 * ```
 */
declare function formatBytes(bytes: number, options?: {
	/**
	 * Use metric units, aka powers of 1000.
	 * When `false`, binary units (powers of 1024: KiB, MiB, …) are used.
	 *
	 * @default true
	 */
	si?: boolean;
	/**
	 * Number of decimal places to show.
	 *
	 * @default 0
	 */
	decimalPlaces?: number;
}): string;
//#endregion
export { formatBytes };
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
//#region src/helpers/format-bytes.ts
/**
 * Convert bytes to a human-readable format.
 *
 * @param bytes - Raw byte count to format.
 * @param options - `si` selects metric (1000) vs binary (1024) units
 *                  (default true); `decimalPlaces` controls precision
 *                  (default 0).
 * @returns Formatted string such as "1 kB" or "1 KiB".
 *
 * @example
 * ```ts
 * formatBytes(1000) // "1 kB"
 * formatBytes(1000, { decimalPlaces: 2 }) // "1.00 kB"
 * formatBytes(1024, { si: false }) // "1 KiB"
 * ```
 */
function formatBytes(bytes, options) {
	const { si = true, decimalPlaces = 0 } = options || {};
	const threshold = si ? 1e3 : 1024;
	const units = si ? [
		"B",
		"kB",
		"MB",
		"GB",
		"TB",
		"PB"
	] : [
		"B",
		"KiB",
		"MiB",
		"GiB",
		"TiB",
		"PiB"
	];
	if (bytes < threshold) return `${bytes} ${units[0]}`;
	// Clamp to the largest known unit: without the clamp, values >= 1000^6
	// (or 1024^6) computed an exponent past the end of `units` and rendered
	// the literal string "undefined" as the unit.
	const exponent = Math.min(Math.floor(Math.log(bytes) / Math.log(threshold)), units.length - 1);
	const unit = units[exponent];
	return `${(bytes / Math.pow(threshold, exponent)).toFixed(decimalPlaces)} ${unit}`;
}

//#endregion
export { formatBytes };
|
package/dist/index.d.ts
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import { a as UploadHookProps, c as FileUploadInfo, l as UploadHookControl, o as UploadHookReturn, r as ServerMetadata, s as ClientUploadError, t as DirectUploadResult, u as UploadStatus } from "./internal-gLVx5GpR.js";

//#region src/hooks/use-upload-file.d.ts
/**
 * React hook that uploads a single file to S3 through the upload route.
 * Single-file variant of `useUploadFiles` (see `UploadHookProps<false>`).
 */
declare function useUploadFile(props: UploadHookProps<false>): UploadHookReturn<false>;
//#endregion
//#region src/hooks/use-upload-files.d.ts
/**
 * React hook that uploads multiple files to S3 through the upload route,
 * exposing per-file progress and lifecycle callbacks.
 */
declare function useUploadFiles({
	api,
	route,
	uploadBatchSize,
	multipartBatchSize,
	headers,
	credentials,
	signal,
	retry,
	retryDelay,
	onError,
	onBeforeUpload,
	onUploadBegin,
	onUploadComplete,
	onUploadFail,
	onUploadProgress,
	onUploadSettle
}: UploadHookProps<true>): UploadHookReturn<true>;
//#endregion
//#region src/utils/upload.d.ts
/**
 * Upload multiple files to S3.
 *
 * This will not throw if one of the uploads fails, but will return the files that failed to upload.
 */
declare function uploadFiles(params: {
	/** Endpoint of the upload route. Defaults to `/api/upload`. */
	api?: string;
	route: string;
	files: File[] | FileList;
	metadata?: ServerMetadata;
	/** Max number of multipart parts uploaded concurrently per file. */
	multipartBatchSize?: number;
	/** Max number of files uploaded concurrently. */
	uploadBatchSize?: number;
	signal?: AbortSignal;
	headers?: HeadersInit;
	credentials?: RequestCredentials;
	/** Number of extra attempts after a failed request. */
	retry?: number;
	/** Delay in ms between retry attempts. */
	retryDelay?: number;
	onUploadBegin?: (data: {
		files: FileUploadInfo<'pending'>[];
		metadata: ServerMetadata;
	}) => void;
	onFileStateChange?: (data: {
		file: FileUploadInfo<UploadStatus>;
	}) => void;
}): Promise<DirectUploadResult<true>>;
/**
 * Upload a single file to S3.
 *
 * This will throw if the upload fails.
 */
declare function uploadFile(params: {
	/** Endpoint of the upload route. Defaults to `/api/upload`. */
	api?: string;
	route: string;
	file: File;
	metadata?: ServerMetadata;
	/** Max number of multipart parts uploaded concurrently. */
	multipartBatchSize?: number;
	signal?: AbortSignal;
	headers?: HeadersInit;
	credentials?: RequestCredentials;
	/** Number of extra attempts after a failed request. */
	retry?: number;
	/** Delay in ms between retry attempts. */
	retryDelay?: number;
	onUploadBegin?: (data: {
		file: FileUploadInfo<'pending'>;
		metadata: ServerMetadata;
	}) => void;
	onFileStateChange?: (data: {
		file: FileUploadInfo<UploadStatus>;
	}) => void;
}): Promise<DirectUploadResult<false>>;
//#endregion
export { ClientUploadError, FileUploadInfo, UploadHookControl, UploadStatus, uploadFile, uploadFiles, useUploadFile, useUploadFiles };
|
package/dist/index.js
ADDED
|
@@ -0,0 +1,589 @@
|
|
|
1
|
+
import { t as ClientUploadErrorClass } from "./error-DFL8oJkp.js";
|
|
2
|
+
import { useCallback, useMemo, useState } from "react";
|
|
3
|
+
|
|
4
|
+
//#region src/utils/internal/retry.ts
|
|
5
|
+
async function withRetries(fn, { retry = 0, delay = 0, signal, abortHandler } = {}) {
|
|
6
|
+
const maxTries = retry + 1;
|
|
7
|
+
for (let attempt = 0; attempt < maxTries; attempt++) {
|
|
8
|
+
if (attempt > 0) {
|
|
9
|
+
if (signal?.aborted) {
|
|
10
|
+
abortHandler?.();
|
|
11
|
+
throw new Error("Retries aborted.");
|
|
12
|
+
}
|
|
13
|
+
if (delay) {
|
|
14
|
+
await new Promise((resolve) => {
|
|
15
|
+
const timeout = setTimeout(() => {
|
|
16
|
+
signal?.removeEventListener("abort", abort);
|
|
17
|
+
resolve(void 0);
|
|
18
|
+
}, delay);
|
|
19
|
+
const abort = () => {
|
|
20
|
+
clearTimeout(timeout);
|
|
21
|
+
signal?.removeEventListener("abort", abort);
|
|
22
|
+
resolve(void 0);
|
|
23
|
+
};
|
|
24
|
+
signal?.addEventListener("abort", abort);
|
|
25
|
+
});
|
|
26
|
+
if (signal?.aborted) {
|
|
27
|
+
abortHandler?.();
|
|
28
|
+
throw new Error("Retries aborted.");
|
|
29
|
+
}
|
|
30
|
+
}
|
|
31
|
+
}
|
|
32
|
+
try {
|
|
33
|
+
return await fn();
|
|
34
|
+
} catch (e) {
|
|
35
|
+
if (attempt === maxTries - 1) throw e;
|
|
36
|
+
}
|
|
37
|
+
}
|
|
38
|
+
throw new Error("Unreachable Better Upload code.");
|
|
39
|
+
}
|
|
40
|
+
|
|
41
|
+
//#endregion
|
|
42
|
+
//#region src/utils/internal/s3-upload.ts
|
|
43
|
+
/**
 * PUT a whole file to S3 through a pre-signed URL, reporting progress via
 * `params.onProgress` and honoring `params.signal`. Failed attempts are
 * retried through `withRetries`.
 */
async function uploadFileToS3(params) {
	const request = new XMLHttpRequest();
	const attemptUpload = () => new Promise((resolve, reject) => {
		const onAbort = createAbortHandler(request, reject);
		if (params.signal?.aborted) onAbort();
		params.signal?.addEventListener("abort", onAbort);
		request.onloadend = () => {
			params.signal?.removeEventListener("abort", onAbort);
			const succeeded = request.readyState === 4 && request.status === 200;
			if (!succeeded) {
				reject(new Error("Failed to upload file to S3."));
				return;
			}
			params.onProgress?.(1);
			resolve(void 0);
		};
		request.upload.onprogress = (event) => {
			if (!event.lengthComputable) return;
			// Cap at 99% until S3 confirms the upload finished.
			params.onProgress?.(Math.min(event.loaded / event.total, .99));
		};
		request.open("PUT", params.signedUrl, true);
		request.setRequestHeader("Content-Type", params.file.type);
		if (params.objectCacheControl) request.setRequestHeader("Cache-Control", params.objectCacheControl);
		for (const [key, value] of Object.entries(params.objectMetadata)) {
			request.setRequestHeader(`x-amz-meta-${key}`, value);
		}
		request.send(params.file);
	});
	await withRetries(attemptUpload, {
		retry: params.retry,
		delay: params.retryDelay,
		signal: params.signal,
		abortHandler: () => {
			request.abort();
			throw new Error("Upload aborted.");
		}
	});
}
|
|
76
|
+
/**
 * Upload a file to S3 in multiple parts via pre-signed part URLs, then
 * finish with a signed CompleteMultipartUpload POST.
 *
 * Progress is reported as the mean across parts, capped at 99% until the
 * completion request succeeds. Parts are uploaded in batches of
 * `partsBatchSize` (all at once when unset).
 */
async function uploadMultipartFileToS3(params) {
	const uploadedParts = [];
	const progresses = {};
	const uploadPromises = params.parts.map((part) => async () => {
		const xhr = new XMLHttpRequest();
		// Each part covers [start, end) of the file, laid out by partSize.
		const start = (part.partNumber - 1) * params.partSize;
		const end = Math.min(start + part.size, params.file.size);
		const blob = params.file.slice(start, end);
		await withRetries(() => new Promise((resolve, reject) => {
			const abortHandler = createAbortHandler(xhr, reject);
			if (params.signal?.aborted) abortHandler();
			params.signal?.addEventListener("abort", abortHandler);
			xhr.onloadend = () => {
				params.signal?.removeEventListener("abort", abortHandler);
				if (xhr.readyState === 4 && xhr.status === 200) {
					const etag = xhr.getResponseHeader("ETag");
					if (!etag) {
						// Previously this crashed with a TypeError when the ETag
						// response header was absent (e.g. not exposed through the
						// bucket's CORS ExposeHeaders). Reject instead so the
						// normal retry/failure handling applies.
						reject(new Error("Missing ETag header in S3 part upload response. Check the bucket's CORS configuration."));
						return;
					}
					uploadedParts.push({
						etag: etag.replace(/"/g, ""),
						number: part.partNumber
					});
					resolve();
				} else reject(new Error("Failed to upload part to S3."));
			};
			xhr.upload.onprogress = (event) => {
				if (event.lengthComputable) {
					progresses[part.partNumber] = event.loaded / event.total;
					const totalProgress = Object.values(progresses).reduce((acc, curr) => acc + curr, 0) / params.parts.length;
					params.onProgress?.(Math.min(totalProgress, .99));
				}
			};
			xhr.open("PUT", part.signedUrl, true);
			xhr.send(blob);
		}), {
			retry: params.retry,
			delay: params.retryDelay,
			signal: params.signal,
			abortHandler: () => {
				xhr.abort();
				throw new Error("Upload aborted.");
			}
		});
	});
	// Upload parts in batches; default is full concurrency.
	const batchSize = params.partsBatchSize || uploadPromises.length;
	for (let i = 0; i < uploadPromises.length; i += batchSize) await Promise.all(uploadPromises.slice(i, i + batchSize).map((fn) => fn()));
	// CompleteMultipartUpload requires parts listed in ascending PartNumber order.
	const completeXmlBody = `
<CompleteMultipartUpload>
${uploadedParts.sort((a, b) => a.number - b.number).map((part) => `<Part>
<ETag>${part.etag}</ETag>
<PartNumber>${part.number}</PartNumber>
</Part>`).join("")}
</CompleteMultipartUpload>
`;
	if (!(await withRetries(() => fetch(params.completeSignedUrl, {
		method: "POST",
		body: completeXmlBody,
		headers: { "Content-Type": "application/xml" },
		signal: params.signal
	}), {
		retry: params.retry,
		delay: params.retryDelay,
		signal: params.signal
	})).ok) throw new Error("Failed to complete multipart upload.");
	params.onProgress?.(1);
}
|
|
139
|
+
function createAbortHandler(xhr, reject) {
|
|
140
|
+
return () => {
|
|
141
|
+
xhr.abort();
|
|
142
|
+
reject(/* @__PURE__ */ new Error("Upload aborted."));
|
|
143
|
+
};
|
|
144
|
+
}
|
|
145
|
+
|
|
146
|
+
//#endregion
|
|
147
|
+
//#region src/utils/upload.ts
|
|
148
|
+
/**
 * Upload multiple files to S3.
 *
 * This will not throw if one of the uploads fails, but will return the files that failed to upload.
 */
async function uploadFiles(params) {
	const localFiles = Array.from(params.files);
	if (localFiles.length === 0) {
		throw new ClientUploadErrorClass({
			type: "no_files",
			message: "No files to upload."
		});
	}
	try {
		// Ask the upload route for pre-signed URLs for every file.
		const requestHeaders = new Headers(params.headers);
		requestHeaders.set("Content-Type", "application/json");
		const presignResponse = await withRetries(() => fetch(params.api || "/api/upload", {
			method: "POST",
			body: JSON.stringify({
				route: params.route,
				metadata: params.metadata,
				files: localFiles.map((f) => ({
					name: f.name,
					size: f.size,
					type: f.type
				}))
			}),
			headers: requestHeaders,
			credentials: params.credentials,
			signal: params.signal
		}), {
			retry: params.retry,
			delay: params.retryDelay,
			signal: params.signal
		});
		if (!presignResponse.ok) {
			const { error } = await presignResponse.json();
			throw new ClientUploadErrorClass({
				type: error.type || "unknown",
				message: error.message || "Failed to obtain pre-signed URLs."
			});
		}
		const payload = await presignResponse.json();
		const isMultipartPayload = "multipart" in payload;
		const signedUrls = isMultipartPayload ? payload.multipart.files : payload.files;
		const serverMetadata = payload.metadata;
		const partSize = isMultipartPayload ? payload.multipart.partSize : 0;
		if (!signedUrls || signedUrls.length === 0) {
			throw new ClientUploadErrorClass({
				type: "unknown",
				message: "No pre-signed URLs returned from server. Check your upload router config."
			});
		}
		// Per-file upload state, keyed by S3 object key.
		const uploads = new Map(signedUrls.map((entry) => [entry.file.objectInfo.key, {
			status: "pending",
			progress: 0,
			raw: localFiles.find((f) => f.name === entry.file.name && f.size === entry.file.size && f.type === entry.file.type),
			...entry.file
		}]));
		// Merge a partial state update and notify the caller.
		const patchUpload = (key, patch) => {
			uploads.set(key, {
				...uploads.get(key),
				...patch
			});
			params.onFileStateChange?.({ file: uploads.get(key) });
		};
		const uploadTasks = localFiles.map((file) => async () => {
			const entry = signedUrls.find((item) => item.file.name === file.name && item.file.size === file.size && item.file.type === file.type);
			const isMultipart = "parts" in entry;
			const key = entry.file.objectInfo.key;
			try {
				patchUpload(key, {
					status: "uploading",
					progress: 0
				});
				if (isMultipart) {
					await uploadMultipartFileToS3({
						file,
						parts: entry.parts,
						partSize,
						uploadId: entry.uploadId,
						completeSignedUrl: entry.completeSignedUrl,
						partsBatchSize: params.multipartBatchSize,
						signal: params.signal,
						retry: params.retry,
						retryDelay: params.retryDelay,
						onProgress: (progress) => {
							// A part may report progress after the file was marked failed; ignore it.
							if (uploads.get(key).status === "failed") return;
							patchUpload(key, {
								status: progress === 1 ? "complete" : "uploading",
								progress
							});
						}
					});
				} else {
					await uploadFileToS3({
						file,
						signedUrl: entry.signedUrl,
						objectMetadata: entry.file.objectInfo.metadata,
						objectCacheControl: entry.file.objectInfo.cacheControl,
						signal: params.signal,
						retry: params.retry,
						retryDelay: params.retryDelay,
						onProgress: (progress) => {
							patchUpload(key, {
								status: progress === 1 ? "complete" : "uploading",
								progress
							});
						}
					});
				}
			} catch (error) {
				// Best-effort multipart abort so S3 doesn't keep orphaned parts.
				if (isMultipart) await fetch(entry.abortSignedUrl, { method: "DELETE" }).catch(() => {});
				patchUpload(key, {
					status: "failed",
					error: {
						type: params.signal?.aborted ? "aborted" : "s3_upload",
						message: params.signal?.aborted ? "Upload aborted." : "Failed to upload file to S3."
					}
				});
			}
		});
		params.onUploadBegin?.({
			files: Array.from(uploads.values()),
			metadata: serverMetadata
		});
		uploads.forEach((file) => {
			params.onFileStateChange?.({ file });
		});
		// Run uploads in batches; default is full concurrency.
		const batchSize = params.uploadBatchSize || localFiles.length;
		for (let i = 0; i < uploadTasks.length; i += batchSize) {
			await Promise.all(uploadTasks.slice(i, i + batchSize).map((task) => task()));
		}
		return {
			files: Array.from(uploads.values()).filter((file) => file.status === "complete"),
			failedFiles: Array.from(uploads.values()).filter((file) => file.status === "failed"),
			metadata: serverMetadata
		};
	} catch (error) {
		// Normalize every failure into a ClientUploadErrorClass for callers.
		if (params.signal?.aborted) throw new ClientUploadErrorClass({
			type: "aborted",
			message: "Upload aborted."
		});
		if (error instanceof ClientUploadErrorClass) throw error;
		if (error instanceof Error) throw new ClientUploadErrorClass({
			type: "unknown",
			message: error.message
		});
		throw new ClientUploadErrorClass({
			type: "unknown",
			message: "Failed to upload files."
		});
	}
}
|
|
292
|
+
/**
 * Upload a single file to S3.
 *
 * This will throw if the upload fails.
 */
async function uploadFile(params) {
	// Delegate to the multi-file path with a one-element array, adapting the
	// onUploadBegin payload to the single-file shape.
	const result = await uploadFiles({
		api: params.api,
		route: params.route,
		files: [params.file],
		metadata: params.metadata,
		multipartBatchSize: params.multipartBatchSize,
		signal: params.signal,
		headers: params.headers,
		credentials: params.credentials,
		retry: params.retry,
		retryDelay: params.retryDelay,
		onUploadBegin: (data) => {
			params.onUploadBegin?.({
				file: data.files[0],
				metadata: data.metadata
			});
		},
		onFileStateChange: params.onFileStateChange
	});
	const [uploaded] = result.files;
	// uploadFiles reports per-file failures instead of throwing; a single-file
	// upload with no completed file is treated as a hard failure here.
	if (!uploaded) {
		throw new ClientUploadErrorClass({
			type: "unknown",
			message: "Failed to upload file."
		});
	}
	return {
		file: uploaded,
		metadata: result.metadata
	};
}
|
|
327
|
+
|
|
328
|
+
//#endregion
|
|
329
|
+
//#region src/hooks/use-upload-files.ts
// React hook wrapping uploadFiles(): tracks per-file state in a Map keyed by
// S3 object key, derives aggregate flags via useMemo, and exposes both a
// throwing (uploadAsync) and non-throwing (upload) entry point.
function useUploadFiles({ api, route, uploadBatchSize, multipartBatchSize, headers, credentials, signal, retry, retryDelay, onError, onBeforeUpload, onUploadBegin, onUploadComplete, onUploadFail, onUploadProgress, onUploadSettle }) {
	const [uploads, setUploads] = useState(() => /* @__PURE__ */ new Map());
	const [serverMetadata, setServerMetadata] = useState({});
	const [isPending, setIsPending] = useState(false);
	const [error, setError] = useState(null);
	// Derived views over the uploads Map.
	const uploadsArray = useMemo(() => Array.from(uploads.values()), [uploads]);
	const uploadedFiles = useMemo(() => uploadsArray.filter((file) => file.status === "complete"), [uploadsArray]);
	const failedFiles = useMemo(() => uploadsArray.filter((file) => file.status === "failed"), [uploadsArray]);
	// Aggregate flags are false while no upload has been started (empty array).
	const allSucceeded = useMemo(() => uploadsArray.length > 0 && uploadsArray.every((file) => file.status === "complete"), [uploadsArray]);
	const hasFailedFiles = useMemo(() => uploadsArray.length > 0 && uploadsArray.some((file) => file.status === "failed"), [uploadsArray]);
	const isSettled = useMemo(() => uploadsArray.length > 0 && uploadsArray.every((file) => file.status === "complete" || file.status === "failed"), [uploadsArray]);
	const averageProgress = useMemo(() => uploadsArray.length === 0 ? 0 : uploadsArray.reduce((acc, file) => acc + file.progress, 0) / uploadsArray.length, [uploadsArray]);
	// Throwing variant: rejects with a ClientUploadErrorClass on any failure.
	// NOTE: `reset` is referenced here but declared below; this works because
	// uploadAsync only runs after render, when the binding is initialized.
	const uploadAsync = useCallback(async (files, { metadata } = {}) => {
		reset();
		setIsPending(true);
		const fileArray = Array.from(files);
		try {
			if (fileArray.length === 0) throw new ClientUploadErrorClass({
				type: "no_files",
				message: "No files to upload."
			});
			let filesToUpload = fileArray;
			if (onBeforeUpload) {
				// onBeforeUpload may return a replacement file array; any
				// non-array return keeps the original selection.
				const callbackResult = await onBeforeUpload({ files: fileArray });
				if (Array.isArray(callbackResult)) {
					if (callbackResult.length === 0) throw new ClientUploadErrorClass({
						type: "no_files",
						message: "No files to upload."
					});
					filesToUpload = callbackResult;
				}
			}
			const result = await uploadFiles({
				api,
				route,
				files: filesToUpload,
				metadata,
				uploadBatchSize,
				multipartBatchSize,
				headers,
				credentials,
				signal,
				retry,
				retryDelay,
				onUploadBegin,
				onFileStateChange: ({ file }) => {
					// Copy the Map so React sees a new reference and re-renders.
					setUploads((prev) => new Map(prev).set(file.objectInfo.key, file));
					onUploadProgress?.({ file });
				}
			});
			if (result.files.length > 0) await onUploadComplete?.(result);
			if (result.failedFiles.length > 0) await onUploadFail?.({
				succeededFiles: result.files,
				failedFiles: result.failedFiles,
				metadata: result.metadata
			});
			setIsPending(false);
			setServerMetadata(result.metadata);
			await onUploadSettle?.(result);
			return result;
		} catch (error$1) {
			setIsPending(false);
			// Normalize the error, notify onError/onUploadSettle, then rethrow.
			if (error$1 instanceof ClientUploadErrorClass) {
				onError?.(error$1);
				setError(error$1);
				await onUploadSettle?.({
					files: [],
					failedFiles: [],
					metadata: {}
				});
				throw error$1;
			} else if (error$1 instanceof Error) {
				const _error = new ClientUploadErrorClass({
					type: "unknown",
					message: error$1.message
				});
				onError?.(_error);
				setError(_error);
				await onUploadSettle?.({
					files: [],
					failedFiles: [],
					metadata: {}
				});
				throw _error;
			} else {
				const _error = new ClientUploadErrorClass({
					type: "unknown",
					message: "Failed to upload files."
				});
				onError?.(_error);
				setError(_error);
				await onUploadSettle?.({
					files: [],
					failedFiles: [],
					metadata: {}
				});
				throw _error;
			}
		}
	}, [
		// NOTE(review): `retry`, `retryDelay` and `reset` are absent from this
		// dependency list, so the memoized callback can close over stale
		// values of them — confirm this is intentional.
		api,
		route,
		uploadBatchSize,
		multipartBatchSize,
		headers,
		credentials,
		signal,
		onError,
		onBeforeUpload,
		onUploadBegin,
		onUploadComplete,
		onUploadFail,
		onUploadProgress,
		onUploadSettle
	]);
	// Non-throwing variant: swallows errors and returns an empty result.
	const upload = useCallback(async (files, options = {}) => {
		try {
			return await uploadAsync(files, options);
		} catch (error$1) {
			return {
				files: [],
				failedFiles: [],
				metadata: {}
			};
		}
	}, [uploadAsync]);
	// Clears all upload state back to the initial values.
	const reset = useCallback(() => {
		setUploads(/* @__PURE__ */ new Map());
		setServerMetadata({});
		setIsPending(false);
		setError(null);
	}, []);
	const control = useMemo(() => ({
		uploadAsync,
		upload,
		reset,
		progresses: uploadsArray,
		allSucceeded,
		hasFailedFiles,
		uploadedFiles,
		failedFiles,
		isSettled,
		averageProgress,
		isPending,
		isError: !!error,
		isAborted: signal?.aborted ?? false,
		error,
		metadata: serverMetadata
	}), [
		uploadAsync,
		upload,
		reset,
		uploadsArray,
		allSucceeded,
		hasFailedFiles,
		uploadedFiles,
		failedFiles,
		isSettled,
		averageProgress,
		isPending,
		signal?.aborted,
		error,
		serverMetadata
	]);
	// Expose everything both flat and under `control` for component wiring.
	return {
		...control,
		control
	};
}

//#endregion
|
|
501
|
+
//#region src/hooks/use-upload-file.ts
// Single-file convenience wrapper over useUploadFiles: forces a batch size
// of 1 and adapts every multi-file callback payload to its single-file shape.
function useUploadFile(props) {
	const { upload, uploadAsync, reset, averageProgress, error, isError, isPending, isSettled, isAborted, allSucceeded, metadata, uploadedFiles } = useUploadFiles({
		api: props.api,
		route: props.route,
		uploadBatchSize: 1,
		multipartBatchSize: props.multipartBatchSize,
		headers: props.headers,
		credentials: props.credentials,
		signal: props.signal,
		retry: props.retry,
		retryDelay: props.retryDelay,
		onError: props.onError,
		onUploadProgress: props.onUploadProgress,
		// Adapt: single-file onBeforeUpload may return a replacement File,
		// which is re-wrapped in an array; returning undefined keeps the input.
		onBeforeUpload: props.onBeforeUpload ? async ({ files }) => {
			const result = await props.onBeforeUpload({ file: files[0] });
			if (result) return [result];
		} : void 0,
		onUploadBegin: props.onUploadBegin ? ({ files, metadata: metadata$1 }) => props.onUploadBegin({
			file: files[0],
			metadata: metadata$1
		}) : void 0,
		onUploadComplete: props.onUploadComplete ? async ({ files, metadata: metadata$1 }) => {
			await props.onUploadComplete({
				file: files[0],
				metadata: metadata$1
			});
		} : void 0,
		// Per-file failures are surfaced through onError with the first
		// failed file's error details.
		onUploadFail: props.onError ? async ({ failedFiles }) => {
			props.onError?.({
				type: failedFiles[0].error.type,
				message: failedFiles[0].error.message
			});
		} : void 0,
		onUploadSettle: props.onUploadSettle ? async ({ files, metadata: metadata$1 }) => {
			await props.onUploadSettle({
				file: files[0],
				metadata: metadata$1
			});
		} : void 0
	});
	const uploadedFile = uploadedFiles?.[0] ?? null;
	const control = useMemo(() => ({
		// Wrap the array-based entry points into single-file signatures.
		upload: async (file, options) => {
			const result = await upload([file], options);
			return {
				file: result.files[0],
				metadata: result.metadata
			};
		},
		uploadAsync: async (file, options) => {
			const result = await uploadAsync([file], options);
			return {
				file: result.files[0],
				metadata: result.metadata
			};
		},
		reset,
		progress: averageProgress,
		error,
		isError,
		isPending,
		isSettled,
		isSuccess: allSucceeded,
		isAborted,
		metadata,
		uploadedFile
	}), [
		upload,
		uploadAsync,
		reset,
		averageProgress,
		error,
		isError,
		isPending,
		isSettled,
		allSucceeded,
		isAborted,
		metadata,
		uploadedFile
	]);
	// Expose everything both flat and under `control` for component wiring.
	return {
		...control,
		control
	};
}

//#endregion
|
|
589
|
+
export { uploadFile, uploadFiles, useUploadFile, useUploadFiles };
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
import { a as UploadHookProps, i as SignedUrlsSuccessResponse, n as ObjectMetadata, o as UploadHookReturn, r as ServerMetadata, s as ClientUploadError, t as DirectUploadResult } from "./internal-gLVx5GpR.js";

//#region src/types/error.d.ts
/**
 * Error raised by the client upload helpers. Extends Error with a
 * machine-readable `type` discriminator (see `ClientUploadError['type']`).
 */
declare class ClientUploadErrorClass extends Error {
	type: ClientUploadError['type'];
	constructor({
		type,
		message
	}: {
		type: ClientUploadError['type'];
		message: string;
	});
}
//#endregion
export { ClientUploadErrorClass, DirectUploadResult, ObjectMetadata, ServerMetadata, SignedUrlsSuccessResponse, UploadHookProps, UploadHookReturn };
|
|
@@ -0,0 +1,360 @@
|
|
|
1
|
+
//#region src/types/public.d.ts
|
|
2
|
+
type ClientUploadError = {
|
|
3
|
+
type: 'unknown' | 'invalid_request' | 'no_files' | 's3_upload' | 'file_too_large' | 'invalid_file_type' | 'rejected' | 'too_many_files' | 'aborted';
|
|
4
|
+
message: string;
|
|
5
|
+
};
|
|
6
|
+
type UploadStatus = 'pending' | 'uploading' | 'complete' | 'failed';
|
|
7
|
+
type FileUploadInfo<T extends UploadStatus> = {
|
|
8
|
+
/**
|
|
9
|
+
* The status of the file being uploaded.
|
|
10
|
+
*
|
|
11
|
+
* - `pending` - The file is waiting to be uploaded. The signed URL has already been generated.
|
|
12
|
+
* - `uploading` - The file is currently being uploaded.
|
|
13
|
+
* - `complete` - The file has been uploaded successfully.
|
|
14
|
+
* - `failed` - The file failed to upload.
|
|
15
|
+
*/
|
|
16
|
+
status: T;
|
|
17
|
+
/**
|
|
18
|
+
* The progress of the upload, from 0 to 1.
|
|
19
|
+
*
|
|
20
|
+
* @example 0.5 // 50%
|
|
21
|
+
*/
|
|
22
|
+
progress: number;
|
|
23
|
+
/**
|
|
24
|
+
* The original raw file that was uploaded.
|
|
25
|
+
*/
|
|
26
|
+
raw: File;
|
|
27
|
+
/**
|
|
28
|
+
* The name of the file.
|
|
29
|
+
*/
|
|
30
|
+
name: string;
|
|
31
|
+
/**
|
|
32
|
+
* The size of the file in bytes.
|
|
33
|
+
*/
|
|
34
|
+
size: number;
|
|
35
|
+
/**
|
|
36
|
+
* The type of the file.
|
|
37
|
+
*/
|
|
38
|
+
type: string;
|
|
39
|
+
/**
|
|
40
|
+
* Information about the S3 object.
|
|
41
|
+
*/
|
|
42
|
+
objectInfo: {
|
|
43
|
+
/**
|
|
44
|
+
* The key of the S3 object.
|
|
45
|
+
*/
|
|
46
|
+
key: string;
|
|
47
|
+
/**
|
|
48
|
+
* The metadata of the S3 object.
|
|
49
|
+
*/
|
|
50
|
+
metadata: ObjectMetadata;
|
|
51
|
+
/**
|
|
52
|
+
* The Cache-Control header of the S3 object.
|
|
53
|
+
*/
|
|
54
|
+
cacheControl?: string;
|
|
55
|
+
};
|
|
56
|
+
} & (T extends 'failed' ? {
|
|
57
|
+
error: ClientUploadError;
|
|
58
|
+
} : {});
|
|
59
|
+
type UploadHookControl<T extends boolean> = {
|
|
60
|
+
/**
|
|
61
|
+
* Metadata sent back from the server.
|
|
62
|
+
*/
|
|
63
|
+
metadata: ServerMetadata;
|
|
64
|
+
/**
|
|
65
|
+
* If a critical error occurred during the upload, and no files were able to be uploaded. For example, if your server is unreachable.
|
|
66
|
+
*
|
|
67
|
+
* Is also `true` if some input is invalid. For example, if no files were selected.
|
|
68
|
+
*/
|
|
69
|
+
isError: boolean;
|
|
70
|
+
/**
|
|
71
|
+
* The critical error that occurred during the upload.
|
|
72
|
+
*
|
|
73
|
+
* @see `isError` for more information.
|
|
74
|
+
*/
|
|
75
|
+
error: ClientUploadError | null;
|
|
76
|
+
/**
|
|
77
|
+
* If the upload is in progress.
|
|
78
|
+
*/
|
|
79
|
+
isPending: boolean;
|
|
80
|
+
/**
|
|
81
|
+
* If the upload progress is complete. Regardless of if all files succeeded or failed to upload.
|
|
82
|
+
*/
|
|
83
|
+
isSettled: boolean;
|
|
84
|
+
/**
|
|
85
|
+
* If the upload was aborted.
|
|
86
|
+
*/
|
|
87
|
+
isAborted: boolean;
|
|
88
|
+
/**
|
|
89
|
+
* Reset the state of the upload.
|
|
90
|
+
*/
|
|
91
|
+
reset: () => void;
|
|
92
|
+
/**
|
|
93
|
+
* Upload files to S3.
|
|
94
|
+
*
|
|
95
|
+
* Will throw if critical errors occur.
|
|
96
|
+
*/
|
|
97
|
+
uploadAsync: (input: T extends true ? File[] | FileList : File, options?: {
|
|
98
|
+
metadata?: ServerMetadata;
|
|
99
|
+
}) => Promise<DirectUploadResult<T>>;
|
|
100
|
+
/**
|
|
101
|
+
* Upload files to S3.
|
|
102
|
+
*
|
|
103
|
+
* Will never throw an error.
|
|
104
|
+
*/
|
|
105
|
+
upload: (input: T extends true ? File[] | FileList : File, options?: {
|
|
106
|
+
metadata?: ServerMetadata;
|
|
107
|
+
}) => Promise<DirectUploadResult<T>>;
|
|
108
|
+
} & (T extends true ? {
|
|
109
|
+
/**
|
|
110
|
+
* The progress of all files during the upload process.
|
|
111
|
+
*
|
|
112
|
+
* `uploadedFiles` and `failedFiles` derive from this array, use this to get information about **all** files.
|
|
113
|
+
*/
|
|
114
|
+
progresses: FileUploadInfo<UploadStatus>[];
|
|
115
|
+
/**
|
|
116
|
+
* If all files succeeded to upload.
|
|
117
|
+
*/
|
|
118
|
+
allSucceeded: boolean;
|
|
119
|
+
/**
|
|
120
|
+
* If some files failed to upload.
|
|
121
|
+
*/
|
|
122
|
+
hasFailedFiles: boolean;
|
|
123
|
+
/**
|
|
124
|
+
* Files that succeeded to upload.
|
|
125
|
+
*/
|
|
126
|
+
uploadedFiles: FileUploadInfo<'complete'>[];
|
|
127
|
+
/**
|
|
128
|
+
* Files that failed to upload.
|
|
129
|
+
*/
|
|
130
|
+
failedFiles: FileUploadInfo<'failed'>[];
|
|
131
|
+
/**
|
|
132
|
+
* The progress of **all** files during the upload. Goes from 0 to 1.
|
|
133
|
+
*
|
|
134
|
+
* If one file is 100% complete, and another is 0% complete, this will be 0.5.
|
|
135
|
+
*
|
|
136
|
+
* @example 0.5 // 50%
|
|
137
|
+
*/
|
|
138
|
+
averageProgress: number;
|
|
139
|
+
} : {
|
|
140
|
+
/**
|
|
141
|
+
* The progress of the file during the upload process.
|
|
142
|
+
*/
|
|
143
|
+
progress: number;
|
|
144
|
+
/**
|
|
145
|
+
* The file that was successfully uploaded.
|
|
146
|
+
*/
|
|
147
|
+
uploadedFile: FileUploadInfo<'complete'> | null;
|
|
148
|
+
/**
|
|
149
|
+
* If the file was successfully uploaded.
|
|
150
|
+
*/
|
|
151
|
+
isSuccess: boolean;
|
|
152
|
+
});
|
|
153
|
+
//#endregion
|
|
154
|
+
//#region src/types/internal.d.ts
|
|
155
|
+
type ObjectMetadata = Record<string, string>;
|
|
156
|
+
type ServerMetadata = Record<string, unknown>;
|
|
157
|
+
type SignedUrlsSuccessResponse = {
|
|
158
|
+
metadata: ServerMetadata;
|
|
159
|
+
} & ({
|
|
160
|
+
multipart: {
|
|
161
|
+
files: {
|
|
162
|
+
file: {
|
|
163
|
+
name: string;
|
|
164
|
+
size: number;
|
|
165
|
+
type: string;
|
|
166
|
+
objectInfo: {
|
|
167
|
+
key: string;
|
|
168
|
+
metadata: ObjectMetadata;
|
|
169
|
+
cacheControl?: string;
|
|
170
|
+
};
|
|
171
|
+
};
|
|
172
|
+
parts: {
|
|
173
|
+
signedUrl: string;
|
|
174
|
+
partNumber: number;
|
|
175
|
+
size: number;
|
|
176
|
+
}[];
|
|
177
|
+
uploadId: string;
|
|
178
|
+
completeSignedUrl: string;
|
|
179
|
+
abortSignedUrl: string;
|
|
180
|
+
}[];
|
|
181
|
+
partSize: number;
|
|
182
|
+
};
|
|
183
|
+
} | {
|
|
184
|
+
files: {
|
|
185
|
+
signedUrl: string;
|
|
186
|
+
file: {
|
|
187
|
+
name: string;
|
|
188
|
+
size: number;
|
|
189
|
+
type: string;
|
|
190
|
+
objectInfo: {
|
|
191
|
+
key: string;
|
|
192
|
+
metadata: ObjectMetadata;
|
|
193
|
+
cacheControl?: string;
|
|
194
|
+
};
|
|
195
|
+
};
|
|
196
|
+
}[];
|
|
197
|
+
});
|
|
198
|
+
type UploadHookProps<T extends boolean> = {
|
|
199
|
+
/**
|
|
200
|
+
* The API endpoint to use for uploading files.
|
|
201
|
+
*
|
|
202
|
+
* @default '/api/upload'
|
|
203
|
+
*/
|
|
204
|
+
api?: string;
|
|
205
|
+
/**
|
|
206
|
+
* The route to use to upload the files. Should match the upload route name defined in the server.
|
|
207
|
+
*/
|
|
208
|
+
route: string;
|
|
209
|
+
/**
|
|
210
|
+
* The number of parts that will be uploaded in parallel when uploading a file.
|
|
211
|
+
*
|
|
212
|
+
* **Only used in multipart uploads.**
|
|
213
|
+
*
|
|
214
|
+
* @default All parts at once.
|
|
215
|
+
*/
|
|
216
|
+
multipartBatchSize?: number;
|
|
217
|
+
/**
|
|
218
|
+
* Callback that is called before requesting the pre-signed URLs. Use this to modify files before uploading them, like resizing or compressing.
|
|
219
|
+
*
|
|
220
|
+
* You can also throw an error to reject the file upload.
|
|
221
|
+
*/
|
|
222
|
+
onBeforeUpload?: (data: T extends true ? {
|
|
223
|
+
files: File[];
|
|
224
|
+
} : {
|
|
225
|
+
file: File;
|
|
226
|
+
}) => void | (T extends true ? File[] | Promise<void | File[]> : File | Promise<void | File>);
|
|
227
|
+
/**
|
|
228
|
+
* Event that is called before the files start being uploaded to S3. This happens after the server responds with the pre-signed URL.
|
|
229
|
+
*/
|
|
230
|
+
onUploadBegin?: (data: {
|
|
231
|
+
/**
|
|
232
|
+
* Metadata sent from the server.
|
|
233
|
+
*/
|
|
234
|
+
metadata: ServerMetadata;
|
|
235
|
+
} & (T extends true ? {
|
|
236
|
+
files: FileUploadInfo<'pending'>[];
|
|
237
|
+
} : {
|
|
238
|
+
file: FileUploadInfo<'pending'>;
|
|
239
|
+
})) => void;
|
|
240
|
+
/**
|
|
241
|
+
* Event that is called when a file upload progress changes.
|
|
242
|
+
*/
|
|
243
|
+
onUploadProgress?: (data: {
|
|
244
|
+
file: FileUploadInfo<UploadStatus>;
|
|
245
|
+
}) => void;
|
|
246
|
+
/**
|
|
247
|
+
* Event that is called after files are successfully uploaded.
|
|
248
|
+
*
|
|
249
|
+
* This event is called even if some files fail to upload, but some succeed. This event is not called if all files fail to upload.
|
|
250
|
+
*/
|
|
251
|
+
onUploadComplete?: (data: {
|
|
252
|
+
/**
|
|
253
|
+
* Metadata sent back from the server.
|
|
254
|
+
*/
|
|
255
|
+
metadata: ServerMetadata;
|
|
256
|
+
} & (T extends true ? {
|
|
257
|
+
files: FileUploadInfo<'complete'>[];
|
|
258
|
+
failedFiles: FileUploadInfo<'failed'>[];
|
|
259
|
+
} : {
|
|
260
|
+
file: FileUploadInfo<'complete'>;
|
|
261
|
+
})) => void | Promise<void>;
|
|
262
|
+
/**
|
|
263
|
+
* Event that is called after the upload settles (either successfully completed or an error occurs).
|
|
264
|
+
*/
|
|
265
|
+
onUploadSettle?: (data: {
|
|
266
|
+
/**
|
|
267
|
+
* Metadata sent back from the server.
|
|
268
|
+
*/
|
|
269
|
+
metadata: ServerMetadata;
|
|
270
|
+
} & (T extends true ? {
|
|
271
|
+
files: FileUploadInfo<'complete'>[];
|
|
272
|
+
failedFiles: FileUploadInfo<'failed'>[];
|
|
273
|
+
} : {
|
|
274
|
+
file: FileUploadInfo<'complete'>;
|
|
275
|
+
})) => void | Promise<void>;
|
|
276
|
+
/**
|
|
277
|
+
* Abort signal to cancel the upload.
|
|
278
|
+
*/
|
|
279
|
+
signal?: AbortSignal;
|
|
280
|
+
/**
|
|
281
|
+
* Headers to send to your server when requesting the pre-signed URLs.
|
|
282
|
+
*/
|
|
283
|
+
headers?: HeadersInit;
|
|
284
|
+
/**
|
|
285
|
+
* Credentials mode when requesting pre-signed URLs from your server.
|
|
286
|
+
*
|
|
287
|
+
* Use `include` to send cookies if your server is on a different origin.
|
|
288
|
+
*/
|
|
289
|
+
credentials?: RequestCredentials;
|
|
290
|
+
/**
|
|
291
|
+
* Number of times to retry network requests that fail.
|
|
292
|
+
*
|
|
293
|
+
* @default 0
|
|
294
|
+
*/
|
|
295
|
+
retry?: number;
|
|
296
|
+
/**
|
|
297
|
+
* Delay between retries in milliseconds.
|
|
298
|
+
*
|
|
299
|
+
* @default 0
|
|
300
|
+
*/
|
|
301
|
+
retryDelay?: number;
|
|
302
|
+
} & (T extends true ? {
|
|
303
|
+
/**
|
|
304
|
+
* The size of the batch to upload files in parallel. Use `1` to upload files sequentially.
|
|
305
|
+
*
|
|
306
|
+
* By default, all files are uploaded in parallel.
|
|
307
|
+
*/
|
|
308
|
+
uploadBatchSize?: number;
|
|
309
|
+
/**
|
|
310
|
+
* Event that is called after the entire upload if a file fails to upload.
|
|
311
|
+
*
|
|
312
|
+
* This event is called even if some files succeed to upload, but some fail. This event is not called if all files succeed.
|
|
313
|
+
*/
|
|
314
|
+
onUploadFail?: (data: {
|
|
315
|
+
/**
|
|
316
|
+
* Metadata sent back from the server.
|
|
317
|
+
*/
|
|
318
|
+
metadata: ServerMetadata;
|
|
319
|
+
succeededFiles: FileUploadInfo<'complete'>[];
|
|
320
|
+
failedFiles: FileUploadInfo<'failed'>[];
|
|
321
|
+
}) => void | Promise<void>;
|
|
322
|
+
/**
|
|
323
|
+
* Event that is called if a critical error occurs before the upload to S3, and no files were able to be uploaded. For example, if your server is unreachable.
|
|
324
|
+
*
|
|
325
|
+
* Is also called if some input is invalid. For example, if no files were selected.
|
|
326
|
+
*/
|
|
327
|
+
onError?: (error: ClientUploadError) => void;
|
|
328
|
+
} : {
|
|
329
|
+
/**
|
|
330
|
+
* Event that is called if the upload fails.
|
|
331
|
+
*
|
|
332
|
+
* Also called if some input is invalid. For example, if no files were selected.
|
|
333
|
+
*/
|
|
334
|
+
onError?: (error: ClientUploadError) => void;
|
|
335
|
+
});
|
|
336
|
+
type UploadHookReturn<T extends boolean> = UploadHookControl<T> & {
|
|
337
|
+
control: UploadHookControl<T>;
|
|
338
|
+
};
|
|
339
|
+
type DirectUploadResult<T extends boolean> = {
|
|
340
|
+
/**
|
|
341
|
+
* Metadata sent back from the server.
|
|
342
|
+
*/
|
|
343
|
+
metadata: ServerMetadata;
|
|
344
|
+
} & (T extends true ? {
|
|
345
|
+
/**
|
|
346
|
+
* Files that were successfully uploaded.
|
|
347
|
+
*/
|
|
348
|
+
files: FileUploadInfo<'complete'>[];
|
|
349
|
+
/**
|
|
350
|
+
* Files that failed to upload.
|
|
351
|
+
*/
|
|
352
|
+
failedFiles: FileUploadInfo<'failed'>[];
|
|
353
|
+
} : {
|
|
354
|
+
/**
|
|
355
|
+
* The file that was successfully uploaded.
|
|
356
|
+
*/
|
|
357
|
+
file: FileUploadInfo<'complete'>;
|
|
358
|
+
});
|
|
359
|
+
//#endregion
|
|
360
|
+
export { UploadHookProps as a, FileUploadInfo as c, SignedUrlsSuccessResponse as i, UploadHookControl as l, ObjectMetadata as n, UploadHookReturn as o, ServerMetadata as r, ClientUploadError as s, DirectUploadResult as t, UploadStatus as u };
|
package/package.json
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
1
|
+
{
|
|
2
|
+
"name": "@better-upload/client",
|
|
3
|
+
"description": "The Better Upload client library. Simple and easy file uploads for React.",
|
|
4
|
+
"version": "0.0.0",
|
|
5
|
+
"type": "module",
|
|
6
|
+
"scripts": {
|
|
7
|
+
"build": "tsdown",
|
|
8
|
+
"dev": "tsdown --watch",
|
|
9
|
+
"lint": "tsc"
|
|
10
|
+
},
|
|
11
|
+
"files": [
|
|
12
|
+
"dist"
|
|
13
|
+
],
|
|
14
|
+
"exports": {
|
|
15
|
+
"./package.json": "./package.json",
|
|
16
|
+
".": {
|
|
17
|
+
"import": "./dist/index.js",
|
|
18
|
+
"types": "./dist/index.d.ts"
|
|
19
|
+
},
|
|
20
|
+
"./helpers": {
|
|
21
|
+
"import": "./dist/helpers/index.js",
|
|
22
|
+
"types": "./dist/helpers/index.d.ts"
|
|
23
|
+
},
|
|
24
|
+
"./internal": {
|
|
25
|
+
"import": "./dist/internal-export.js",
|
|
26
|
+
"types": "./dist/internal-export.d.ts"
|
|
27
|
+
}
|
|
28
|
+
},
|
|
29
|
+
"peerDependencies": {
|
|
30
|
+
"react": "*"
|
|
31
|
+
},
|
|
32
|
+
"devDependencies": {
|
|
33
|
+
"@repo/tsconfig": "workspace:*",
|
|
34
|
+
"tsdown": "^0.15.12",
|
|
35
|
+
"typescript": "^5.9.3",
|
|
36
|
+
"react": "^19",
|
|
37
|
+
"@types/react": "^19"
|
|
38
|
+
},
|
|
39
|
+
"keywords": [
|
|
40
|
+
"react",
|
|
41
|
+
"nextjs",
|
|
42
|
+
"tanstack",
|
|
43
|
+
"upload",
|
|
44
|
+
"files",
|
|
45
|
+
"multipart"
|
|
46
|
+
],
|
|
47
|
+
"license": "MIT",
|
|
48
|
+
"homepage": "https://better-upload.com",
|
|
49
|
+
"bugs": {
|
|
50
|
+
"url": "https://github.com/Nic13Gamer/better-upload/issues"
|
|
51
|
+
},
|
|
52
|
+
"author": "Nicholas",
|
|
53
|
+
"repository": {
|
|
54
|
+
"type": "git",
|
|
55
|
+
"url": "git+https://github.com/Nic13Gamer/better-upload.git"
|
|
56
|
+
}
|
|
57
|
+
}
|