@edgestore/react 0.4.0 → 0.5.0
This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their public registry.
- package/dist/errors/index.js +2 -14
- package/dist/index.js +156 -159
- package/dist/shared/index.js +4 -7
- package/dist/utils/index.js +8 -20
- package/package.json +3 -3
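
At a glance, every file under dist/ switches from the 0.4.0 CommonJS build (with its `Object.defineProperty(exports, ...)` preamble and `_interopNamespace` helper) to plain ESM output, the shared chunk is renamed from `uploadAbortedError-fbfcc57b.js` to `uploadAbortedError-a628b025.js`, `getUrl`/`DEFAULT_BASE_URL` in dist/index.js gain a Vite-style `import.meta.env` fallback for runtimes without `process`, and thumbnail URLs are now passed through `getUrl` as well. For consumers the visible difference is the module format; a minimal sketch (the 0.4.0 line assumes the CJS entry exposed `createEdgeStoreProvider` as a named export, which this diff does not show directly):

    // 0.4.0 dist (CommonJS):
    // const { createEdgeStoreProvider } = require('@edgestore/react');

    // 0.5.0 dist (ESM):
    import { createEdgeStoreProvider } from '@edgestore/react';
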
package/dist/errors/index.js
CHANGED
@@ -1,14 +1,2 @@
-
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-var shared = require('@edgestore/shared');
-var uploadAbortedError = require('../uploadAbortedError-fbfcc57b.js');
-
-
-
-Object.defineProperty(exports, 'EdgeStoreApiClientError', {
-    enumerable: true,
-    get: function () { return shared.EdgeStoreApiClientError; }
-});
-exports.UploadAbortedError = uploadAbortedError.UploadAbortedError;
+export { EdgeStoreApiClientError } from '@edgestore/shared';
+export { U as UploadAbortedError } from '../uploadAbortedError-a628b025.js';
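
The errors entry point now simply re-exports both error classes as ESM. A minimal consumer-side sketch of catching them, assuming this dist file is exposed as the `@edgestore/react/errors` subpath (the deprecation note in dist/shared/index.js below points there) and using a hypothetical bucket client shaped like the proxy in dist/index.js:

    import {
      EdgeStoreApiClientError,
      UploadAbortedError,
    } from '@edgestore/react/errors';

    // Hypothetical minimal shape of one bucket on the proxy built by createNextProxy below.
    type BucketClient = {
      upload: (params: { file: File; signal?: AbortSignal }) => Promise<{ url: string }>;
    };

    async function safeUpload(bucket: BucketClient, file: File, signal?: AbortSignal) {
      try {
        return await bucket.upload({ file, signal });
      } catch (err) {
        if (err instanceof UploadAbortedError) {
          return null; // the AbortSignal fired while the file was queued or in flight
        }
        if (err instanceof EdgeStoreApiClientError) {
          console.error('EdgeStore API error', err); // handleError constructs this with { response: json }
        }
        throw err;
      }
    }
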
package/dist/index.js
CHANGED
@@ -1,33 +1,9 @@
-
-
-
-
-var React = require('react');
-var shared = require('@edgestore/shared');
-var uploadAbortedError = require('./uploadAbortedError-fbfcc57b.js');
-
-function _interopNamespace(e) {
-    if (e && e.__esModule) return e;
-    var n = Object.create(null);
-    if (e) {
-        Object.keys(e).forEach(function (k) {
-            if (k !== 'default') {
-                var d = Object.getOwnPropertyDescriptor(e, k);
-                Object.defineProperty(n, k, d.get ? d : {
-                    enumerable: true,
-                    get: function () { return e[k]; }
-                });
-            }
-        });
-    }
-    n["default"] = e;
-    return Object.freeze(n);
-}
-
-var React__namespace = /*#__PURE__*/_interopNamespace(React);
+import * as React from 'react';
+import { EdgeStoreApiClientError } from '@edgestore/shared';
+import { U as UploadAbortedError } from './uploadAbortedError-a628b025.js';

 class EdgeStoreClientError extends Error {
-    constructor(message){
+    constructor(message) {
         super(message);
         this.name = 'EdgeStoreError';
     }
@@ -37,73 +13,72 @@ async function handleError(res) {
     let json = {};
     try {
         json = await res.json();
-    }
+    }
+    catch (err) {
         throw new EdgeStoreClientError(`Failed to parse response. Make sure the api is correctly configured at ${res.url}`);
     }
-    throw new
-        response: json
-    });
+    throw new EdgeStoreApiClientError({ response: json });
 }

-function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5 }) {
+function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5, }) {
     return new Proxy({}, {
-        get
+        get(_, prop) {
             const bucketName = prop;
             const bucketFunctions = {
-                upload: async (params)=>{
+                upload: async (params) => {
                     try {
                         params.onProgressChange?.(0);
                         // This handles the case where the user cancels the upload while it's waiting in the queue
-                        const abortPromise = new Promise((resolve)=>{
-                            params.signal?.addEventListener('abort', ()=>{
+                        const abortPromise = new Promise((resolve) => {
+                            params.signal?.addEventListener('abort', () => {
                                 resolve();
-                            }, {
-                                once: true
-                            });
+                            }, { once: true });
                         });
-                        while(uploadingCountRef.current >= maxConcurrentUploads &&
+                        while (uploadingCountRef.current >= maxConcurrentUploads &&
+                            uploadingCountRef.current > 0) {
                             await Promise.race([
-                                new Promise((resolve)=>setTimeout(resolve, 300)),
-                                abortPromise
+                                new Promise((resolve) => setTimeout(resolve, 300)),
+                                abortPromise,
                             ]);
                             if (params.signal?.aborted) {
-                                throw new
+                                throw new UploadAbortedError('File upload aborted');
                             }
                         }
                         uploadingCountRef.current++;
                         const fileInfo = await uploadFile(params, {
                             bucketName: bucketName,
-                            apiPath
+                            apiPath,
                         });
                         return fileInfo;
-                    }
+                    }
+                    finally {
                         uploadingCountRef.current--;
                     }
                 },
-                confirmUpload: async (params)=>{
+                confirmUpload: async (params) => {
                     const { success } = await confirmUpload(params, {
                         bucketName: bucketName,
-                        apiPath
+                        apiPath,
                     });
                     if (!success) {
                         throw new EdgeStoreClientError('Failed to confirm upload');
                     }
                 },
-                delete: async (params)=>{
+                delete: async (params) => {
                     const { success } = await deleteFile(params, {
                         bucketName: bucketName,
-                        apiPath
+                        apiPath,
                     });
                     if (!success) {
                         throw new EdgeStoreClientError('Failed to delete file');
                     }
-                }
+                },
             };
             return bucketFunctions;
-        }
+        },
     });
 }
-async function uploadFile({ file, signal, input, onProgressChange, options }, { apiPath, bucketName }) {
+async function uploadFile({ file, signal, input, onProgressChange, options, }, { apiPath, bucketName, }) {
     try {
         onProgressChange?.(0);
         const res = await fetch(`${apiPath}/request-upload`, {
@@ -119,17 +94,17 @@ async function uploadFile({ file, signal, input, onProgressChange, options }, {
                     size: file.size,
                     fileName: options?.manualFileName,
                     replaceTargetUrl: options?.replaceTargetUrl,
-                    temporary: options?.temporary
-                }
+                    temporary: options?.temporary,
+                },
             }),
             headers: {
-                'Content-Type': 'application/json'
-            }
+                'Content-Type': 'application/json',
+            },
         });
         if (!res.ok) {
             await handleError(res);
         }
-        const json = await res.json();
+        const json = (await res.json());
         if ('multipart' in json) {
             await multipartUpload({
                 bucketName,
@@ -137,32 +112,37 @@ async function uploadFile({ file, signal, input, onProgressChange, options }, {
                 onProgressChange,
                 signal,
                 file,
-                apiPath
+                apiPath,
             });
-        }
+        }
+        else if ('uploadUrl' in json) {
             // Single part upload
             // Upload the file to the signed URL and get the progress
             await uploadFileInner({
                 file,
                 uploadUrl: json.uploadUrl,
                 onProgressChange,
-                signal
+                signal,
             });
-        }
+        }
+        else {
             throw new EdgeStoreClientError('An error occurred');
         }
         return {
             url: getUrl(json.accessUrl, apiPath),
-            thumbnailUrl: json.thumbnailUrl
+            thumbnailUrl: json.thumbnailUrl
+                ? getUrl(json.thumbnailUrl, apiPath)
+                : null,
             size: json.size,
             uploadedAt: new Date(json.uploadedAt),
             path: json.path,
             pathOrder: json.pathOrder,
-            metadata: json.metadata
+            metadata: json.metadata,
         };
-    }
+    }
+    catch (e) {
         if (e instanceof Error && e.name === 'AbortError') {
-            throw new
+            throw new UploadAbortedError('File upload aborted');
         }
         onProgressChange?.(0);
         throw e;
@@ -172,13 +152,19 @@ async function uploadFile({ file, signal, input, onProgressChange, options }, {
  * Protected files need third-party cookies to work.
  * Since third party cookies don't work on localhost,
  * we need to proxy the file through the server.
- */
-
+ */
+function getUrl(url, apiPath) {
+    const mode = typeof process !== 'undefined'
+        ? process.env.NODE_ENV
+        : // @ts-expect-error - DEV is injected by Vite
+            import.meta.env?.DEV
+                ? 'development'
+                : 'production';
     if (mode === 'development' && !url.includes('/_public/')) {
         const proxyUrl = new URL(window.location.origin);
         proxyUrl.pathname = `${apiPath}/proxy-file`;
         proxyUrl.search = new URLSearchParams({
-            url
+            url,
         }).toString();
         return proxyUrl.toString();
     }
@@ -186,37 +172,37 @@ async function uploadFile({ file, signal, input, onProgressChange, options }, {
 }
 async function uploadFileInner(props) {
     const { file, uploadUrl, onProgressChange, signal } = props;
-    const promise = new Promise((resolve, reject)=>{
+    const promise = new Promise((resolve, reject) => {
         if (signal?.aborted) {
-            reject(new
+            reject(new UploadAbortedError('File upload aborted'));
             return;
         }
         const request = new XMLHttpRequest();
         request.open('PUT', uploadUrl);
         // This is for Azure provider. Specifies the blob type
         request.setRequestHeader('x-ms-blob-type', 'BlockBlob');
-        request.addEventListener('loadstart', ()=>{
+        request.addEventListener('loadstart', () => {
             onProgressChange?.(0);
         });
-        request.upload.addEventListener('progress', (e)=>{
+        request.upload.addEventListener('progress', (e) => {
             if (e.lengthComputable) {
                 // 2 decimal progress
-                const progress = Math.round(e.loaded / e.total * 10000) / 100;
+                const progress = Math.round((e.loaded / e.total) * 10000) / 100;
                 onProgressChange?.(progress);
             }
         });
-        request.addEventListener('error', ()=>{
+        request.addEventListener('error', () => {
             reject(new Error('Error uploading file'));
         });
-        request.addEventListener('abort', ()=>{
-            reject(new
+        request.addEventListener('abort', () => {
+            reject(new UploadAbortedError('File upload aborted'));
         });
-        request.addEventListener('loadend', ()=>{
+        request.addEventListener('loadend', () => {
             // Return the ETag header (needed to complete multipart upload)
             resolve(request.getResponseHeader('ETag'));
         });
         if (signal) {
-            signal.addEventListener('abort', ()=>{
+            signal.addEventListener('abort', () => {
                 request.abort();
             });
         }
@@ -228,44 +214,46 @@ async function multipartUpload(params) {
     const { bucketName, multipartInfo, onProgressChange, file, signal, apiPath } = params;
     const { partSize, parts, totalParts, uploadId, key } = multipartInfo;
     const uploadingParts = [];
-    const uploadPart = async (params)=>{
+    const uploadPart = async (params) => {
         const { part, chunk } = params;
         const { uploadUrl } = part;
         const eTag = await uploadFileInner({
             file: chunk,
             uploadUrl,
             signal,
-            onProgressChange: (progress)=>{
-                const uploadingPart = uploadingParts.find((p)=>p.partNumber === part.partNumber);
+            onProgressChange: (progress) => {
+                const uploadingPart = uploadingParts.find((p) => p.partNumber === part.partNumber);
                 if (uploadingPart) {
                     uploadingPart.progress = progress;
-                }
+                }
+                else {
                     uploadingParts.push({
                         partNumber: part.partNumber,
-                        progress
+                        progress,
                     });
                 }
-                const totalProgress = Math.round(uploadingParts.reduce((acc, p)=>acc + p.progress * 100, 0) /
+                const totalProgress = Math.round(uploadingParts.reduce((acc, p) => acc + p.progress * 100, 0) /
+                    totalParts) / 100;
                 onProgressChange?.(totalProgress);
-            }
+            },
         });
         if (!eTag) {
             throw new EdgeStoreClientError('Could not get ETag from multipart response');
         }
         return {
             partNumber: part.partNumber,
-            eTag
+            eTag,
         };
     };
     // Upload the parts in parallel
     const completedParts = await queuedPromises({
-        items: parts.map((part)=>({
-
-
-
+        items: parts.map((part) => ({
+            part,
+            chunk: file.slice((part.partNumber - 1) * partSize, part.partNumber * partSize),
+        })),
         fn: uploadPart,
         maxParallel: 5,
-        maxRetries: 10
+        maxRetries: 10, // retry 10 times per part
     });
     // Complete multipart upload
     const res = await fetch(`${apiPath}/complete-multipart-upload`, {
@@ -275,100 +263,107 @@ async function multipartUpload(params) {
             bucketName,
             uploadId,
             key,
-            parts: completedParts
+            parts: completedParts,
         }),
         headers: {
-            'Content-Type': 'application/json'
-        }
+            'Content-Type': 'application/json',
+        },
     });
     if (!res.ok) {
         await handleError(res);
     }
 }
-async function confirmUpload({ url }, { apiPath, bucketName }) {
+async function confirmUpload({ url, }, { apiPath, bucketName, }) {
     const res = await fetch(`${apiPath}/confirm-upload`, {
         method: 'POST',
         credentials: 'include',
         body: JSON.stringify({
             url,
-            bucketName
+            bucketName,
         }),
         headers: {
-            'Content-Type': 'application/json'
-        }
+            'Content-Type': 'application/json',
+        },
     });
     if (!res.ok) {
         await handleError(res);
     }
     return res.json();
 }
-async function deleteFile({ url }, { apiPath, bucketName }) {
+async function deleteFile({ url, }, { apiPath, bucketName, }) {
     const res = await fetch(`${apiPath}/delete-file`, {
         method: 'POST',
         credentials: 'include',
         body: JSON.stringify({
             url,
-            bucketName
+            bucketName,
         }),
         headers: {
-            'Content-Type': 'application/json'
-        }
+            'Content-Type': 'application/json',
+        },
     });
     if (!res.ok) {
         await handleError(res);
     }
     return res.json();
 }
-async function queuedPromises({ items, fn, maxParallel, maxRetries = 0 }) {
+async function queuedPromises({ items, fn, maxParallel, maxRetries = 0, }) {
     const results = new Array(items.length);
-    const executeWithRetry = async (func, retries)=>{
+    const executeWithRetry = async (func, retries) => {
         try {
             return await func();
-        }
-
+        }
+        catch (error) {
+            if (error instanceof UploadAbortedError) {
                 throw error;
             }
             if (retries > 0) {
-                await new Promise((resolve)=>setTimeout(resolve, 5000));
+                await new Promise((resolve) => setTimeout(resolve, 5000));
                 return executeWithRetry(func, retries - 1);
-            }
+            }
+            else {
                 throw error;
             }
         }
     };
     const semaphore = {
         count: maxParallel,
-        async wait
+        async wait() {
             // If we've reached our maximum concurrency, or it's the last item, wait
-            while(this.count <= 0)
+            while (this.count <= 0)
+                await new Promise((resolve) => setTimeout(resolve, 500));
             this.count--;
         },
-        signal
+        signal() {
             this.count++;
-        }
+        },
     };
-    const tasks = items.map((item, i)=>(async ()=>{
-
-
-
-
-
-
-
-        }
+    const tasks = items.map((item, i) => (async () => {
+        await semaphore.wait();
+        try {
+            const result = await executeWithRetry(() => fn(item), maxRetries);
+            results[i] = result;
+        }
+        finally {
+            semaphore.signal();
+        }
+    })());
     await Promise.all(tasks);
     return results;
 }

-const DEFAULT_BASE_URL = (typeof process !== 'undefined'
+const DEFAULT_BASE_URL = (typeof process !== 'undefined'
+    ? process.env.NEXT_PUBLIC_EDGE_STORE_BASE_URL
+    : // @ts-expect-error - In Vite, the env variables are available on `import.meta`.
+        import.meta.env?.EDGE_STORE_BASE_URL) ?? 'https://files.edgestore.dev';
 function createEdgeStoreProvider(opts) {
-    const EdgeStoreContext =
-    const EdgeStoreProvider = ({ children, basePath })=>{
+    const EdgeStoreContext = React.createContext(undefined);
+    const EdgeStoreProvider = ({ children, basePath, }) => {
         return EdgeStoreProviderInner({
             children,
             context: EdgeStoreContext,
             basePath,
-            maxConcurrentUploads: opts?.maxConcurrentUploads
+            maxConcurrentUploads: opts?.maxConcurrentUploads,
         });
     };
     function useEdgeStore() {
@@ -376,7 +371,7 @@ function createEdgeStoreProvider(opts) {
             throw new Error('React Context is unavailable in Server Components');
         }
         // @ts-expect-error - We know that the context value should not be undefined
-        const value =
+        const value = React.useContext(EdgeStoreContext);
         if (!value && process.env.NODE_ENV !== 'production') {
             throw new Error('[edgestore]: `useEdgeStore` must be wrapped in a <EdgeStoreProvider />');
         }
@@ -384,37 +379,37 @@ function createEdgeStoreProvider(opts) {
     }
     return {
         EdgeStoreProvider,
-        useEdgeStore
+        useEdgeStore,
     };
 }
-function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUploads }) {
+function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUploads, }) {
     const apiPath = basePath ? `${basePath}` : '/api/edgestore';
-    const [state, setState] =
+    const [state, setState] = React.useState({
         loading: true,
         initialized: false,
-        error: false
+        error: false,
     });
-    const uploadingCountRef =
-    const initExecuted =
-
+    const uploadingCountRef = React.useRef(0);
+    const initExecuted = React.useRef(false); // to make sure we don't run init twice
+    React.useEffect(() => {
         if (!initExecuted.current) {
             void init();
         }
-        return ()=>{
+        return () => {
             initExecuted.current = true;
         };
-
+        // eslint-disable-next-line react-hooks/exhaustive-deps
     }, []);
     async function init() {
         try {
             setState({
                 loading: true,
                 initialized: false,
-                error: false
+                error: false,
             });
             const res = await fetch(`${apiPath}/init`, {
                 method: 'POST',
-                credentials: 'include'
+                credentials: 'include',
            });
             if (res.ok) {
                 const json = await res.json();
@@ -422,37 +417,40 @@ function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUplo
                     method: 'GET',
                     credentials: 'include',
                     headers: {
-                        'x-edgestore-token': json.token
-                    }
+                        'x-edgestore-token': json.token,
+                    },
                 });
                 if (innerRes.ok) {
                     // update state
                     setState({
                         loading: false,
                         initialized: true,
-                        error: false
+                        error: false,
                     });
-                }
+                }
+                else {
                     setState({
                         loading: false,
                         initialized: false,
-                        error: true
+                        error: true,
                     });
                     throw new EdgeStoreClientError("Couldn't initialize EdgeStore.");
                 }
-            }
+            }
+            else {
                 setState({
                     loading: false,
                     initialized: false,
-                    error: true
+                    error: true,
                 });
                 await handleError(res);
             }
-        }
+        }
+        catch (err) {
             setState({
                 loading: false,
                 initialized: false,
-                error: true
+                error: true,
             });
             throw err;
         }
@@ -460,17 +458,16 @@ function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUplo
     async function reset() {
         await init();
     }
-    return
-        value: {
-
-
-
-
-
-
-
-
-    }, children));
+    return (React.createElement(React.Fragment, null,
+        React.createElement(context.Provider, { value: {
+                edgestore: createNextProxy({
+                    apiPath,
+                    uploadingCountRef,
+                    maxConcurrentUploads,
+                }),
+                reset,
+                state,
+            } }, children)));
 }

-
+export { createEdgeStoreProvider };
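
Despite the large textual diff, dist/index.js keeps the same public surface: `createEdgeStoreProvider(opts)` still returns `EdgeStoreProvider` and `useEdgeStore`, and the bucket proxy still exposes `upload`, `confirmUpload`, and `delete`. A minimal consumption sketch, with `myBucket` as a hypothetical bucket name and the router type parameter (not visible in this compiled output) omitted:

    import * as React from 'react';
    import { createEdgeStoreProvider } from '@edgestore/react';

    // `maxConcurrentUploads` defaults to 5 in createNextProxy above.
    const { EdgeStoreProvider, useEdgeStore } = createEdgeStoreProvider({
      maxConcurrentUploads: 3,
    });

    function UploadInput() {
      const { edgestore, state } = useEdgeStore();
      return React.createElement('input', {
        type: 'file',
        disabled: !state.initialized, // reflects the /init handshake above
        onChange: async (e: React.ChangeEvent<HTMLInputElement>) => {
          const file = e.target.files?.[0];
          if (!file) return;
          const res = await edgestore.myBucket.upload({
            file,
            onProgressChange: (progress) => console.log(`${progress}%`),
          });
          console.log('uploaded to', res.url); // also: size, uploadedAt, path, metadata
        },
      });
    }

    // basePath defaults to '/api/edgestore' when not provided.
    export function App() {
      return React.createElement(EdgeStoreProvider, null, React.createElement(UploadInput, null));
    }
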
package/dist/shared/index.js
CHANGED
@@ -1,13 +1,10 @@
-
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
-var shared = require('@edgestore/shared');
+import { EdgeStoreApiClientError as EdgeStoreApiClientError$1 } from '@edgestore/shared';

 // TODO: delete this file on next major release (moved to "errors")
 /**
  * @deprecated import from `@edgestore/react/errors` instead.
- */
+ */
+class EdgeStoreApiClientError extends EdgeStoreApiClientError$1 {
 }

-
+export { EdgeStoreApiClientError };
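
The shared entry point is now only a deprecation shim: it subclasses the error from `@edgestore/shared`, so existing `instanceof` checks keep working while the JSDoc steers imports toward the errors entry point. A sketch of the migration, assuming these dist files are exposed as the `@edgestore/react/shared` and `@edgestore/react/errors` subpaths:

    // Deprecated (still works in 0.5.0):
    // import { EdgeStoreApiClientError } from '@edgestore/react/shared';

    // Preferred:
    import { EdgeStoreApiClientError } from '@edgestore/react/errors';

    export function isApiError(err: unknown): err is EdgeStoreApiClientError {
      return err instanceof EdgeStoreApiClientError;
    }
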
package/dist/utils/index.js
CHANGED
@@ -1,13 +1,10 @@
-'use strict';
-
-Object.defineProperty(exports, '__esModule', { value: true });
-
 /**
  * This will add the necessary query param to the url
  * to make the browser download the file instead of opening it.
  *
  * You can also override the name of the file by passing the name param.
- */
+ */
+function getDownloadUrl(url, name) {
     const urlObj = new URL(url);
     urlObj.searchParams.set('download', name ?? 'true');
     return urlObj.toString();
@@ -16,24 +13,15 @@ Object.defineProperty(exports, '__esModule', { value: true });
  * This will format the file size to a human-readable format.
  *
  * @example 1024 => 1 KB
- */
-
+ */
+function formatFileSize(bytes) {
+    if (!bytes)
+        return '0 B';
     const k = 1024;
     const dm = 2;
-    const sizes = [
-        'B',
-        'KB',
-        'MB',
-        'GB',
-        'TB',
-        'PB',
-        'EB',
-        'ZB',
-        'YB'
-    ];
+    const sizes = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'];
     const i = Math.floor(Math.log(bytes) / Math.log(k));
     return `${parseFloat((bytes / Math.pow(k, i)).toFixed(dm))} ${sizes[i]}`;
 }

-
-exports.getDownloadUrl = getDownloadUrl;
+export { formatFileSize, getDownloadUrl };
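
The two helpers are unchanged apart from becoming named ESM exports. A quick usage sketch, assuming dist/utils is exposed as the `@edgestore/react/utils` subpath (the file URL below is illustrative):

    import { formatFileSize, getDownloadUrl } from '@edgestore/react/utils';

    // Adds the `download` query param so the browser downloads instead of opening the file.
    const href = getDownloadUrl('https://files.edgestore.dev/abc/photo.jpg', 'holiday.jpg');
    // -> 'https://files.edgestore.dev/abc/photo.jpg?download=holiday.jpg'

    console.log(formatFileSize(1024));            // '1 KB' (the JSDoc example above)
    console.log(formatFileSize(3 * 1024 * 1024)); // '3 MB'
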
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@edgestore/react",
-  "version": "0.4.0",
+  "version": "0.5.0",
   "description": "Upload files with ease from React/Next.js",
   "homepage": "https://edgestore.dev",
   "repository": "https://github.com/edgestorejs/edgestore.git",
@@ -66,7 +66,7 @@
   "dependencies": {
     "@aws-sdk/client-s3": "^3.294.0",
     "@aws-sdk/s3-request-presigner": "^3.294.0",
-    "@edgestore/shared": "0.
+    "@edgestore/shared": "0.5.0",
     "@panva/hkdf": "^1.0.4",
     "cookie": "^0.5.0",
     "jose": "^4.13.1",
@@ -88,5 +88,5 @@
     "typescript": "^5",
     "zod": "3.21.4"
   },
-  "gitHead": "
+  "gitHead": "1bc997b2797e026d6661b9964e74c6e117dba352"
 }