@edgestore/react 0.5.0 → 0.5.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/errors/index.js +14 -2
- package/dist/index.js +159 -156
- package/dist/shared/index.js +7 -4
- package/dist/utils/index.js +20 -8
- package/package.json +3 -3
package/dist/errors/index.js
CHANGED
@@ -1,2 +1,14 @@
-
-
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var shared = require('@edgestore/shared');
+var uploadAbortedError = require('../uploadAbortedError-fbfcc57b.js');
+
+
+
+Object.defineProperty(exports, 'EdgeStoreApiClientError', {
+enumerable: true,
+get: function () { return shared.EdgeStoreApiClientError; }
+});
+exports.UploadAbortedError = uploadAbortedError.UploadAbortedError;
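The new entry point above re-exports both error classes, so application code can import them directly. A minimal consumer-side sketch (TypeScript, assuming the `@edgestore/react/errors` subpath named in the deprecation comment in dist/shared/index.js further down):

  // Sketch only: importing the re-exported error classes.
  import {
    EdgeStoreApiClientError,
    UploadAbortedError,
  } from '@edgestore/react/errors';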
package/dist/index.js
CHANGED
@@ -1,9 +1,33 @@
-
-
-
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var React = require('react');
+var shared = require('@edgestore/shared');
+var uploadAbortedError = require('./uploadAbortedError-fbfcc57b.js');
+
+function _interopNamespace(e) {
+if (e && e.__esModule) return e;
+var n = Object.create(null);
+if (e) {
+Object.keys(e).forEach(function (k) {
+if (k !== 'default') {
+var d = Object.getOwnPropertyDescriptor(e, k);
+Object.defineProperty(n, k, d.get ? d : {
+enumerable: true,
+get: function () { return e[k]; }
+});
+}
+});
+}
+n["default"] = e;
+return Object.freeze(n);
+}
+
+var React__namespace = /*#__PURE__*/_interopNamespace(React);
 
 class EdgeStoreClientError extends Error {
-constructor(message)
+constructor(message){
 super(message);
 this.name = 'EdgeStoreError';
 }
@@ -13,72 +37,73 @@ async function handleError(res) {
 let json = {};
 try {
 json = await res.json();
-}
-catch (err) {
+} catch (err) {
 throw new EdgeStoreClientError(`Failed to parse response. Make sure the api is correctly configured at ${res.url}`);
 }
-throw new EdgeStoreApiClientError({
+throw new shared.EdgeStoreApiClientError({
+response: json
+});
 }
 
-function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5
+function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5 }) {
 return new Proxy({}, {
-get(_, prop) {
+get (_, prop) {
 const bucketName = prop;
 const bucketFunctions = {
-upload: async (params)
+upload: async (params)=>{
 try {
 params.onProgressChange?.(0);
 // This handles the case where the user cancels the upload while it's waiting in the queue
-const abortPromise = new Promise((resolve)
-params.signal?.addEventListener('abort', ()
+const abortPromise = new Promise((resolve)=>{
+params.signal?.addEventListener('abort', ()=>{
 resolve();
-}, {
+}, {
+once: true
+});
 });
-while
-uploadingCountRef.current > 0) {
+while(uploadingCountRef.current >= maxConcurrentUploads && uploadingCountRef.current > 0){
 await Promise.race([
-new Promise((resolve)
-abortPromise
+new Promise((resolve)=>setTimeout(resolve, 300)),
+abortPromise
 ]);
 if (params.signal?.aborted) {
-throw new UploadAbortedError('File upload aborted');
+throw new uploadAbortedError.UploadAbortedError('File upload aborted');
 }
 }
 uploadingCountRef.current++;
 const fileInfo = await uploadFile(params, {
 bucketName: bucketName,
-apiPath
+apiPath
 });
 return fileInfo;
-}
-finally {
+} finally{
 uploadingCountRef.current--;
 }
 },
-confirmUpload: async (params)
+confirmUpload: async (params)=>{
 const { success } = await confirmUpload(params, {
 bucketName: bucketName,
-apiPath
+apiPath
 });
 if (!success) {
 throw new EdgeStoreClientError('Failed to confirm upload');
 }
 },
-delete: async (params)
+delete: async (params)=>{
 const { success } = await deleteFile(params, {
 bucketName: bucketName,
-apiPath
+apiPath
 });
 if (!success) {
 throw new EdgeStoreClientError('Failed to delete file');
 }
-}
+}
 };
 return bucketFunctions;
-}
+}
 });
 }
-async function uploadFile({ file, signal, input, onProgressChange, options
+async function uploadFile({ file, signal, input, onProgressChange, options }, { apiPath, bucketName }) {
 try {
 onProgressChange?.(0);
 const res = await fetch(`${apiPath}/request-upload`, {
@@ -94,17 +119,17 @@ async function uploadFile({ file, signal, input, onProgressChange, options, }, {
 size: file.size,
 fileName: options?.manualFileName,
 replaceTargetUrl: options?.replaceTargetUrl,
-temporary: options?.temporary
-}
+temporary: options?.temporary
+}
 }),
 headers: {
-'Content-Type': 'application/json'
-}
+'Content-Type': 'application/json'
+}
 });
 if (!res.ok) {
 await handleError(res);
 }
-const json =
+const json = await res.json();
 if ('multipart' in json) {
 await multipartUpload({
 bucketName,
@@ -112,37 +137,32 @@ async function uploadFile({ file, signal, input, onProgressChange, options, }, {
 onProgressChange,
 signal,
 file,
-apiPath
+apiPath
 });
-}
-else if ('uploadUrl' in json) {
+} else if ('uploadUrl' in json) {
 // Single part upload
 // Upload the file to the signed URL and get the progress
 await uploadFileInner({
 file,
 uploadUrl: json.uploadUrl,
 onProgressChange,
-signal
+signal
 });
-}
-else {
+} else {
 throw new EdgeStoreClientError('An error occurred');
 }
 return {
 url: getUrl(json.accessUrl, apiPath),
-thumbnailUrl: json.thumbnailUrl
-? getUrl(json.thumbnailUrl, apiPath)
-: null,
+thumbnailUrl: json.thumbnailUrl ? getUrl(json.thumbnailUrl, apiPath) : null,
 size: json.size,
 uploadedAt: new Date(json.uploadedAt),
 path: json.path,
 pathOrder: json.pathOrder,
-metadata: json.metadata
+metadata: json.metadata
 };
-}
-catch (e) {
+} catch (e) {
 if (e instanceof Error && e.name === 'AbortError') {
-throw new UploadAbortedError('File upload aborted');
+throw new uploadAbortedError.UploadAbortedError('File upload aborted');
 }
 onProgressChange?.(0);
 throw e;
@@ -152,19 +172,13 @@ async function uploadFile({ file, signal, input, onProgressChange, options, }, {
 * Protected files need third-party cookies to work.
 * Since third party cookies don't work on localhost,
 * we need to proxy the file through the server.
-*/
-
-const mode = typeof process !== 'undefined'
-? process.env.NODE_ENV
-: // @ts-expect-error - DEV is injected by Vite
-import.meta.env?.DEV
-? 'development'
-: 'production';
+*/ function getUrl(url, apiPath) {
+const mode = typeof process !== 'undefined' ? process.env.NODE_ENV : undefined?.DEV ? 'development' : 'production';
 if (mode === 'development' && !url.includes('/_public/')) {
 const proxyUrl = new URL(window.location.origin);
 proxyUrl.pathname = `${apiPath}/proxy-file`;
 proxyUrl.search = new URLSearchParams({
-url
+url
 }).toString();
 return proxyUrl.toString();
 }
@@ -172,37 +186,37 @@ function getUrl(url, apiPath) {
 }
 async function uploadFileInner(props) {
 const { file, uploadUrl, onProgressChange, signal } = props;
-const promise = new Promise((resolve, reject)
+const promise = new Promise((resolve, reject)=>{
 if (signal?.aborted) {
-reject(new UploadAbortedError('File upload aborted'));
+reject(new uploadAbortedError.UploadAbortedError('File upload aborted'));
 return;
 }
 const request = new XMLHttpRequest();
 request.open('PUT', uploadUrl);
 // This is for Azure provider. Specifies the blob type
 request.setRequestHeader('x-ms-blob-type', 'BlockBlob');
-request.addEventListener('loadstart', ()
+request.addEventListener('loadstart', ()=>{
 onProgressChange?.(0);
 });
-request.upload.addEventListener('progress', (e)
+request.upload.addEventListener('progress', (e)=>{
 if (e.lengthComputable) {
 // 2 decimal progress
-const progress = Math.round(
+const progress = Math.round(e.loaded / e.total * 10000) / 100;
 onProgressChange?.(progress);
 }
 });
-request.addEventListener('error', ()
+request.addEventListener('error', ()=>{
 reject(new Error('Error uploading file'));
 });
-request.addEventListener('abort', ()
-reject(new UploadAbortedError('File upload aborted'));
+request.addEventListener('abort', ()=>{
+reject(new uploadAbortedError.UploadAbortedError('File upload aborted'));
 });
-request.addEventListener('loadend', ()
+request.addEventListener('loadend', ()=>{
 // Return the ETag header (needed to complete multipart upload)
 resolve(request.getResponseHeader('ETag'));
 });
 if (signal) {
-signal.addEventListener('abort', ()
+signal.addEventListener('abort', ()=>{
 request.abort();
 });
 }
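The rewritten progress handler above reports a two-decimal percentage: `Math.round(e.loaded / e.total * 10000) / 100`. For example, with 3 MB of an 8 MB part transferred, 3 / 8 * 10000 = 3750, which rounds to 3750 and yields a reported progress of 37.5.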
@@ -214,46 +228,44 @@ async function multipartUpload(params) {
 const { bucketName, multipartInfo, onProgressChange, file, signal, apiPath } = params;
 const { partSize, parts, totalParts, uploadId, key } = multipartInfo;
 const uploadingParts = [];
-const uploadPart = async (params)
+const uploadPart = async (params)=>{
 const { part, chunk } = params;
 const { uploadUrl } = part;
 const eTag = await uploadFileInner({
 file: chunk,
 uploadUrl,
 signal,
-onProgressChange: (progress)
-const uploadingPart = uploadingParts.find((p)
+onProgressChange: (progress)=>{
+const uploadingPart = uploadingParts.find((p)=>p.partNumber === part.partNumber);
 if (uploadingPart) {
 uploadingPart.progress = progress;
-}
-else {
+} else {
 uploadingParts.push({
 partNumber: part.partNumber,
-progress
+progress
 });
 }
-const totalProgress = Math.round(uploadingParts.reduce((acc, p)
-totalParts) / 100;
+const totalProgress = Math.round(uploadingParts.reduce((acc, p)=>acc + p.progress * 100, 0) / totalParts) / 100;
 onProgressChange?.(totalProgress);
-}
+}
 });
 if (!eTag) {
 throw new EdgeStoreClientError('Could not get ETag from multipart response');
 }
 return {
 partNumber: part.partNumber,
-eTag
+eTag
 };
 };
 // Upload the parts in parallel
 const completedParts = await queuedPromises({
-items: parts.map((part)
-
-
-
+items: parts.map((part)=>({
+part,
+chunk: file.slice((part.partNumber - 1) * partSize, part.partNumber * partSize)
+})),
 fn: uploadPart,
 maxParallel: 5,
-maxRetries: 10
+maxRetries: 10
 });
 // Complete multipart upload
 const res = await fetch(`${apiPath}/complete-multipart-upload`, {
@@ -263,107 +275,100 @@ async function multipartUpload(params) {
 bucketName,
 uploadId,
 key,
-parts: completedParts
+parts: completedParts
 }),
 headers: {
-'Content-Type': 'application/json'
-}
+'Content-Type': 'application/json'
+}
 });
 if (!res.ok) {
 await handleError(res);
 }
 }
-async function confirmUpload({ url
+async function confirmUpload({ url }, { apiPath, bucketName }) {
 const res = await fetch(`${apiPath}/confirm-upload`, {
 method: 'POST',
 credentials: 'include',
 body: JSON.stringify({
 url,
-bucketName
+bucketName
 }),
 headers: {
-'Content-Type': 'application/json'
-}
+'Content-Type': 'application/json'
+}
 });
 if (!res.ok) {
 await handleError(res);
 }
 return res.json();
 }
-async function deleteFile({ url
+async function deleteFile({ url }, { apiPath, bucketName }) {
 const res = await fetch(`${apiPath}/delete-file`, {
 method: 'POST',
 credentials: 'include',
 body: JSON.stringify({
 url,
-bucketName
+bucketName
 }),
 headers: {
-'Content-Type': 'application/json'
-}
+'Content-Type': 'application/json'
+}
 });
 if (!res.ok) {
 await handleError(res);
 }
 return res.json();
 }
-async function queuedPromises({ items, fn, maxParallel, maxRetries = 0
+async function queuedPromises({ items, fn, maxParallel, maxRetries = 0 }) {
 const results = new Array(items.length);
-const executeWithRetry = async (func, retries)
+const executeWithRetry = async (func, retries)=>{
 try {
 return await func();
-}
-
-if (error instanceof UploadAbortedError) {
+} catch (error) {
+if (error instanceof uploadAbortedError.UploadAbortedError) {
 throw error;
 }
 if (retries > 0) {
-await new Promise((resolve)
+await new Promise((resolve)=>setTimeout(resolve, 5000));
 return executeWithRetry(func, retries - 1);
-}
-else {
+} else {
 throw error;
 }
 }
 };
 const semaphore = {
 count: maxParallel,
-async wait() {
+async wait () {
 // If we've reached our maximum concurrency, or it's the last item, wait
-while
-await new Promise((resolve) => setTimeout(resolve, 500));
+while(this.count <= 0)await new Promise((resolve)=>setTimeout(resolve, 500));
 this.count--;
 },
-signal() {
+signal () {
 this.count++;
-},
-};
-const tasks = items.map((item, i) => (async () => {
-await semaphore.wait();
-try {
-const result = await executeWithRetry(() => fn(item), maxRetries);
-results[i] = result;
-}
-finally {
-semaphore.signal();
 }
-}
+};
+const tasks = items.map((item, i)=>(async ()=>{
+await semaphore.wait();
+try {
+const result = await executeWithRetry(()=>fn(item), maxRetries);
+results[i] = result;
+} finally{
+semaphore.signal();
+}
+})());
 await Promise.all(tasks);
 return results;
 }
 
-const DEFAULT_BASE_URL = (typeof process !== 'undefined'
-? process.env.NEXT_PUBLIC_EDGE_STORE_BASE_URL
-: // @ts-expect-error - In Vite, the env variables are available on `import.meta`.
-import.meta.env?.EDGE_STORE_BASE_URL) ?? 'https://files.edgestore.dev';
+const DEFAULT_BASE_URL = (typeof process !== 'undefined' ? process.env.NEXT_PUBLIC_EDGE_STORE_BASE_URL : undefined?.EDGE_STORE_BASE_URL) ?? 'https://files.edgestore.dev';
 function createEdgeStoreProvider(opts) {
-const EdgeStoreContext =
-const EdgeStoreProvider = ({ children, basePath
+const EdgeStoreContext = /*#__PURE__*/ React__namespace.createContext(undefined);
+const EdgeStoreProvider = ({ children, basePath })=>{
 return EdgeStoreProviderInner({
 children,
 context: EdgeStoreContext,
 basePath,
-maxConcurrentUploads: opts?.maxConcurrentUploads
+maxConcurrentUploads: opts?.maxConcurrentUploads
 });
 };
 function useEdgeStore() {
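The `queuedPromises` helper in the hunk above caps concurrency with a polled counting semaphore and retries failed parts (aborts are re-thrown immediately). A standalone sketch of the same bounded-concurrency pattern, with assumed names and timings rather than the package's API:

  // Illustrative only: bounded-concurrency queue similar in spirit to queuedPromises.
  // runQueued, maxParallel and the 50 ms poll interval are assumptions, not library exports.
  async function runQueued<T, R>(
    items: T[],
    fn: (item: T) => Promise<R>,
    maxParallel: number,
  ): Promise<R[]> {
    const results = new Array<R>(items.length);
    let active = 0; // counting semaphore, like `semaphore.count`
    const tasks = items.map(async (item, i) => {
      // wait for a free slot, polling like the original `wait()`
      while (active >= maxParallel) {
        await new Promise((resolve) => setTimeout(resolve, 50));
      }
      active++;
      try {
        results[i] = await fn(item); // the retry wrapper is omitted here
      } finally {
        active--; // release the slot, like `signal()`
      }
    });
    await Promise.all(tasks);
    return results;
  }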
@@ -371,7 +376,7 @@ function createEdgeStoreProvider(opts) {
 throw new Error('React Context is unavailable in Server Components');
 }
 // @ts-expect-error - We know that the context value should not be undefined
-const value =
+const value = React__namespace.useContext(EdgeStoreContext);
 if (!value && process.env.NODE_ENV !== 'production') {
 throw new Error('[edgestore]: `useEdgeStore` must be wrapped in a <EdgeStoreProvider />');
 }
@@ -379,37 +384,37 @@ function createEdgeStoreProvider(opts) {
 }
 return {
 EdgeStoreProvider,
-useEdgeStore
+useEdgeStore
 };
 }
-function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUploads
+function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUploads }) {
 const apiPath = basePath ? `${basePath}` : '/api/edgestore';
-const [state, setState] =
+const [state, setState] = React__namespace.useState({
 loading: true,
 initialized: false,
-error: false
+error: false
 });
-const uploadingCountRef =
-const initExecuted =
-
+const uploadingCountRef = React__namespace.useRef(0);
+const initExecuted = React__namespace.useRef(false); // to make sure we don't run init twice
+React__namespace.useEffect(()=>{
 if (!initExecuted.current) {
 void init();
 }
-return ()
+return ()=>{
 initExecuted.current = true;
 };
-
+// eslint-disable-next-line react-hooks/exhaustive-deps
 }, []);
 async function init() {
 try {
 setState({
 loading: true,
 initialized: false,
-error: false
+error: false
 });
 const res = await fetch(`${apiPath}/init`, {
 method: 'POST',
-credentials: 'include'
+credentials: 'include'
 });
 if (res.ok) {
 const json = await res.json();
@@ -417,40 +422,37 @@ function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUplo
 method: 'GET',
 credentials: 'include',
 headers: {
-'x-edgestore-token': json.token
-}
+'x-edgestore-token': json.token
+}
 });
 if (innerRes.ok) {
 // update state
 setState({
 loading: false,
 initialized: true,
-error: false
+error: false
 });
-}
-else {
+} else {
 setState({
 loading: false,
 initialized: false,
-error: true
+error: true
 });
 throw new EdgeStoreClientError("Couldn't initialize EdgeStore.");
 }
-}
-else {
+} else {
 setState({
 loading: false,
 initialized: false,
-error: true
+error: true
 });
 await handleError(res);
 }
-}
-catch (err) {
+} catch (err) {
 setState({
 loading: false,
 initialized: false,
-error: true
+error: true
 });
 throw err;
 }
@@ -458,16 +460,17 @@ function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUplo
 async function reset() {
 await init();
 }
-return
-
-
-
-
-
-
-
-
-
+return /*#__PURE__*/ React__namespace.createElement(React__namespace.Fragment, null, /*#__PURE__*/ React__namespace.createElement(context.Provider, {
+value: {
+edgestore: createNextProxy({
+apiPath,
+uploadingCountRef,
+maxConcurrentUploads
+}),
+reset,
+state
+}
+}, children));
 }
 
-
+exports.createEdgeStoreProvider = createEdgeStoreProvider;
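Taken together, the compiled module above exports `createEdgeStoreProvider`, whose provider wires the bucket proxy, `reset` and `state` into React context, and whose per-bucket `upload` call accepts `file`, `signal`, `input`, `onProgressChange` and `options`. A hedged consumer-side sketch (TypeScript/JSX; `EdgeStoreRouter`, its import path, and the `publicFiles` bucket are hypothetical app-specific names, and the component must render inside the returned `EdgeStoreProvider`):

  // Sketch only; bucket and router names are placeholders, not part of this package.
  import { createEdgeStoreProvider } from '@edgestore/react';
  import { UploadAbortedError } from '@edgestore/react/errors';
  import type { EdgeStoreRouter } from '../lib/edgestore-server'; // hypothetical

  const { EdgeStoreProvider, useEdgeStore } =
    createEdgeStoreProvider<EdgeStoreRouter>({ maxConcurrentUploads: 3 });

  function UploadButton({ file }: { file: File }) {
    const { edgestore } = useEdgeStore();
    const controller = new AbortController();

    async function handleUpload() {
      try {
        const res = await edgestore.publicFiles.upload({
          file,
          signal: controller.signal, // cancels even while waiting in the queue
          onProgressChange: (progress) => console.log(`${progress}%`),
        });
        console.log('uploaded to', res.url);
      } catch (err) {
        if (err instanceof UploadAbortedError) {
          console.log('upload cancelled'); // thrown by the compiled code above on abort
        } else {
          throw err;
        }
      }
    }

    return <button onClick={() => void handleUpload()}>Upload</button>;
  }

  export { EdgeStoreProvider, UploadButton };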
package/dist/shared/index.js
CHANGED
@@ -1,10 +1,13 @@
-
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+var shared = require('@edgestore/shared');
 
 // TODO: delete this file on next major release (moved to "errors")
 /**
 * @deprecated import from `@edgestore/react/errors` instead.
-*/
-class EdgeStoreApiClientError extends EdgeStoreApiClientError$1 {
+*/ class EdgeStoreApiClientError extends shared.EdgeStoreApiClientError {
 }
 
-
+exports.EdgeStoreApiClientError = EdgeStoreApiClientError;
package/dist/utils/index.js
CHANGED
@@ -1,10 +1,13 @@
+'use strict';
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
 /**
 * This will add the necessary query param to the url
 * to make the browser download the file instead of opening it.
 *
 * You can also override the name of the file by passing the name param.
-*/
-function getDownloadUrl(url, name) {
+*/ function getDownloadUrl(url, name) {
 const urlObj = new URL(url);
 urlObj.searchParams.set('download', name ?? 'true');
 return urlObj.toString();
@@ -13,15 +16,24 @@ function getDownloadUrl(url, name) {
 * This will format the file size to a human-readable format.
 *
 * @example 1024 => 1 KB
-*/
-
-if (!bytes)
-return '0 B';
+*/ function formatFileSize(bytes) {
+if (!bytes) return '0 B';
 const k = 1024;
 const dm = 2;
-const sizes = [
+const sizes = [
+'B',
+'KB',
+'MB',
+'GB',
+'TB',
+'PB',
+'EB',
+'ZB',
+'YB'
+];
 const i = Math.floor(Math.log(bytes) / Math.log(k));
 return `${parseFloat((bytes / Math.pow(k, i)).toFixed(dm))} ${sizes[i]}`;
 }
 
-
+exports.formatFileSize = formatFileSize;
+exports.getDownloadUrl = getDownloadUrl;
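Both helpers are now exported from the compiled utils module. A small sketch of how they behave (TypeScript; the `@edgestore/react/utils` subpath is an assumption mirroring the errors entry point, and the file URL is a placeholder):

  // Sketch only.
  import { formatFileSize, getDownloadUrl } from '@edgestore/react/utils';

  // Adds a `download` query param so the browser downloads instead of opening the file,
  // optionally overriding the file name.
  const href = getDownloadUrl('https://files.edgestore.dev/abc/img.png', 'avatar.png');

  console.log(formatFileSize(1536)); // "1.5 KB" (1024-based units, at most two decimals)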
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
 "name": "@edgestore/react",
-"version": "0.5.
+"version": "0.5.1",
 "description": "Upload files with ease from React/Next.js",
 "homepage": "https://edgestore.dev",
 "repository": "https://github.com/edgestorejs/edgestore.git",
@@ -66,7 +66,7 @@
 "dependencies": {
 "@aws-sdk/client-s3": "^3.294.0",
 "@aws-sdk/s3-request-presigner": "^3.294.0",
-"@edgestore/shared": "0.5.
+"@edgestore/shared": "0.5.1",
 "@panva/hkdf": "^1.0.4",
 "cookie": "^0.5.0",
 "jose": "^4.13.1",
@@ -88,5 +88,5 @@
 "typescript": "^5",
 "zod": "3.21.4"
 },
-"gitHead": "
+"gitHead": "0a04ff24dfb62f749cc970794b3d239c75f48fed"
 }
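Because `@edgestore/react` pins `@edgestore/shared` to the same exact version, updating the React package (for example `npm install @edgestore/react@0.5.1`, or the equivalent command in your package manager) also pulls in the matching shared build.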