@edgestore/react 0.6.0-canary.2 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/contextProvider.js +152 -0
- package/dist/contextProvider.mjs +131 -0
- package/dist/createNextProxy.js +322 -0
- package/dist/createNextProxy.mjs +320 -0
- package/dist/errors/index.js +2 -4
- package/dist/errors/index.mjs +1 -1
- package/dist/index.js +2 -483
- package/dist/index.mjs +1 -464
- package/dist/libs/errors/EdgeStoreClientError.js +10 -0
- package/dist/libs/errors/EdgeStoreClientError.mjs +8 -0
- package/dist/libs/errors/handleError.js +18 -0
- package/dist/libs/errors/handleError.mjs +16 -0
- package/dist/{uploadAbortedError-e1379bb0.mjs → libs/errors/uploadAbortedError.mjs} +1 -1
- package/dist/shared/index.js +0 -2
- package/dist/utils/index.js +0 -2
- package/package.json +25 -20
- package/dist/uploadAbortedError-a628b025.js +0 -8
- /package/dist/{uploadAbortedError-fbfcc57b.js → libs/errors/uploadAbortedError.js} +0 -0
|
@@ -0,0 +1,320 @@
|
|
|
1
|
+
import EdgeStoreClientError from './libs/errors/EdgeStoreClientError.mjs';
|
|
2
|
+
import { handleError } from './libs/errors/handleError.mjs';
|
|
3
|
+
import { UploadAbortedError } from './libs/errors/uploadAbortedError.mjs';
|
|
4
|
+
|
|
5
|
+
/**
 * Creates a proxy whose properties are bucket clients. Accessing any
 * property (the bucket name) yields an object with `upload`,
 * `confirmUpload` and `delete` functions wired to that bucket.
 *
 * @param apiPath - base path of the EdgeStore API routes (e.g. `/api/edgestore`)
 * @param uploadingCountRef - shared mutable ref counting in-flight uploads
 * @param maxConcurrentUploads - queue limit for simultaneous uploads (default 5)
 * @param disableDevProxy - when true, never rewrite URLs through the dev proxy
 */
function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5, disableDevProxy }) {
  return new Proxy({}, {
    get(_, prop) {
      const bucketName = prop;
      const bucketFunctions = {
        upload: async (params) => {
          params.onProgressChange?.(0);
          // Resolves when the user aborts while the upload is still
          // waiting in the queue, so the wait loop below can exit early.
          const abortPromise = new Promise((resolve) => {
            params.signal?.addEventListener('abort', () => {
              resolve();
            }, {
              once: true,
            });
          });
          while (uploadingCountRef.current >= maxConcurrentUploads && uploadingCountRef.current > 0) {
            await Promise.race([
              new Promise((resolve) => setTimeout(resolve, 300)),
              abortPromise,
            ]);
            if (params.signal?.aborted) {
              throw new UploadAbortedError('File upload aborted');
            }
          }
          // Increment only once we actually start uploading, and scope the
          // try/finally to the upload itself. Previously the finally-block
          // decrement also ran when the abort above threw BEFORE the
          // increment, driving the counter negative and letting more than
          // maxConcurrentUploads uploads run at once.
          uploadingCountRef.current++;
          try {
            return await uploadFile(params, {
              bucketName: bucketName,
              apiPath,
            }, disableDevProxy);
          } finally {
            uploadingCountRef.current--;
          }
        },
        confirmUpload: async (params) => {
          const { success } = await confirmUpload(params, {
            bucketName: bucketName,
            apiPath,
          });
          if (!success) {
            throw new EdgeStoreClientError('Failed to confirm upload');
          }
        },
        delete: async (params) => {
          const { success } = await deleteFile(params, {
            bucketName: bucketName,
            apiPath,
          });
          if (!success) {
            throw new EdgeStoreClientError('Failed to delete file');
          }
        },
      };
      return bucketFunctions;
    },
  });
}
|
|
63
|
+
/**
 * Requests a signed upload from the server and uploads the file either as a
 * single PUT or as a multipart upload, depending on the server's response.
 * Reports progress via `onProgressChange` and maps aborts to
 * `UploadAbortedError`. Returns the uploaded file's metadata.
 */
async function uploadFile({ file, signal, input, onProgressChange, options }, { apiPath, bucketName }, disableDevProxy) {
  try {
    onProgressChange?.(0);
    // Ask the server for upload instructions (signed URL or multipart plan).
    const requestBody = {
      bucketName,
      input,
      fileInfo: {
        extension: file.name.split('.').pop(),
        type: file.type,
        size: file.size,
        fileName: options?.manualFileName,
        replaceTargetUrl: options?.replaceTargetUrl,
        temporary: options?.temporary,
      },
    };
    const res = await fetch(`${apiPath}/request-upload`, {
      method: 'POST',
      credentials: 'include',
      signal,
      body: JSON.stringify(requestBody),
      headers: {
        'Content-Type': 'application/json',
      },
    });
    if (!res.ok) {
      await handleError(res);
    }
    const json = await res.json();
    if ('multipart' in json) {
      // Large file: upload parts in parallel, then complete the upload.
      await multipartUpload({
        bucketName,
        multipartInfo: json.multipart,
        onProgressChange,
        signal,
        file,
        apiPath,
      });
    } else if ('uploadUrl' in json) {
      // Single part: PUT the whole file to the signed URL.
      await uploadFileInner({
        file,
        uploadUrl: json.uploadUrl,
        onProgressChange,
        signal,
      });
    } else {
      throw new EdgeStoreClientError('An error occurred');
    }
    return {
      url: getUrl(json.accessUrl, apiPath, disableDevProxy),
      thumbnailUrl: json.thumbnailUrl ? getUrl(json.thumbnailUrl, apiPath, disableDevProxy) : null,
      size: json.size,
      uploadedAt: new Date(json.uploadedAt),
      path: json.path,
      pathOrder: json.pathOrder,
      metadata: json.metadata,
    };
  } catch (e) {
    // Normalize fetch aborts into the library's own error type.
    if (e instanceof Error && e.name === 'AbortError') {
      throw new UploadAbortedError('File upload aborted');
    }
    onProgressChange?.(0);
    throw e;
  }
}
|
|
128
|
+
/**
 * Protected files need third-party cookies to work.
 * Since third party cookies don't work on localhost,
 * we need to proxy the file through the server.
 */
function getUrl(url, apiPath, disableDevProxy) {
  const mode = typeof process !== 'undefined' ? process.env.NODE_ENV : import.meta.env?.DEV ? 'development' : 'production';
  const shouldProxy = mode === 'development' && !url.includes('/_public/') && !disableDevProxy;
  if (!shouldProxy) {
    return url;
  }
  // Route the file through the local proxy-file endpoint.
  const proxied = new URL(window.location.origin);
  proxied.pathname = `${apiPath}/proxy-file`;
  proxied.search = new URLSearchParams({ url }).toString();
  return proxied.toString();
}
|
|
144
|
+
/**
 * PUTs a file (or file chunk) to a signed URL via XMLHttpRequest so upload
 * progress can be observed. Resolves with the response's ETag header
 * (needed to complete multipart uploads); rejects with UploadAbortedError
 * on abort, or Error on network/HTTP failure.
 */
async function uploadFileInner(props) {
  const { file, uploadUrl, onProgressChange, signal } = props;
  return new Promise((resolve, reject) => {
    if (signal?.aborted) {
      reject(new UploadAbortedError('File upload aborted'));
      return;
    }
    const request = new XMLHttpRequest();
    request.open('PUT', uploadUrl);
    // This is for Azure provider. Specifies the blob type
    request.setRequestHeader('x-ms-blob-type', 'BlockBlob');
    request.addEventListener('loadstart', () => {
      onProgressChange?.(0);
    });
    request.upload.addEventListener('progress', (e) => {
      if (e.lengthComputable) {
        // 2 decimal progress
        const progress = Math.round((e.loaded / e.total) * 10000) / 100;
        onProgressChange?.(progress);
      }
    });
    request.addEventListener('error', () => {
      reject(new Error('Error uploading file'));
    });
    request.addEventListener('abort', () => {
      reject(new UploadAbortedError('File upload aborted'));
    });
    request.addEventListener('loadend', () => {
      // Fix: the 'error' event only fires for network failures, so an HTTP
      // 4xx/5xx from the storage provider previously resolved as success.
      // Only resolve for 2xx; otherwise reject. (status 0 means the request
      // was aborted or errored — the promise is already settled by the
      // handlers above, and a second reject is a no-op.)
      if (request.status >= 200 && request.status < 300) {
        // Return the ETag header (needed to complete multipart upload)
        resolve(request.getResponseHeader('ETag'));
      } else {
        reject(new Error(`Error uploading file: HTTP ${request.status}`));
      }
    });
    if (signal) {
      // `once` so the listener doesn't accumulate on a signal that is
      // shared across several uploads (e.g. multipart chunks).
      signal.addEventListener('abort', () => {
        request.abort();
      }, { once: true });
    }
    request.send(file);
  });
}
|
|
184
|
+
/**
 * Uploads a large file as multiple parts in parallel (max 5 at a time,
 * each retried up to 10 times), aggregating per-part progress into a single
 * percentage, then asks the server to complete the multipart upload.
 */
async function multipartUpload(params) {
  const { bucketName, multipartInfo, onProgressChange, file, signal, apiPath } = params;
  const { partSize, parts, totalParts, uploadId, key } = multipartInfo;
  // Per-part progress entries ({ partNumber, progress }), updated as chunks upload.
  const partProgress = [];
  const reportPartProgress = (partNumber, progress) => {
    const existing = partProgress.find((p) => p.partNumber === partNumber);
    if (existing) {
      existing.progress = progress;
    } else {
      partProgress.push({ partNumber, progress });
    }
    // Average the per-part percentages, rounded to 2 decimals.
    const totalProgress = Math.round(partProgress.reduce((acc, p) => acc + p.progress * 100, 0) / totalParts) / 100;
    onProgressChange?.(totalProgress);
  };
  const uploadPart = async ({ part, chunk }) => {
    const eTag = await uploadFileInner({
      file: chunk,
      uploadUrl: part.uploadUrl,
      signal,
      onProgressChange: (progress) => reportPartProgress(part.partNumber, progress),
    });
    if (!eTag) {
      throw new EdgeStoreClientError('Could not get ETag from multipart response');
    }
    return {
      partNumber: part.partNumber,
      eTag,
    };
  };
  // Upload the parts in parallel
  const completedParts = await queuedPromises({
    items: parts.map((part) => ({
      part,
      chunk: file.slice((part.partNumber - 1) * partSize, part.partNumber * partSize),
    })),
    fn: uploadPart,
    maxParallel: 5,
    maxRetries: 10,
  });
  // Complete multipart upload
  const res = await fetch(`${apiPath}/complete-multipart-upload`, {
    method: 'POST',
    credentials: 'include',
    body: JSON.stringify({
      bucketName,
      uploadId,
      key,
      parts: completedParts,
    }),
    headers: {
      'Content-Type': 'application/json',
    },
  });
  if (!res.ok) {
    await handleError(res);
  }
}
|
|
245
|
+
/**
 * Tells the server that a (temporary) upload should be kept.
 * Resolves with the server's JSON response (e.g. `{ success }`).
 */
async function confirmUpload({ url }, { apiPath, bucketName }) {
  const payload = JSON.stringify({
    url,
    bucketName,
  });
  const res = await fetch(`${apiPath}/confirm-upload`, {
    method: 'POST',
    credentials: 'include',
    body: payload,
    headers: {
      'Content-Type': 'application/json',
    },
  });
  if (!res.ok) {
    await handleError(res);
  }
  return res.json();
}
|
|
262
|
+
/**
 * Asks the server to delete the file at `url` from the given bucket.
 * Resolves with the server's JSON response (e.g. `{ success }`).
 */
async function deleteFile({ url }, { apiPath, bucketName }) {
  const payload = JSON.stringify({
    url,
    bucketName,
  });
  const res = await fetch(`${apiPath}/delete-file`, {
    method: 'POST',
    credentials: 'include',
    body: payload,
    headers: {
      'Content-Type': 'application/json',
    },
  });
  if (!res.ok) {
    await handleError(res);
  }
  return res.json();
}
|
|
279
|
+
/**
 * Runs `fn` over `items` with at most `maxParallel` calls in flight at once,
 * retrying each failed call up to `maxRetries` times (5s between attempts).
 * UploadAbortedError is never retried — aborts are deliberate.
 * Resolves with results in the same order as `items`.
 */
async function queuedPromises({ items, fn, maxParallel, maxRetries = 0 }) {
  const results = new Array(items.length);
  const executeWithRetry = async (func, retries) => {
    try {
      return await func();
    } catch (error) {
      if (error instanceof UploadAbortedError) {
        throw error;
      }
      if (retries > 0) {
        await new Promise((resolve) => setTimeout(resolve, 5000));
        return executeWithRetry(func, retries - 1);
      }
      throw error;
    }
  };
  // Promise-based counting semaphore. The previous implementation polled a
  // counter on a 500ms timer, adding up to 500ms of latency every time a
  // slot freed up; here a released slot is handed directly to the next
  // waiter (FIFO) with no polling.
  let available = maxParallel;
  const waiters = [];
  const acquire = async () => {
    if (available > 0) {
      available--;
      return;
    }
    await new Promise((resolve) => waiters.push(resolve));
  };
  const release = () => {
    const next = waiters.shift();
    if (next) {
      next(); // pass the slot straight to the next queued task
    } else {
      available++;
    }
  };
  const tasks = items.map((item, i) => (async () => {
    await acquire();
    try {
      results[i] = await executeWithRetry(() => fn(item), maxRetries);
    } finally {
      release();
    }
  })());
  await Promise.all(tasks);
  return results;
}
|
|
319
|
+
|
|
320
|
+
export { createNextProxy };
|
package/dist/errors/index.js
CHANGED
|
@@ -1,13 +1,11 @@
|
|
|
1
1
|
'use strict';
|
|
2
2
|
|
|
3
|
-
Object.defineProperty(exports, '__esModule', { value: true });
|
|
4
|
-
|
|
5
3
|
var shared = require('@edgestore/shared');
|
|
6
|
-
var uploadAbortedError = require('../uploadAbortedError-fbfcc57b.js');
|
|
4
|
+
var uploadAbortedError = require('../libs/errors/uploadAbortedError.js');
|
|
7
5
|
|
|
8
6
|
|
|
9
7
|
|
|
10
|
-
Object.defineProperty(exports,
|
|
8
|
+
Object.defineProperty(exports, "EdgeStoreApiClientError", {
|
|
11
9
|
enumerable: true,
|
|
12
10
|
get: function () { return shared.EdgeStoreApiClientError; }
|
|
13
11
|
});
|
package/dist/errors/index.mjs
CHANGED
|
@@ -1,2 +1,2 @@
|
|
|
1
1
|
export { EdgeStoreApiClientError } from '@edgestore/shared';
|
|
2
|
-
export { UploadAbortedError } from './uploadAbortedError-e1379bb0.mjs';
|
|
2
|
+
export { UploadAbortedError } from '../libs/errors/uploadAbortedError.mjs';
|