@edgestore/react 0.1.1 → 0.1.3-alpha.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -144,7 +144,7 @@ export default function Page() {
  By passing the `replaceTargetUrl` option, you can replace an existing file with a new one.
  It will automatically delete the old file after the upload is complete.

- You can also just upload the file using the same file name, but in that case, you might still see the old file for a while becasue of the CDN cache.
+ You can also just upload the file using the same file name, but in that case, you might still see the old file for a while because of the CDN cache.

  ```tsx
  const res = await edgestore.publicFiles.upload({
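A minimal sketch of how the `replaceTargetUrl` option from this README hunk can be used; the `publicFiles` bucket and the `oldFileUrl` variable are illustrative assumptions, not part of the diff:

```tsx
// Hypothetical usage sketch: upload a new file and let Edge Store
// delete the previous one once the upload completes.
const res = await edgestore.publicFiles.upload({
  file, // a File picked by the user
  options: {
    replaceTargetUrl: oldFileUrl, // URL of the file being replaced (assumption)
  },
});
console.log(res.url); // URL of the newly uploaded file
```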
@@ -4,10 +4,12 @@ import { type BucketFunctions } from './createNextProxy';
  type EdgeStoreContextValue<TRouter extends AnyRouter> = {
  edgestore: BucketFunctions<TRouter>;
  /**
- * In development, if this is a protected file, this function will add the token as a query param to the url.
- * This is needed because third party cookies don't work with http urls.
+ * This will re-run the Edge Store initialization process,
+ * which will run the `createContext` function again.
+ *
+ * Can be used after a sign-in or sign-out, for example.
  */
- getSrc: (url: string) => string;
+ reset: () => Promise<void>;
  };
  export declare function createEdgeStoreProvider<TRouter extends AnyRouter>(opts?: {
  /**
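As a usage note on the new `reset` function: a minimal sketch, assuming the hook returned by `createEdgeStoreProvider` is named `useEdgeStore` and exposes this context value, and assuming an application-specific `signIn` helper. Calling `reset` after an auth change re-runs the `/init` request so `createContext` sees the new session.

```tsx
// Hedged sketch: `useEdgeStore` and `signIn` are assumptions, not shown in this diff.
function SignInButton() {
  const { reset } = useEdgeStore();

  async function handleSignIn() {
    await signIn(); // your auth flow
    await reset();  // re-runs Edge Store initialization with the new session
  }

  return <button onClick={handleSignIn}>Sign in</button>;
}
```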
@@ -1 +1 @@
- {"version":3,"file":"contextProvider.d.ts","sourceRoot":"","sources":["../src/contextProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,SAAS,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAmB,KAAK,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAK1E,KAAK,qBAAqB,CAAC,OAAO,SAAS,SAAS,IAAI;IACtD,SAAS,EAAE,eAAe,CAAC,OAAO,CAAC,CAAC;IACpC;;;OAGG;IACH,MAAM,EAAE,CAAC,GAAG,EAAE,MAAM,KAAK,MAAM,CAAC;CACjC,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,OAAO,SAAS,SAAS,EAAE,IAAI,CAAC,EAAE;IACxE;;;;;;OAMG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B;;kBASa,MAAM,SAAS;QACzB;;;;;;WAMG;;;;EAgCN"}
+ {"version":3,"file":"contextProvider.d.ts","sourceRoot":"","sources":["../src/contextProvider.tsx"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,SAAS,EAAE,MAAM,wBAAwB,CAAC;AACxD,OAAO,KAAK,KAAK,MAAM,OAAO,CAAC;AAC/B,OAAO,EAAmB,KAAK,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAK1E,KAAK,qBAAqB,CAAC,OAAO,SAAS,SAAS,IAAI;IACtD,SAAS,EAAE,eAAe,CAAC,OAAO,CAAC,CAAC;IACpC;;;;;OAKG;IACH,KAAK,EAAE,MAAM,OAAO,CAAC,IAAI,CAAC,CAAC;CAC5B,CAAC;AAEF,wBAAgB,uBAAuB,CAAC,OAAO,SAAS,SAAS,EAAE,IAAI,CAAC,EAAE;IACxE;;;;;;OAMG;IACH,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B;;kBASa,MAAM,SAAS;QACzB;;;;;;WAMG;;;;EAgCN"}
@@ -26,6 +26,11 @@ export type BucketFunctions<TRouter extends AnyRouter> = {
  metadata: InferMetadataObject<TRouter['buckets'][K]>;
  path: InferBucketPathObject<TRouter['buckets'][K]>;
  }>;
+ confirmUpload: (params: {
+ url: string;
+ }) => Promise<{
+ success: boolean;
+ }>;
  delete: (params: {
  url: string;
  }) => Promise<{
@@ -52,6 +57,16 @@ type UploadOptions = {
  * It will automatically delete the existing file when the upload is complete.
  */
  replaceTargetUrl?: string;
+ /**
+ * If true, the file needs to be confirmed by using the `confirmUpload` function.
+ * If the file is not confirmed within 24 hours, it will be deleted.
+ *
+ * This is useful for pages where the file is uploaded as soon as it is selected,
+ * but the user can leave the page without submitting the form.
+ *
+ * This avoids unnecessary zombie files in the bucket.
+ */
+ temporary?: boolean;
  };
  export declare function createNextProxy<TRouter extends AnyRouter>({ apiPath, uploadingCountRef, maxConcurrentUploads, }: {
  apiPath: string;
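To illustrate the `temporary` option and `confirmUpload` function declared in the hunk above: a minimal sketch, assuming a `publicFiles` bucket. The file is uploaded as soon as it is selected but marked temporary, and only confirmed when the form is actually submitted, so abandoned uploads are cleaned up automatically.

```tsx
// Hedged sketch: `publicFiles` is an assumed bucket name.
// 1. Upload as soon as the user picks a file, but mark it temporary.
const res = await edgestore.publicFiles.upload({
  file,
  options: { temporary: true }, // deleted automatically if not confirmed within 24 hours
});

// 2. When the form is actually submitted, keep the file.
async function onSubmit() {
  await edgestore.publicFiles.confirmUpload({ url: res.url });
}
```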
@@ -1 +1 @@
- {"version":3,"file":"createNextProxy.d.ts","sourceRoot":"","sources":["../src/createNextProxy.ts"],"names":[],"mappings":";AAAA,OAAO,EACL,KAAK,SAAS,EACd,KAAK,qBAAqB,EAC1B,KAAK,mBAAmB,EACzB,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,KAAK,CAAC;AAG7B,MAAM,MAAM,eAAe,CAAC,OAAO,SAAS,SAAS,IAAI;KACtD,CAAC,IAAI,MAAM,OAAO,CAAC,SAAS,CAAC,GAAG;QAC/B,MAAM,EAAE,CACN,MAAM,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,SAAS,KAAK,GACjE;YACE,IAAI,EAAE,IAAI,CAAC;YACX,gBAAgB,CAAC,EAAE,uBAAuB,CAAC;YAC3C,OAAO,CAAC,EAAE,aAAa,CAAC;SACzB,GACD;YACE,IAAI,EAAE,IAAI,CAAC;YACX,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YACvD,gBAAgB,CAAC,EAAE,uBAAuB,CAAC;YAC3C,OAAO,CAAC,EAAE,aAAa,CAAC;SACzB,KACF,OAAO,CACV,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,SAAS,OAAO,GACjD;YACE,GAAG,EAAE,MAAM,CAAC;YACZ,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;YAC5B,IAAI,EAAE,MAAM,CAAC;YACb,UAAU,EAAE,IAAI,CAAC;YACjB,QAAQ,EAAE,mBAAmB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACrD,IAAI,EAAE,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACpD,GACD;YACE,GAAG,EAAE,MAAM,CAAC;YACZ,IAAI,EAAE,MAAM,CAAC;YACb,UAAU,EAAE,IAAI,CAAC;YACjB,QAAQ,EAAE,mBAAmB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACrD,IAAI,EAAE,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACpD,CACN,CAAC;QACF,MAAM,EAAE,CAAC,MAAM,EAAE;YAAE,GAAG,EAAE,MAAM,CAAA;SAAE,KAAK,OAAO,CAAC;YAC3C,OAAO,EAAE,OAAO,CAAC;SAClB,CAAC,CAAC;KACJ;CACF,CAAC;AAEF,KAAK,uBAAuB,GAAG,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;AAE1D,KAAK,aAAa,GAAG;IACnB;;;;;;;;;;OAUG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B,CAAC;AAEF,wBAAgB,eAAe,CAAC,OAAO,SAAS,SAAS,EAAE,EACzD,OAAO,EACP,iBAAiB,EACjB,oBAAwB,GACzB,EAAE;IACD,OAAO,EAAE,MAAM,CAAC;IAChB,iBAAiB,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAClD,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B,4BAiCA"}
+ {"version":3,"file":"createNextProxy.d.ts","sourceRoot":"","sources":["../src/createNextProxy.ts"],"names":[],"mappings":";AACA,OAAO,EACL,KAAK,SAAS,EACd,KAAK,qBAAqB,EAC1B,KAAK,mBAAmB,EACzB,MAAM,wBAAwB,CAAC;AAChC,OAAO,EAAE,KAAK,CAAC,EAAE,MAAM,KAAK,CAAC;AAG7B,MAAM,MAAM,eAAe,CAAC,OAAO,SAAS,SAAS,IAAI;KACtD,CAAC,IAAI,MAAM,OAAO,CAAC,SAAS,CAAC,GAAG;QAC/B,MAAM,EAAE,CACN,MAAM,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,SAAS,KAAK,GACjE;YACE,IAAI,EAAE,IAAI,CAAC;YACX,gBAAgB,CAAC,EAAE,uBAAuB,CAAC;YAC3C,OAAO,CAAC,EAAE,aAAa,CAAC;SACzB,GACD;YACE,IAAI,EAAE,IAAI,CAAC;YACX,KAAK,EAAE,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC;YACvD,gBAAgB,CAAC,EAAE,uBAAuB,CAAC;YAC3C,OAAO,CAAC,EAAE,aAAa,CAAC;SACzB,KACF,OAAO,CACV,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,MAAM,CAAC,SAAS,OAAO,GACjD;YACE,GAAG,EAAE,MAAM,CAAC;YACZ,YAAY,EAAE,MAAM,GAAG,IAAI,CAAC;YAC5B,IAAI,EAAE,MAAM,CAAC;YACb,UAAU,EAAE,IAAI,CAAC;YACjB,QAAQ,EAAE,mBAAmB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACrD,IAAI,EAAE,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACpD,GACD;YACE,GAAG,EAAE,MAAM,CAAC;YACZ,IAAI,EAAE,MAAM,CAAC;YACb,UAAU,EAAE,IAAI,CAAC;YACjB,QAAQ,EAAE,mBAAmB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACrD,IAAI,EAAE,qBAAqB,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;SACpD,CACN,CAAC;QACF,aAAa,EAAE,CAAC,MAAM,EAAE;YAAE,GAAG,EAAE,MAAM,CAAA;SAAE,KAAK,OAAO,CAAC;YAClD,OAAO,EAAE,OAAO,CAAC;SAClB,CAAC,CAAC;QACH,MAAM,EAAE,CAAC,MAAM,EAAE;YAAE,GAAG,EAAE,MAAM,CAAA;SAAE,KAAK,OAAO,CAAC;YAC3C,OAAO,EAAE,OAAO,CAAC;SAClB,CAAC,CAAC;KACJ;CACF,CAAC;AAEF,KAAK,uBAAuB,GAAG,CAAC,QAAQ,EAAE,MAAM,KAAK,IAAI,CAAC;AAE1D,KAAK,aAAa,GAAG;IACnB;;;;;;;;;;OAUG;IACH,cAAc,CAAC,EAAE,MAAM,CAAC;IACxB;;;OAGG;IACH,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAC1B;;;;;;;;OAQG;IACH,SAAS,CAAC,EAAE,OAAO,CAAC;CACrB,CAAC;AAEF,wBAAgB,eAAe,CAAC,OAAO,SAAS,SAAS,EAAE,EACzD,OAAO,EACP,iBAAiB,EACjB,oBAAwB,GACzB,EAAE;IACD,OAAO,EAAE,MAAM,CAAC;IAChB,iBAAiB,EAAE,KAAK,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAC;IAClD,oBAAoB,CAAC,EAAE,MAAM,CAAC;CAC/B,4BAuCA"}
package/dist/index.js CHANGED
@@ -51,6 +51,12 @@ function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5
  uploadingCountRef.current--;
  }
  },
+ confirmUpload: async (params)=>{
+ return await confirmUpload(params, {
+ bucketName: bucketName,
+ apiPath
+ });
+ },
  delete: async (params)=>{
  return await deleteFile(params, {
  bucketName: bucketName,
@@ -75,7 +81,8 @@ async function uploadFile({ file, input, onProgressChange, options }, { apiPath,
  type: file.type,
  size: file.size,
  fileName: options?.manualFileName,
- replaceTargetUrl: options?.replaceTargetUrl
+ replaceTargetUrl: options?.replaceTargetUrl,
+ temporary: options?.temporary
  }
  }),
  headers: {
@@ -83,11 +90,21 @@ async function uploadFile({ file, input, onProgressChange, options }, { apiPath,
  }
  });
  const json = await res.json();
- if (!json.uploadUrl) {
+ if ('multipart' in json) {
+ await multipartUpload({
+ bucketName,
+ multipartInfo: json.multipart,
+ onProgressChange,
+ file,
+ apiPath
+ });
+ } else if ('uploadUrl' in json) {
+ // Single part upload
+ // Upload the file to the signed URL and get the progress
+ await uploadFileInner(file, json.uploadUrl, onProgressChange);
+ } else {
  throw new EdgeStoreError('An error occurred');
  }
- // Upload the file to the signed URL and get the progress
- await uploadFileInner(file, json.uploadUrl, onProgressChange);
  return {
  url: getUrl(json.accessUrl, apiPath),
  thumbnailUrl: json.thumbnailUrl ? getUrl(json.thumbnailUrl, apiPath) : null,
@@ -137,12 +154,86 @@ const uploadFileInner = async (file, uploadUrl, onProgressChange)=>{
  reject(new Error('File upload aborted'));
  });
  request.addEventListener('loadend', ()=>{
- resolve();
+ // Return the ETag header (needed to complete multipart upload)
+ resolve(request.getResponseHeader('ETag'));
  });
  request.send(file);
  });
  return promise;
  };
+ async function multipartUpload(params) {
+ const { bucketName, multipartInfo, onProgressChange, file, apiPath } = params;
+ const { partSize, parts, totalParts, uploadId, key } = multipartInfo;
+ const uploadingParts = [];
+ const uploadPart = async (params)=>{
+ const { part, chunk } = params;
+ const { uploadUrl } = part;
+ const eTag = await uploadFileInner(chunk, uploadUrl, (progress)=>{
+ const uploadingPart = uploadingParts.find((p)=>p.partNumber === part.partNumber);
+ if (uploadingPart) {
+ uploadingPart.progress = progress;
+ } else {
+ uploadingParts.push({
+ partNumber: part.partNumber,
+ progress
+ });
+ }
+ const totalProgress = Math.round(uploadingParts.reduce((acc, p)=>acc + p.progress * 100, 0) / totalParts) / 100;
+ onProgressChange?.(totalProgress);
+ });
+ if (!eTag) {
+ throw new EdgeStoreError('Could not get ETag from multipart response');
+ }
+ return {
+ partNumber: part.partNumber,
+ eTag
+ };
+ };
+ // Upload the parts in parallel
+ const completedParts = await queuedPromises({
+ items: parts.map((part)=>({
+ part,
+ chunk: file.slice((part.partNumber - 1) * partSize, part.partNumber * partSize)
+ })),
+ fn: uploadPart,
+ maxParallel: 5,
+ maxRetries: 10
+ });
+ // Complete multipart upload
+ const res = await fetch(`${apiPath}/complete-multipart-upload`, {
+ method: 'POST',
+ body: JSON.stringify({
+ bucketName,
+ uploadId,
+ key,
+ parts: completedParts
+ }),
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ });
+ if (!res.ok) {
+ throw new EdgeStoreError('Multi-part upload failed');
+ }
+ }
+ async function confirmUpload({ url }, { apiPath, bucketName }) {
+ const res = await fetch(`${apiPath}/confirm-upload`, {
+ method: 'POST',
+ body: JSON.stringify({
+ url,
+ bucketName
+ }),
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ });
+ if (!res.ok) {
+ throw new EdgeStoreError('An error occurred');
+ }
+ return {
+ success: true
+ };
+ }
  async function deleteFile({ url }, { apiPath, bucketName }) {
  const res = await fetch(`${apiPath}/delete-file`, {
  method: 'POST',
@@ -161,6 +252,43 @@ async function deleteFile({ url }, { apiPath, bucketName }) {
  success: true
  };
  }
+ async function queuedPromises({ items, fn, maxParallel, maxRetries = 0 }) {
+ const results = new Array(items.length);
+ const executeWithRetry = async (func, retries)=>{
+ try {
+ return await func();
+ } catch (error) {
+ if (retries > 0) {
+ await new Promise((resolve)=>setTimeout(resolve, 5000));
+ return executeWithRetry(func, retries - 1);
+ } else {
+ throw error;
+ }
+ }
+ };
+ const semaphore = {
+ count: maxParallel,
+ async wait () {
+ // If we've reached our maximum concurrency or it's the last item, wait
+ while(this.count <= 0)await new Promise((resolve)=>setTimeout(resolve, 500));
+ this.count--;
+ },
+ signal () {
+ this.count++;
+ }
+ };
+ const tasks = items.map((item, i)=>(async ()=>{
+ await semaphore.wait();
+ try {
+ const result = await executeWithRetry(()=>fn(item), maxRetries);
+ results[i] = result;
+ } finally{
+ semaphore.signal();
+ }
+ })());
+ await Promise.all(tasks);
+ return results;
+ }

  const DEFAULT_BASE_URL = process.env.NEXT_PUBLIC_EDGE_STORE_BASE_URL ?? 'https://files.edgestore.dev';
  function createEdgeStoreProvider(opts) {
@@ -191,38 +319,28 @@ function createEdgeStoreProvider(opts) {
  }
  function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUploads }) {
  const apiPath = basePath ? `${basePath}` : '/api/edgestore';
- const [token, setToken] = React__namespace.useState(null);
  const uploadingCountRef = React__namespace.useRef(0);
  React__namespace.useEffect(()=>{
- void fetch(`${apiPath}/init`, {
- method: 'POST'
- }).then(async (res)=>{
- if (res.ok) {
- const json = await res.json();
- setToken(json.token);
- await fetch(`${DEFAULT_BASE_URL}/_init`, {
- method: 'GET',
- headers: {
- 'x-edgestore-token': json.token
- }
- });
- }
- });
+ void init();
  // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
- function getSrc(url) {
- if (// in production we use cookies, so we don't need a token
- process.env.NODE_ENV === 'production' || // public urls don't need a token
- // e.g. https://files.edgestore.dev/project/bucket/_public/...
- url.match(/^https?:\/\/[^\/]+\/[^\/]+\/[^\/]+\/_public\/.+/)) {
- return `${url}`;
- } else {
- // in development, third party cookies don't work, so we need to pass the token as a query param
- const uri = new URL(url);
- uri.searchParams.set('token', token ?? '');
- return `${uri}`;
+ async function init() {
+ const res = await fetch(`${apiPath}/init`, {
+ method: 'POST'
+ });
+ if (res.ok) {
+ const json = await res.json();
+ await fetch(`${DEFAULT_BASE_URL}/_init`, {
+ method: 'GET',
+ headers: {
+ 'x-edgestore-token': json.token
+ }
+ });
  }
  }
+ async function reset() {
+ await init();
+ }
  return /*#__PURE__*/ React__namespace.createElement(React__namespace.Fragment, null, /*#__PURE__*/ React__namespace.createElement(context.Provider, {
  value: {
  edgestore: createNextProxy({
@@ -230,7 +348,7 @@ function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUplo
  uploadingCountRef,
  maxConcurrentUploads
  }),
- getSrc
+ reset
  }
  }, children));
  }
package/dist/index.mjs CHANGED
@@ -27,6 +27,12 @@ function createNextProxy({ apiPath, uploadingCountRef, maxConcurrentUploads = 5
  uploadingCountRef.current--;
  }
  },
+ confirmUpload: async (params)=>{
+ return await confirmUpload(params, {
+ bucketName: bucketName,
+ apiPath
+ });
+ },
  delete: async (params)=>{
  return await deleteFile(params, {
  bucketName: bucketName,
@@ -51,7 +57,8 @@ async function uploadFile({ file, input, onProgressChange, options }, { apiPath,
  type: file.type,
  size: file.size,
  fileName: options?.manualFileName,
- replaceTargetUrl: options?.replaceTargetUrl
+ replaceTargetUrl: options?.replaceTargetUrl,
+ temporary: options?.temporary
  }
  }),
  headers: {
@@ -59,11 +66,21 @@ async function uploadFile({ file, input, onProgressChange, options }, { apiPath,
  }
  });
  const json = await res.json();
- if (!json.uploadUrl) {
+ if ('multipart' in json) {
+ await multipartUpload({
+ bucketName,
+ multipartInfo: json.multipart,
+ onProgressChange,
+ file,
+ apiPath
+ });
+ } else if ('uploadUrl' in json) {
+ // Single part upload
+ // Upload the file to the signed URL and get the progress
+ await uploadFileInner(file, json.uploadUrl, onProgressChange);
+ } else {
  throw new EdgeStoreError('An error occurred');
  }
- // Upload the file to the signed URL and get the progress
- await uploadFileInner(file, json.uploadUrl, onProgressChange);
  return {
  url: getUrl(json.accessUrl, apiPath),
  thumbnailUrl: json.thumbnailUrl ? getUrl(json.thumbnailUrl, apiPath) : null,
@@ -113,12 +130,86 @@ const uploadFileInner = async (file, uploadUrl, onProgressChange)=>{
  reject(new Error('File upload aborted'));
  });
  request.addEventListener('loadend', ()=>{
- resolve();
+ // Return the ETag header (needed to complete multipart upload)
+ resolve(request.getResponseHeader('ETag'));
  });
  request.send(file);
  });
  return promise;
  };
+ async function multipartUpload(params) {
+ const { bucketName, multipartInfo, onProgressChange, file, apiPath } = params;
+ const { partSize, parts, totalParts, uploadId, key } = multipartInfo;
+ const uploadingParts = [];
+ const uploadPart = async (params)=>{
+ const { part, chunk } = params;
+ const { uploadUrl } = part;
+ const eTag = await uploadFileInner(chunk, uploadUrl, (progress)=>{
+ const uploadingPart = uploadingParts.find((p)=>p.partNumber === part.partNumber);
+ if (uploadingPart) {
+ uploadingPart.progress = progress;
+ } else {
+ uploadingParts.push({
+ partNumber: part.partNumber,
+ progress
+ });
+ }
+ const totalProgress = Math.round(uploadingParts.reduce((acc, p)=>acc + p.progress * 100, 0) / totalParts) / 100;
+ onProgressChange?.(totalProgress);
+ });
+ if (!eTag) {
+ throw new EdgeStoreError('Could not get ETag from multipart response');
+ }
+ return {
+ partNumber: part.partNumber,
+ eTag
+ };
+ };
+ // Upload the parts in parallel
+ const completedParts = await queuedPromises({
+ items: parts.map((part)=>({
+ part,
+ chunk: file.slice((part.partNumber - 1) * partSize, part.partNumber * partSize)
+ })),
+ fn: uploadPart,
+ maxParallel: 5,
+ maxRetries: 10
+ });
+ // Complete multipart upload
+ const res = await fetch(`${apiPath}/complete-multipart-upload`, {
+ method: 'POST',
+ body: JSON.stringify({
+ bucketName,
+ uploadId,
+ key,
+ parts: completedParts
+ }),
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ });
+ if (!res.ok) {
+ throw new EdgeStoreError('Multi-part upload failed');
+ }
+ }
+ async function confirmUpload({ url }, { apiPath, bucketName }) {
+ const res = await fetch(`${apiPath}/confirm-upload`, {
+ method: 'POST',
+ body: JSON.stringify({
+ url,
+ bucketName
+ }),
+ headers: {
+ 'Content-Type': 'application/json'
+ }
+ });
+ if (!res.ok) {
+ throw new EdgeStoreError('An error occurred');
+ }
+ return {
+ success: true
+ };
+ }
  async function deleteFile({ url }, { apiPath, bucketName }) {
  const res = await fetch(`${apiPath}/delete-file`, {
  method: 'POST',
@@ -137,6 +228,43 @@ async function deleteFile({ url }, { apiPath, bucketName }) {
  success: true
  };
  }
+ async function queuedPromises({ items, fn, maxParallel, maxRetries = 0 }) {
+ const results = new Array(items.length);
+ const executeWithRetry = async (func, retries)=>{
+ try {
+ return await func();
+ } catch (error) {
+ if (retries > 0) {
+ await new Promise((resolve)=>setTimeout(resolve, 5000));
+ return executeWithRetry(func, retries - 1);
+ } else {
+ throw error;
+ }
+ }
+ };
+ const semaphore = {
+ count: maxParallel,
+ async wait () {
+ // If we've reached our maximum concurrency or it's the last item, wait
+ while(this.count <= 0)await new Promise((resolve)=>setTimeout(resolve, 500));
+ this.count--;
+ },
+ signal () {
+ this.count++;
+ }
+ };
+ const tasks = items.map((item, i)=>(async ()=>{
+ await semaphore.wait();
+ try {
+ const result = await executeWithRetry(()=>fn(item), maxRetries);
+ results[i] = result;
+ } finally{
+ semaphore.signal();
+ }
+ })());
+ await Promise.all(tasks);
+ return results;
+ }

  const DEFAULT_BASE_URL = process.env.NEXT_PUBLIC_EDGE_STORE_BASE_URL ?? 'https://files.edgestore.dev';
  function createEdgeStoreProvider(opts) {
@@ -167,38 +295,28 @@ function createEdgeStoreProvider(opts) {
  }
  function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUploads }) {
  const apiPath = basePath ? `${basePath}` : '/api/edgestore';
- const [token, setToken] = React.useState(null);
  const uploadingCountRef = React.useRef(0);
  React.useEffect(()=>{
- void fetch(`${apiPath}/init`, {
- method: 'POST'
- }).then(async (res)=>{
- if (res.ok) {
- const json = await res.json();
- setToken(json.token);
- await fetch(`${DEFAULT_BASE_URL}/_init`, {
- method: 'GET',
- headers: {
- 'x-edgestore-token': json.token
- }
- });
- }
- });
+ void init();
  // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);
- function getSrc(url) {
- if (// in production we use cookies, so we don't need a token
- process.env.NODE_ENV === 'production' || // public urls don't need a token
- // e.g. https://files.edgestore.dev/project/bucket/_public/...
- url.match(/^https?:\/\/[^\/]+\/[^\/]+\/[^\/]+\/_public\/.+/)) {
- return `${url}`;
- } else {
- // in development, third party cookies don't work, so we need to pass the token as a query param
- const uri = new URL(url);
- uri.searchParams.set('token', token ?? '');
- return `${uri}`;
+ async function init() {
+ const res = await fetch(`${apiPath}/init`, {
+ method: 'POST'
+ });
+ if (res.ok) {
+ const json = await res.json();
+ await fetch(`${DEFAULT_BASE_URL}/_init`, {
+ method: 'GET',
+ headers: {
+ 'x-edgestore-token': json.token
+ }
+ });
  }
  }
+ async function reset() {
+ await init();
+ }
  return /*#__PURE__*/ React.createElement(React.Fragment, null, /*#__PURE__*/ React.createElement(context.Provider, {
  value: {
  edgestore: createNextProxy({
@@ -206,7 +324,7 @@ function EdgeStoreProviderInner({ children, context, basePath, maxConcurrentUplo
  uploadingCountRef,
  maxConcurrentUploads
  }),
- getSrc
+ reset
  }
  }, children));
  }
package/package.json CHANGED
@@ -1,7 +1,7 @@
  {
  "name": "@edgestore/react",
- "version": "0.1.1",
- "description": "The best DX for uploading files from your Next.js app",
+ "version": "0.1.3-alpha.0",
+ "description": "Upload files with ease from React/Next.js",
  "homepage": "https://edgestore.dev",
  "repository": "https://github.com/edgestorejs/edgestore.git",
  "author": "Ravi <me@ravi.com>",
@@ -54,14 +54,14 @@
  "uuid": "^9.0.0"
  },
  "peerDependencies": {
- "@edgestore/server": "0.1.1",
+ "@edgestore/server": "0.1.3-alpha.0",
  "next": "*",
  "react": ">=16.8.0",
  "react-dom": ">=16.8.0",
  "zod": ">=3.0.0"
  },
  "devDependencies": {
- "@edgestore/server": "0.1.1",
+ "@edgestore/server": "0.1.3-alpha.0",
  "@types/cookie": "^0.5.1",
  "@types/node": "^18.11.18",
  "@types/uuid": "^9.0.1",
@@ -71,5 +71,5 @@
  "typescript": "^5.1.6",
  "zod": "^3.21.4"
  },
- "gitHead": "aef56ef04e24a3de539d949831b2bf876a15d4e3"
+ "gitHead": "3ed23e543ee1ed151685884bb0983c2471e03880"
  }
@@ -8,10 +8,12 @@ const DEFAULT_BASE_URL =
  type EdgeStoreContextValue<TRouter extends AnyRouter> = {
  edgestore: BucketFunctions<TRouter>;
  /**
- * In development, if this is a protected file, this function will add the token as a query param to the url.
- * This is needed because third party cookies don't work with http urls.
+ * This will re-run the Edge Store initialization process,
+ * which will run the `createContext` function again.
+ *
+ * Can be used after a sign-in or sign-out, for example.
  */
- getSrc: (url: string) => string;
+ reset: () => Promise<void>;
  };

  export function createEdgeStoreProvider<TRouter extends AnyRouter>(opts?: {
@@ -85,43 +87,31 @@ function EdgeStoreProviderInner<TRouter extends AnyRouter>({
  maxConcurrentUploads?: number;
  }) {
  const apiPath = basePath ? `${basePath}` : '/api/edgestore';
- const [token, setToken] = React.useState<string | null>(null);
  const uploadingCountRef = React.useRef(0);
  React.useEffect(() => {
- void fetch(`${apiPath}/init`, {
- method: 'POST',
- }).then(async (res) => {
- if (res.ok) {
- const json = await res.json();
- setToken(json.token);
- await fetch(`${DEFAULT_BASE_URL}/_init`, {
- method: 'GET',
- headers: {
- 'x-edgestore-token': json.token,
- },
- });
- }
- });
+ void init();
  // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

- function getSrc(url: string) {
- if (
- // in production we use cookies, so we don't need a token
- process.env.NODE_ENV === 'production' ||
- // public urls don't need a token
- // e.g. https://files.edgestore.dev/project/bucket/_public/...
- url.match(/^https?:\/\/[^\/]+\/[^\/]+\/[^\/]+\/_public\/.+/)
- ) {
- return `${url}`;
- } else {
- // in development, third party cookies don't work, so we need to pass the token as a query param
- const uri = new URL(url);
- uri.searchParams.set('token', token ?? '');
- return `${uri}`;
+ async function init() {
+ const res = await fetch(`${apiPath}/init`, {
+ method: 'POST',
+ });
+ if (res.ok) {
+ const json = await res.json();
+ await fetch(`${DEFAULT_BASE_URL}/_init`, {
+ method: 'GET',
+ headers: {
+ 'x-edgestore-token': json.token,
+ },
+ });
  }
  }

+ async function reset() {
+ await init();
+ }
+
  return (
  <>
  <context.Provider
@@ -131,7 +121,7 @@ function EdgeStoreProviderInner<TRouter extends AnyRouter>({
  uploadingCountRef,
  maxConcurrentUploads,
  }),
- getSrc,
+ reset,
  }}
  >
  {children}
@@ -1,3 +1,4 @@
+ import { type RequestUploadRes } from '@edgestore/server/adapters';
  import {
  type AnyRouter,
  type InferBucketPathObject,
@@ -39,6 +40,9 @@ export type BucketFunctions<TRouter extends AnyRouter> = {
  path: InferBucketPathObject<TRouter['buckets'][K]>;
  }
  >;
+ confirmUpload: (params: { url: string }) => Promise<{
+ success: boolean;
+ }>;
  delete: (params: { url: string }) => Promise<{
  success: boolean;
  }>;
@@ -65,6 +69,16 @@ type UploadOptions = {
  * It will automatically delete the existing file when the upload is complete.
  */
  replaceTargetUrl?: string;
+ /**
+ * If true, the file needs to be confirmed by using the `confirmUpload` function.
+ * If the file is not confirmed within 24 hours, it will be deleted.
+ *
+ * This is useful for pages where the file is uploaded as soon as it is selected,
+ * but the user can leave the page without submitting the form.
+ *
+ * This avoids unnecessary zombie files in the bucket.
+ */
+ temporary?: boolean;
  };

  export function createNextProxy<TRouter extends AnyRouter>({
@@ -98,6 +112,12 @@ export function createNextProxy<TRouter extends AnyRouter>({
  uploadingCountRef.current--;
  }
  },
+ confirmUpload: async (params: { url: string }) => {
+ return await confirmUpload(params, {
+ bucketName: bucketName as string,
+ apiPath,
+ });
+ },
  delete: async (params: { url: string }) => {
  return await deleteFile(params, {
  bucketName: bucketName as string,
@@ -143,18 +163,29 @@ async function uploadFile(
  size: file.size,
  fileName: options?.manualFileName,
  replaceTargetUrl: options?.replaceTargetUrl,
+ temporary: options?.temporary,
  },
  }),
  headers: {
  'Content-Type': 'application/json',
  },
  });
- const json = await res.json();
- if (!json.uploadUrl) {
+ const json = (await res.json()) as RequestUploadRes;
+ if ('multipart' in json) {
+ await multipartUpload({
+ bucketName,
+ multipartInfo: json.multipart,
+ onProgressChange,
+ file,
+ apiPath,
+ });
+ } else if ('uploadUrl' in json) {
+ // Single part upload
+ // Upload the file to the signed URL and get the progress
+ await uploadFileInner(file, json.uploadUrl, onProgressChange);
+ } else {
  throw new EdgeStoreError('An error occurred');
  }
- // Upload the file to the signed URL and get the progress
- await uploadFileInner(file, json.uploadUrl, onProgressChange);
  return {
  url: getUrl(json.accessUrl, apiPath),
  thumbnailUrl: json.thumbnailUrl
@@ -162,8 +193,8 @@ async function uploadFile(
  : null,
  size: json.size,
  uploadedAt: new Date(json.uploadedAt),
- path: json.path,
- metadata: json.metadata,
+ path: json.path as any,
+ metadata: json.metadata as any,
  };
  } catch (e) {
  onProgressChange?.(0);
@@ -189,11 +220,11 @@ function getUrl(url: string, apiPath: string) {
  }

  const uploadFileInner = async (
- file: File,
+ file: File | Blob,
  uploadUrl: string,
  onProgressChange?: OnProgressChangeHandler,
  ) => {
- const promise = new Promise<void>((resolve, reject) => {
+ const promise = new Promise<string | null>((resolve, reject) => {
  const request = new XMLHttpRequest();
  request.open('PUT', uploadUrl);
  request.addEventListener('loadstart', () => {
@@ -213,7 +244,8 @@ const uploadFileInner = async (
  reject(new Error('File upload aborted'));
  });
  request.addEventListener('loadend', () => {
- resolve();
+ // Return the ETag header (needed to complete multipart upload)
+ resolve(request.getResponseHeader('ETag'));
  });

  request.send(file);
@@ -221,6 +253,115 @@ const uploadFileInner = async (
  return promise;
  };

+ async function multipartUpload(params: {
+ bucketName: string;
+ multipartInfo: Extract<RequestUploadRes, { multipart: any }>['multipart'];
+ onProgressChange: OnProgressChangeHandler | undefined;
+ file: File;
+ apiPath: string;
+ }) {
+ const { bucketName, multipartInfo, onProgressChange, file, apiPath } = params;
+ const { partSize, parts, totalParts, uploadId, key } = multipartInfo;
+ const uploadingParts: {
+ partNumber: number;
+ progress: number;
+ }[] = [];
+ const uploadPart = async (params: {
+ part: typeof parts[number];
+ chunk: Blob;
+ }) => {
+ const { part, chunk } = params;
+ const { uploadUrl } = part;
+ const eTag = await uploadFileInner(chunk, uploadUrl, (progress) => {
+ const uploadingPart = uploadingParts.find(
+ (p) => p.partNumber === part.partNumber,
+ );
+ if (uploadingPart) {
+ uploadingPart.progress = progress;
+ } else {
+ uploadingParts.push({
+ partNumber: part.partNumber,
+ progress,
+ });
+ }
+ const totalProgress =
+ Math.round(
+ uploadingParts.reduce((acc, p) => acc + p.progress * 100, 0) /
+ totalParts,
+ ) / 100;
+ onProgressChange?.(totalProgress);
+ });
+ if (!eTag) {
+ throw new EdgeStoreError('Could not get ETag from multipart response');
+ }
+ return {
+ partNumber: part.partNumber,
+ eTag,
+ };
+ };
+
+ // Upload the parts in parallel
+ const completedParts = await queuedPromises({
+ items: parts.map((part) => ({
+ part,
+ chunk: file.slice(
+ (part.partNumber - 1) * partSize,
+ part.partNumber * partSize,
+ ),
+ })),
+ fn: uploadPart,
+ maxParallel: 5,
+ maxRetries: 10, // retry 10 times per part
+ });
+
+ // Complete multipart upload
+ const res = await fetch(`${apiPath}/complete-multipart-upload`, {
+ method: 'POST',
+ body: JSON.stringify({
+ bucketName,
+ uploadId,
+ key,
+ parts: completedParts,
+ }),
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+ if (!res.ok) {
+ throw new EdgeStoreError('Multi-part upload failed');
+ }
+ }
+
+ async function confirmUpload(
+ {
+ url,
+ }: {
+ url: string;
+ },
+ {
+ apiPath,
+ bucketName,
+ }: {
+ apiPath: string;
+ bucketName: string;
+ },
+ ) {
+ const res = await fetch(`${apiPath}/confirm-upload`, {
+ method: 'POST',
+ body: JSON.stringify({
+ url,
+ bucketName,
+ }),
+ headers: {
+ 'Content-Type': 'application/json',
+ },
+ });
+ if (!res.ok) {
+ throw new EdgeStoreError('An error occurred');
+ }
+ return { success: true };
+ }
+
  async function deleteFile(
  {
  url,
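For readers of the `multipartUpload` function above, the destructuring of `multipartInfo` (`partSize`, `parts`, `totalParts`, `uploadId`, `key`, and each part's `partNumber`/`uploadUrl`) implies a response shape roughly like the sketch below. This is only an inference from this diff, not the canonical `RequestUploadRes` type exported by `@edgestore/server/adapters`:

```ts
// Sketch of the multipart branch of the request-upload response,
// inferred from how multipartUpload destructures it (not the official type).
type MultipartInfoSketch = {
  key: string;        // object key sent back to complete the upload
  uploadId: string;   // multipart upload id
  partSize: number;   // chunk size passed to file.slice()
  totalParts: number;
  parts: {
    partNumber: number; // 1-based; chunk covers bytes [(n - 1) * partSize, n * partSize)
    uploadUrl: string;  // pre-signed URL the part is PUT to
  }[];
};
```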
@@ -250,3 +391,62 @@ async function deleteFile(
  }
  return { success: true };
  }
+
+ async function queuedPromises<TType, TRes>({
+ items,
+ fn,
+ maxParallel,
+ maxRetries = 0,
+ }: {
+ items: TType[];
+ fn: (item: TType) => Promise<TRes>;
+ maxParallel: number;
+ maxRetries?: number;
+ }): Promise<TRes[]> {
+ const results: TRes[] = new Array(items.length);
+
+ const executeWithRetry = async (
+ func: () => Promise<TRes>,
+ retries: number,
+ ): Promise<TRes> => {
+ try {
+ return await func();
+ } catch (error) {
+ if (retries > 0) {
+ await new Promise((resolve) => setTimeout(resolve, 5000));
+ return executeWithRetry(func, retries - 1);
+ } else {
+ throw error;
+ }
+ }
+ };
+
+ const semaphore = {
+ count: maxParallel,
+ async wait() {
+ // If we've reached our maximum concurrency or it's the last item, wait
+ while (this.count <= 0)
+ await new Promise((resolve) => setTimeout(resolve, 500));
+ this.count--;
+ },
+ signal() {
+ this.count++;
+ },
+ };
+
+ const tasks: Promise<void>[] = items.map((item, i) =>
+ (async () => {
+ await semaphore.wait();
+
+ try {
+ const result = await executeWithRetry(() => fn(item), maxRetries);
+ results[i] = result;
+ } finally {
+ semaphore.signal();
+ }
+ })(),
+ );
+
+ await Promise.all(tasks);
+ return results;
+ }
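For readers skimming this helper: `queuedPromises` is a small polling-based concurrency limiter with per-item retries (it waits 5 seconds between attempts), and results are returned in the original item order. A hedged sketch with illustrative inputs, standing in for how `multipartUpload` above uses it:

```ts
// Illustrative only: runs at most 5 tasks at a time, retrying each item
// up to 10 times, and returns results in the same order as `items`.
const completedParts = await queuedPromises({
  items: [{ partNumber: 1 }, { partNumber: 2 }, { partNumber: 3 }],
  fn: async (item) => {
    // stand-in for uploadPart: pretend each item resolves to an ETag
    return { partNumber: item.partNumber, eTag: `etag-${item.partNumber}` };
  },
  maxParallel: 5,
  maxRetries: 10,
});
```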