@axium/storage 0.16.1 → 0.16.3

```diff
@@ -3,6 +3,20 @@ import { blake2b } from 'blakejs';
 import { prettifyError } from 'zod';
 import { StorageItemMetadata } from '../common.js';
 import '../polyfills.js';
+const uploadConfig = {
+    /**
+     * Files below this size in MB are hashed client-side to save bandwidth.
+     * This is most useful when the client has plenty of compute but a poor network connection.
+     * On a fast connection it is counterproductive, since client-side hashing
+     * can take longer than simply uploading the file.
+     */
+    hashThreshold: 10,
+    /**
+     * An upper limit on chunk size in MB, independent of `max_transfer_size`.
+     * Smaller chunks mean better UX but add latency from round trips and extra requests.
+     */
+    uxChunkSize: 10,
+};
 function rawStorage(suffix) {
     const raw = '/raw/storage' + (suffix ? '/' + suffix : '');
     if (prefix[0] == '/')
```
```diff
@@ -29,10 +43,9 @@ export async function uploadItem(file, opt = {}) {
     if (!opt.name)
         throw 'item name is required';
     const content = await file.bytes();
-    /**
-     * For big files, it takes a *really* long time to compute the hash, so we just don't do it ahead of time and leave it up to the server.
-     */
-    const hash = content.length < 10_000_000 ? blake2b(content).toHex() : null;
+    opt.onProgress?.(0, content.length);
+    /** For big files, it takes a *really* long time to compute the hash, so we just don't do it ahead of time and leave it up to the server. */
+    const hash = content.length < uploadConfig.hashThreshold * 1_000_000 ? blake2b(content).toHex() : null;
     const upload = await fetchAPI('PUT', 'storage', {
         parentId: opt.parentId,
         name: opt.name,
```
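
A usage sketch of the updated client API, for context. The option names come from the hunks above; the import path and the exact `onProgress` signature (bytes sent, total bytes) are assumptions inferred from the `opt.onProgress?.(0, content.length)` call, not a documented API.

```js
// Hypothetical usage; the import path and onProgress signature are
// assumptions based on this diff, not confirmed by the package docs.
import { uploadItem } from '@axium/storage/client';

const file = new File([new Uint8Array(25_000_000)], 'backup.bin');
const item = await uploadItem(file, {
    name: 'backup.bin',
    onProgress(sent, total) {
        console.log(`upload: ${((100 * sent) / total).toFixed(1)}%`);
    },
});
```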
```diff
@@ -42,10 +55,12 @@ export async function uploadItem(file, opt = {}) {
     });
     if (upload.status == 'created')
         return upload.item;
-    let chunkSize = upload.max_transfer_size * 1_000_000;
+    let chunkSize = Math.min(upload.max_transfer_size, uploadConfig.uxChunkSize);
     if (globalThis.navigator?.connection) {
-        chunkSize = Math.min(upload.max_transfer_size, conTypeToSpeed[globalThis.navigator.connection.effectiveType]) * 1_000_000;
+        chunkSize = Math.min(chunkSize, conTypeToSpeed[globalThis.navigator.connection.effectiveType]);
     }
+    // MB -> bytes
+    chunkSize *= 1_000_000;
     let response;
     for (let offset = 0; offset < content.length; offset += chunkSize) {
         const size = Math.min(chunkSize, content.length - offset);
```
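
To make the new sizing logic concrete, here is a small worked sketch. Only the shape of the computation comes from the diff; the `conTypeToSpeed` entries and the server's `max_transfer_size` below are illustrative assumptions.

```js
// Illustrative values only; conTypeToSpeed's real entries are not in this diff.
const uploadConfig = { uxChunkSize: 10 };     // MB (from the diff)
const conTypeToSpeed = { '4g': 25, '3g': 2 }; // MB per chunk, assumed mapping
const upload = { max_transfer_size: 50 };     // MB, assumed server limit

let chunkSize = Math.min(upload.max_transfer_size, uploadConfig.uxChunkSize); // 10
if (globalThis.navigator?.connection) {
    // On a '3g' connection this would cap the chunk at 2 MB.
    chunkSize = Math.min(chunkSize, conTypeToSpeed[globalThis.navigator.connection.effectiveType]);
}
chunkSize *= 1_000_000; // MB -> bytes, applied once at the end
```

The refactor lets all three caps (server limit, `uxChunkSize`, connection speed) combine through successive `Math.min` calls in MB, with a single MB-to-bytes conversion at the end.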
```diff
@@ -115,7 +115,7 @@ addRoute({
         if (hash.toHex() != upload.init.hash)
             error(409, 'Hash mismatch');
         upload.remove();
-        return await createNewItem(upload.init, upload.userId, path => {
+        const item = await createNewItem(upload.init, upload.userId, path => {
             try {
                 renameSync(upload.file, path);
             }
```
```diff
@@ -123,9 +123,15 @@ addRoute({
                 if (e.code != 'EXDEV')
                     throw e;
                 writeFileSync(path, readFileSync(upload.file));
-                unlinkSync(upload.file);
             }
         });
+        try {
+            unlinkSync(upload.file);
+        }
+        catch {
+            // probably renamed
+        }
+        return item;
     },
});
addRoute({
```
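
The server-side hunks restructure cleanup around a well-known Node pattern: `renameSync` is atomic but fails with `EXDEV` across filesystem boundaries, so the code falls back to a copy and then removes the temp file unconditionally once `createNewItem` resolves. A standalone sketch of the same pattern (the helper name and signature are mine, not from the package):

```js
import { readFileSync, renameSync, unlinkSync, writeFileSync } from 'node:fs';

// Move `src` to `dest`, tolerating a cross-device boundary.
function moveFile(src, dest) {
    try {
        renameSync(src, dest); // atomic, but only within one filesystem
    } catch (e) {
        if (e.code != 'EXDEV') throw e;
        writeFileSync(dest, readFileSync(src)); // cross-device: copy contents
    }
    try {
        unlinkSync(src); // remove the temp file
    } catch {
        // already moved by renameSync
    }
}
```

In 0.16.3 the unconditional `unlinkSync` plus empty `catch` replaces the branch-local cleanup: after a successful `renameSync` the temp path no longer exists, so the unlink failure is expected and swallowed, and the new item is returned only after cleanup.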
package/package.json CHANGED

```diff
@@ -1,6 +1,6 @@
 {
     "name": "@axium/storage",
-    "version": "0.16.1",
+    "version": "0.16.3",
     "author": "James Prevett <axium@jamespre.dev>",
     "description": "User file storage for Axium",
     "funding": {
```