@es-labs/jslib 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (72) hide show
  1. package/CHANGELOG.md +4 -0
  2. package/README.md +42 -0
  3. package/__test__/services.test.js +32 -0
  4. package/auth/index.js +226 -0
  5. package/auth/keyv.js +23 -0
  6. package/auth/knex.js +29 -0
  7. package/auth/redis.js +23 -0
  8. package/comms/email.js +123 -0
  9. package/comms/nexmo.js +44 -0
  10. package/comms/telegram.js +43 -0
  11. package/comms/telegram2/inbound.js +314 -0
  12. package/comms/telegram2/outbound.js +574 -0
  13. package/comms/webpush.js +60 -0
  14. package/config.js +37 -0
  15. package/express/controller/auth/oauth.js +39 -0
  16. package/express/controller/auth/oidc.js +87 -0
  17. package/express/controller/auth/own.js +100 -0
  18. package/express/controller/auth/saml.js +74 -0
  19. package/express/upload.js +48 -0
  20. package/index.js +1 -0
  21. package/iso/README.md +4 -0
  22. package/iso/__tests__/csv-utils.spec.js +128 -0
  23. package/iso/__tests__/datetime.spec.js +101 -0
  24. package/iso/__tests__/fetch.spec.js +270 -0
  25. package/iso/csv-utils.js +206 -0
  26. package/iso/datetime.js +103 -0
  27. package/iso/fetch.js +129 -0
  28. package/iso/fetch2.js +180 -0
  29. package/iso/log-filter.js +17 -0
  30. package/iso/sleep.js +6 -0
  31. package/iso/ws.js +63 -0
  32. package/node/oss-files/oss-uploader-client-fetch.js +258 -0
  33. package/node/oss-files/oss-uploader-client-fetch.md +31 -0
  34. package/node/oss-files/oss-uploader-client.js +219 -0
  35. package/node/oss-files/oss-uploader-server.js +199 -0
  36. package/node/oss-files/oss-uploader-usage.js +121 -0
  37. package/node/oss-files/oss-uploader-usage.md +34 -0
  38. package/node/oss-files/s3-uploader-client.js +217 -0
  39. package/node/oss-files/s3-uploader-server.js +123 -0
  40. package/node/oss-files/s3-uploader-usage.js +77 -0
  41. package/node/oss-files/s3-uploader-usage.md +34 -0
  42. package/package.json +53 -0
  43. package/packageInfo.js +9 -0
  44. package/services/ali.js +279 -0
  45. package/services/aws.js +194 -0
  46. package/services/db/__tests__/keyv.spec.js +31 -0
  47. package/services/db/keyv.js +14 -0
  48. package/services/db/knex.js +67 -0
  49. package/services/db/redis.js +51 -0
  50. package/services/index.js +57 -0
  51. package/services/mq/README.md +8 -0
  52. package/services/websocket.js +139 -0
  53. package/t4t/README.md +1 -0
  54. package/traps.js +20 -0
  55. package/utils/__tests__/aes.spec.js +52 -0
  56. package/utils/aes.js +23 -0
  57. package/web/UI.md +71 -0
  58. package/web/bwc-autocomplete.js +211 -0
  59. package/web/bwc-combobox.js +343 -0
  60. package/web/bwc-fileupload.js +87 -0
  61. package/web/bwc-loading-overlay.js +54 -0
  62. package/web/bwc-t4t-form.js +511 -0
  63. package/web/bwc-table.js +756 -0
  64. package/web/fetch.js +129 -0
  65. package/web/i18n.js +24 -0
  66. package/web/idle.js +49 -0
  67. package/web/parse-jwt.js +15 -0
  68. package/web/pwa.js +84 -0
  69. package/web/sign-pad.js +164 -0
  70. package/web/t4t-fe.js +164 -0
  71. package/web/util.js +126 -0
  72. package/web/web-cam.js +182 -0
package/iso/fetch.js ADDED
@@ -0,0 +1,129 @@
1
// TODO add retry - https://dev.to/ycmjason/javascript-fetch-retry-upon-failure-3p6g
class Fetch {
  /**
   * Isomorphic fetch wrapper with bearer-token handling, optional timeout and
   * a one-shot token refresh + replay on 401 "Token Expired Error".
   * @param {object} options - { baseUrl, credentials, forceLogoutFn, refreshUrl, timeoutMs, maxRetry }
   * @param {object} tokens - { access, refresh }
   */
  constructor (options = {}, tokens = {}) {
    this.options = {
      baseUrl: '',
      credentials: 'same-origin',
      forceLogoutFn: () => {}, // function to call when forcing a logout
      refreshUrl: '',
      timeoutMs: 0,
      maxRetry: 0
    }
    Object.assign(this.options, options)
    this.tokens = { access: '', refresh: '' }
    Object.assign(this.tokens, tokens)
  }

  /**
   * Split a URL into origin, path, full (origin + path, WITHOUT query) and search parts.
   * Absolute URLs are parsed with the URL constructor; relative URLs fall back to
   * baseUrl-derived values.
   * @param {string} url
   * @param {string} baseUrl
   * @returns {object} { urlOrigin, urlPath, urlFull, urlSearch }
   */
  static parseUrl (url, baseUrl = '') {
    let urlPath = url
    let urlOrigin = baseUrl
    let urlSearch = ''
    try {
      urlSearch = (url.lastIndexOf('?') !== -1) ? url.split('?').pop() : '' // handle /abc/def?aa=1&bb=2
      if (urlSearch) {
        urlSearch = '?' + urlSearch // prepend ?
        urlPath = url.slice(0, url.lastIndexOf('?')) // FIX: strip query from path so it is not duplicated in urlFull
      }
      const { origin = '', pathname = '', search = '' } = new URL(url) // http://example.com:3001/abc/ees?aa=1&bb=2
      urlOrigin = origin
      urlPath = pathname
      urlSearch = search
    } catch (e) {
      // relative URL - new URL() throws, keep the baseUrl-derived values above
    }
    const urlFull = urlOrigin + urlPath // never contains the query string
    return { urlOrigin, urlPath, urlFull, urlSearch }
  }

  setOptions (options) { Object.assign(this.options, options) }
  getOptions () { return this.options }

  setTokens (tokens) { Object.assign(this.tokens, tokens) }
  getTokens () { return this.tokens }

  /**
   * Perform an HTTP request. On 401 with message "Token Expired Error" it does a
   * one-off refresh (when options.refreshUrl is set) and replays the request.
   * @param {string} method - HTTP verb
   * @param {string} url - absolute, or relative to options.baseUrl
   * @param {*} body - request payload for POST/PATCH/PUT
   * @param {object|string} query - object gets URL-encoded; string is used as-is
   * @param {object} headers - optional request headers (defaults to Accept: application/json)
   * @returns {Promise<Response>} response with parsed JSON attached as .data
   * @throws {Response|Error} the failing response, or abort/network error
   */
  async http (method, url, body = null, query = null, headers = null) {
    const { urlOrigin, urlFull, urlSearch } = Fetch.parseUrl(url, this.options.baseUrl)
    let timeoutId = null
    try {
      const controller = new AbortController()
      const signal = controller.signal
      if (this.options.timeoutMs > 0) timeoutId = setTimeout(() => controller.abort(), this.options.timeoutMs) // err.name === 'AbortError'

      let qs = (query && typeof query === 'object') // null is also an object
        ? '?' +
        Object.keys(query).map((key) => encodeURIComponent(key) + '=' + encodeURIComponent(query[key])).join('&')
        : (query || '')
      // merge the explicit query argument with any query string already on the url
      qs = qs
        ? (urlSearch ? qs + '&' + urlSearch.substring(1) : qs) // FIX: previously the two query strings were fused without '&'
        : urlSearch

      if (!headers) {
        headers = {
          Accept: 'application/json'
        }
      }
      const options = { method, headers }
      if (this.options.timeoutMs > 0) options.signal = signal
      if (this.options.credentials !== 'include') { // include === HTTPONLY_TOKEN
        if (this.tokens.access) options.headers.Authorization = `Bearer ${this.tokens.access}`
      }
      options.credentials = this.options.credentials

      if (['POST', 'PATCH', 'PUT'].includes(method)) { // check if HTTP method has req body (DELETE is maybe)
        if (body && body instanceof FormData) {
          options.body = body // options.headers['Content-Type'] = 'multipart/form-data' // NOT NEEDED!!!
        } else if (options.headers['Content-Type'] === 'application/x-www-form-urlencoded') {
          options.body = new URLSearchParams(body) // body should be JSON
        } else if (options.headers['Content-Type'] === 'application/octet-stream') {
          options.body = body // handling stream...
        } else {
          options.headers['Content-Type'] = 'application/json' // NEEDED!!!
          options.body = JSON.stringify(body)
        }
      }

      const rv0 = await fetch(urlFull + qs, options)
      const txt0 = await rv0.text() // handle empty body as xxx.json() cannot
      rv0.data = txt0.length ? JSON.parse(txt0) : {}
      if (rv0.status >= 200 && rv0.status < 400) return rv0
      if (rv0.status === 401 && rv0.data.message === 'Token Expired Error' && this.options.refreshUrl) {
        const rv1 = await this.http('POST', urlOrigin + this.options.refreshUrl, { refresh_token: this.tokens.refresh }) // rv1 JSON already processed
        // status code should be < 400 here
        this.tokens.access = rv1.data.access_token
        this.tokens.refresh = rv1.data.refresh_token
        if (options.credentials !== 'include') { // include === HTTPONLY_TOKEN
          if (this.tokens.access) options.headers.Authorization = `Bearer ${this.tokens.access}`
        }
        const rv2 = await fetch(urlFull + qs, options)
        const txt2 = await rv2.text()
        rv2.data = txt2.length ? JSON.parse(txt2) : {}
        return rv2
      }
      throw rv0 // error status, not refreshable
    } catch (e) {
      if (e?.data?.message !== 'Token Expired Error' && (e.status === 401 || e.status === 403)) this.options.forceLogoutFn()
      throw e // some other error
    } finally {
      if (timeoutId) clearTimeout(timeoutId) // FIX: do not leave the abort timer pending after the request settles
    }
  }

  async post (url, body = null, query = null, headers = null) { return this.http('POST', url, body, query, headers) }
  async put (url, body = null, query = null, headers = null) { return this.http('PUT', url, body, query, headers) }
  async patch (url, body = null, query = null, headers = null) { return this.http('PATCH', url, body, query, headers) }
  async del (url, query = null, headers = null) { return this.http('DELETE', url, null, query, headers) }
  async get (url, query = null, headers = null) { return this.http('GET', url, null, query, headers) }
}

export default Fetch
package/iso/fetch2.js ADDED
@@ -0,0 +1,180 @@
1
/**
 * Fetch with AbortController timeout + retry mechanism
 *
 * Features:
 * - Per-attempt timeout via AbortController
 * - Exponential backoff with jitter between retries
 * - Retries on network errors and configurable HTTP status codes
 * - External abort signal support (cancel from outside)
 * - Retry lifecycle hooks (onRetry)
 */

// ─── Default Config ───────────────────────────────────────────────────────────

const DEFAULTS = {
  timeout: 10_000, // ms per attempt before aborting
  retries: 3, // max retry attempts (0 = no retries)
  baseDelay: 300, // ms — base for exponential backoff
  maxDelay: 10_000, // ms — cap on backoff delay
  backoffFactor: 2, // exponential multiplier
  jitter: true, // randomise delay to avoid thundering herd
  retryOn: [408, 429, 500, 502, 503, 504], // HTTP codes to retry on
  onRetry: null, // ({ attempt, total, reason, delay }) => void
};

// ─── Core ─────────────────────────────────────────────────────────────────────

/**
 * fetch() with per-attempt timeout and automatic retry on failure.
 *
 * @param {string} url
 * @param {object} [options] - All standard fetch options, plus:
 * @param {number} [options.timeout] - Ms before each attempt times out
 * @param {number} [options.retries] - Max number of retries after first failure
 * @param {number} [options.baseDelay] - Initial backoff delay in ms
 * @param {number} [options.maxDelay] - Max backoff delay in ms
 * @param {number} [options.backoffFactor] - Exponential backoff multiplier
 * @param {boolean} [options.jitter] - Add randomness to delay
 * @param {number[]} [options.retryOn] - HTTP status codes that trigger a retry
 * @param {Function} [options.onRetry] - Called before each retry attempt
 * @param {AbortSignal} [options.signal] - External signal to cancel all attempts
 * @returns {Promise<Response>}
 */
async function fetchWithRetry(url, options = {}) {
  const {
    timeout,
    retries,
    baseDelay,
    maxDelay,
    backoffFactor,
    jitter,
    retryOn,
    onRetry,
    signal: externalSignal, // caller-supplied cancel signal
    ...fetchOptions // remaining standard fetch options
  } = { ...DEFAULTS, ...options };

  let attempt = 0;

  while (true) {
    // Honour external cancellation before starting each attempt
    if (externalSignal?.aborted) {
      throw new DOMException('Fetch aborted by caller', 'AbortError');
    }

    // Create a per-attempt AbortController for the timeout
    const timeoutController = new AbortController();
    const timeoutId = setTimeout(() => timeoutController.abort(), timeout);

    // Merge the timeout signal with any external signal
    const signal = externalSignal
      ? mergeSignals([timeoutController.signal, externalSignal])
      : timeoutController.signal;

    let response;
    let error;

    try {
      response = await fetch(url, { ...fetchOptions, signal });
    } catch (err) {
      error = err;
    } finally {
      clearTimeout(timeoutId);
    }

    // ── Determine if we should retry ────────────────────────────────────────

    const isTimeout = error?.name === 'AbortError' && timeoutController.signal.aborted;
    const isNetworkErr = error && !isTimeout;
    const isExternalAbort = externalSignal?.aborted;

    // Never retry if the caller explicitly cancelled
    if (isExternalAbort) {
      throw new DOMException('Fetch aborted by caller', 'AbortError');
    }

    const shouldRetry =
      attempt < retries &&
      (isTimeout || isNetworkErr || (response && retryOn.includes(response.status)));

    if (!shouldRetry) {
      // No more retries — resolve or throw
      if (error) throw error;
      return response;
    }

    // ── Backoff before next attempt ──────────────────────────────────────────

    const delay = calcDelay({ attempt, baseDelay, maxDelay, backoffFactor, jitter });

    // Check Retry-After header on 429/503 and honour it if present.
    // FIX: compare against null — a server-sent "Retry-After: 0" parses to 0,
    // which is falsy and previously fell back to the backoff delay instead of
    // being honoured as an immediate retry.
    const retryAfterMs = parseRetryAfter(response?.headers?.get('Retry-After'));
    const waitMs = retryAfterMs != null ? Math.min(retryAfterMs, maxDelay) : delay;

    const retryReason = isTimeout
      ? `Timeout after ${timeout}ms`
      : isNetworkErr
      ? error.message
      : `HTTP ${response.status}`;

    if (typeof onRetry === 'function') {
      onRetry({ attempt: attempt + 1, total: retries, reason: retryReason, delay: waitMs });
    }

    await sleep(waitMs, externalSignal); // sleep is also cancellable
    attempt++;
  }
}
128
+
129
+ // ─── Helpers ──────────────────────────────────────────────────────────────────
130
+
131
/**
 * Exponential backoff delay for a given attempt, capped at maxDelay.
 * With jitter enabled the result is randomised to 50%–100% of the
 * capped value to avoid thundering-herd retries.
 */
function calcDelay({ attempt, baseDelay, maxDelay, backoffFactor, jitter }) {
  const capped = Math.min(baseDelay * backoffFactor ** attempt, maxDelay);
  if (!jitter) return capped;
  return capped * (0.5 + Math.random() * 0.5);
}
139
+
140
/**
 * Combine several AbortSignals into one signal that aborts as soon as ANY
 * of them fires, propagating the firing signal's reason.
 * Uses native AbortSignal.any() when available (Node 20+ / Chrome 116+),
 * otherwise falls back to a manual listener-based merge.
 */
function mergeSignals(signals) {
  if (typeof AbortSignal.any === 'function') {
    return AbortSignal.any(signals);
  }
  // Manual merge for older environments
  const merged = new AbortController();
  const propagate = (source) => merged.abort(source.reason);
  for (const source of signals) {
    if (source.aborted) {
      propagate(source);
      break;
    }
    source.addEventListener('abort', () => propagate(source), { once: true });
  }
  return merged.signal;
}
156
+ }
157
+
158
/**
 * Sleep for ms milliseconds, rejecting early with an AbortError if the
 * signal fires first.
 * FIX: the abort listener is now removed when the timer resolves normally —
 * previously each call left a dead { once: true } listener on the signal,
 * accumulating listeners when one long-lived external signal is reused
 * across many retry attempts.
 */
function sleep(ms, signal) {
  return new Promise((resolve, reject) => {
    if (signal?.aborted) return reject(new DOMException('Aborted', 'AbortError'));
    const onAbort = () => {
      clearTimeout(id);
      reject(new DOMException('Aborted', 'AbortError'));
    };
    const id = setTimeout(() => {
      signal?.removeEventListener('abort', onAbort); // clean up so the signal does not leak listeners
      resolve();
    }, ms);
    signal?.addEventListener('abort', onAbort, { once: true });
  });
}
169
+
170
/**
 * Parse a Retry-After header value into milliseconds.
 * Supports both delta-seconds ("120") and HTTP-date formats; returns null
 * when the header is absent or unparseable.
 */
function parseRetryAfter(header) {
  if (!header) return null;
  const asSeconds = parseInt(header, 10);
  if (!isNaN(asSeconds)) return asSeconds * 1000;
  const asEpoch = new Date(header).getTime();
  return isNaN(asEpoch) ? null : Math.max(0, asEpoch - Date.now());
}
179
+
180
+ export { fetchWithRetry };
@@ -0,0 +1,17 @@
1
//NOSONAR const type = 'error'
// const orig = console[type]
// console[type] = function logError() {
//   orig.apply(console, [`[${new Date().toISOString().replace("T", " ").replace(/\..+/, "")}]`, ...arguments])
// }
//
// Usage (filter out console.log): LogFilter(['log'])

/**
 * Replace each listed console method with a no-op, silencing it.
 * Passing a falsy or empty list is a no-op.
 */
const LogFilter = function (list) {
  if (!list || !list.length) return
  for (const name of list) {
    console[name] = function () { }
  }
}
package/iso/sleep.js ADDED
@@ -0,0 +1,6 @@
1
/**
 * Pause asynchronously for the given number of milliseconds.
 * @param {number} ms - milliseconds to sleep
 * @returns {Promise<void>} resolves after the delay
 */
export const sleep = (ms) =>
  new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
package/iso/ws.js ADDED
@@ -0,0 +1,63 @@
1
// Using options instead of class, only below commented code is use to replace "class Ws {", and the constructor
// const ws = { // similar except need comma after each option
//   instance: null, // web socket instance
//   options: {
//     onmessage: null, // attach message handler
//     endpoint: null,
//     reconnectMS: 0, // reconnect delay in ms, 0 disables reconnect
//   },
class Ws {
  /**
   * Thin auto-reconnecting WebSocket wrapper.
   * @param {object} options - { onmessage, endpoint, reconnectMS }
   * @param {object} tokens - currently unused, kept for API symmetry
   */
  constructor(options = {}, tokens = {}) {
    this.instance = null // web socket instance
    this.options = {
      onmessage: null, // attach message handler
      endpoint: null,
      reconnectMS: 0, // reconnect delay in ms (minimum 1000 enforced), 0 disables reconnect
    }
    Object.assign(this.options, options)
  }
  setOptions (options) { Object.assign(this.options, options) }
  getOptions () { return this.options }

  /** Set/replace the message handler, also on a live connection. */
  setMessage(onmessage) {
    this.options.onmessage = onmessage
    if (this.instance) this.instance.onmessage = this.options.onmessage
  }
  /** Send a message if connected (silently ignored otherwise). */
  send(message) {
    if (this.instance) this.instance.send(message)
  }
  /** Close and forget the current connection. */
  close() {
    if (this.instance) {
      this.instance.close()
      this.instance = null
    }
  }
  /** Open the WebSocket; auto-reconnects on unclean close when reconnectMS > 0. */
  connect() {
    console.log(`ws connecting... endpoint=${this.options.endpoint} reconnectMs=${this.options.reconnectMS}`)
    if (!this.options.endpoint) return console.log('ws connect failed - no endpoint')
    if (this.instance) return console.log('ws connect failed - already connected')

    try {
      this.instance = new WebSocket(this.options.endpoint)
      this.instance.onopen = () => console.log('ws open - connected')
      this.instance.onerror = (err) => console.log(err)
      this.instance.onmessage = this.options.onmessage
      this.instance.onclose = (e) => {
        // FIX: drop the dead socket so a reconnect attempt is not rejected by
        // the "already connected" guard above
        this.instance = null
        // FIX: the option is declared as reconnectMS — this previously read
        // this.options.reconnectMs (wrong case), so reconnect never triggered
        if (!e.wasClean && this.options.reconnectMS) {
          setTimeout(
            () => {
              this.connect()
            },
            this.options.reconnectMS > 1000 ? this.options.reconnectMS : 1000
          )
        } else {
          console.log(`ws connection closed cleanly, code=${e.code} reason=${e.reason}`)
        }
      }
    } catch (e) {
      console.log('ws connect error', e.toString())
    }
  }
}

export default Ws
@@ -0,0 +1,258 @@
1
/**
 * Alibaba Cloud OSS Large File Uploader (Client-Side)
 * Uses the S3-compatible API via pre-signed URLs generated by your backend.
 *
 * OSS S3-compatibility docs:
 *   https://www.alibabacloud.com/help/en/oss/developer-reference/compatibility-with-amazon-s3
 *
 * Key OSS-specific differences from AWS S3:
 * - Endpoint format: https://<bucket>.<region>.aliyuncs.com (path-style not needed)
 * - Minimum multipart part size: 100KB (S3 is 5MB) — we still use 10MB for reliability
 * - Max parts: 10,000 (same as S3)
 * - ETag is returned without quotes in some OSS responses — we normalize this below
 * - CORS must expose the ETag header explicitly on the OSS bucket
 *
 * Usage:
 *   const uploader = new OSSUploader({ signEndpoint: '/api/oss/sign' });
 *   const result = await uploader.upload(file, { onProgress: pct => console.log(pct + '%') });
 *
 * Note on progress reporting with fetch:
 *   The fetch API does not natively expose upload progress (only download progress via
 *   Response.body.getReader()). To track upload progress we wrap the Blob in a
 *   ReadableStream that counts bytes as they are consumed by fetch.
 */

const CHUNK_SIZE = 10 * 1024 * 1024; // 10MB per part
const MULTIPART_THRESHOLD = 5 * 1024 * 1024; // Use multipart above 5MB

class OSSUploader {
  /**
   * @param {object} options
   * @param {string} options.signEndpoint - Your backend endpoint for signed URL generation
   * @param {number} [options.chunkSize] - Bytes per part (default 10MB, min 100KB for OSS)
   * @param {number} [options.maxConcurrent] - Parallel part uploads (default 3)
   * @throws {Error} when signEndpoint is missing
   */
  constructor(options = {}) {
    if (!options.signEndpoint) throw new Error('signEndpoint is required');
    this.signEndpoint = options.signEndpoint;
    this.chunkSize = options.chunkSize || CHUNK_SIZE;
    this.maxConcurrent = options.maxConcurrent || 3;
  }

  // ─── Public API ──────────────────────────────────────────────────────────────

  /**
   * Upload a File or Blob to Alibaba OSS.
   * Files at or below MULTIPART_THRESHOLD (5MB) go through a single signed PUT;
   * larger files use the multipart flow.
   *
   * @param {File|Blob} file
   * @param {object} [opts]
   * @param {string} [opts.key] - OSS object key (defaults to file.name)
   * @param {Function} [opts.onProgress] - Callback with integer 0–100
   * @param {AbortSignal} [opts.signal] - AbortController signal to cancel
   * @returns {Promise<{ key: string, location: string }>}
   */
  async upload(file, opts = {}) {
    const key = opts.key || file.name;
    const onProgress = opts.onProgress || (() => {});
    const signal = opts.signal || null;

    if (file.size <= MULTIPART_THRESHOLD) {
      return this._singleUpload(file, key, onProgress, signal);
    }
    return this._multipartUpload(file, key, onProgress, signal);
  }

  // ─── Single-Part Upload (≤5MB) ───────────────────────────────────────────────

  // One signed PUT of the whole blob; backend returns { signedUrl, location }.
  async _singleUpload(file, key, onProgress, signal) {
    onProgress(0);

    const { signedUrl, location } = await this._callBackend({
      type: 'single',
      key,
      contentType: file.type || 'application/octet-stream',
      size: file.size,
    });

    await this._putBlob(signedUrl, file, file.type, (loaded) => {
      onProgress(Math.round((loaded / file.size) * 100));
    }, signal);

    onProgress(100);
    return { key, location };
  }

  // ─── Multipart Upload (>5MB) ─────────────────────────────────────────────────

  // Three-phase flow: initiate → upload parts (concurrently) → complete.
  // On any part failure the multipart upload is aborted server-side so
  // orphaned parts do not keep accruing storage charges.
  async _multipartUpload(file, key, onProgress, signal) {
    // Step 1 — Initiate: backend calls CreateMultipartUpload, returns uploadId
    const { uploadId } = await this._callBackend({
      type: 'initiate',
      key,
      contentType: file.type || 'application/octet-stream',
    });

    const chunks = this._splitFile(file);
    // per-part uploaded byte counts, summed for overall progress
    const partProgress = new Array(chunks.length).fill(0);

    const reportProgress = () => {
      const uploaded = partProgress.reduce((s, v) => s + v, 0);
      onProgress(Math.round((uploaded / file.size) * 100));
    };

    // Step 2 — Upload parts concurrently
    const completedParts = [];
    const queue = chunks.map((chunk, i) => ({ chunk, partNumber: i + 1, index: i }));

    // Each worker pulls from the shared queue until it is drained.
    // queue.shift() is safe here: JS is single-threaded, so workers only
    // interleave at await points, never in the middle of a shift.
    const worker = async () => {
      while (queue.length > 0) {
        const { chunk, partNumber, index } = queue.shift();

        if (signal?.aborted) throw new DOMException('Upload aborted', 'AbortError');

        // Get a signed URL for this specific part
        const { signedUrl } = await this._callBackend({
          type: 'part',
          key,
          uploadId,
          partNumber,
        });

        // PUT the chunk; OSS returns ETag in response header
        const rawETag = await this._putBlob(
          signedUrl,
          chunk,
          file.type || 'application/octet-stream',
          (loaded) => { partProgress[index] = loaded; reportProgress(); },
          signal,
          /* returnETag= */ true,
        );

        // OSS sometimes returns ETag without surrounding quotes — normalise
        // to the quoted form; falls through unchanged when the stripped value
        // is empty (e.g. a missing/empty header)
        const etag = rawETag?.replace(/"/g, '') ? `"${rawETag.replace(/"/g, '')}"` : rawETag;

        completedParts.push({ PartNumber: partNumber, ETag: etag });
      }
    };

    const workers = Array.from({ length: this.maxConcurrent }, worker);
    try {
      await Promise.all(workers);
    } catch (err) {
      // Best-effort abort to avoid orphaned multipart uploads costing storage
      await this._callBackend({ type: 'abort', key, uploadId }).catch(() => {});
      throw err;
    }

    // Step 3 — Complete: parts must be sorted by PartNumber
    // (workers finish out of order, so completedParts is unordered here)
    completedParts.sort((a, b) => a.PartNumber - b.PartNumber);

    const { location } = await this._callBackend({
      type: 'complete',
      key,
      uploadId,
      parts: completedParts,
    });

    onProgress(100);
    return { key, location };
  }

  // ─── Helpers ─────────────────────────────────────────────────────────────────

  // Slice the file into chunkSize-byte Blobs (last chunk may be smaller).
  _splitFile(file) {
    const chunks = [];
    for (let offset = 0; offset < file.size; offset += this.chunkSize) {
      chunks.push(file.slice(offset, offset + this.chunkSize));
    }
    return chunks;
  }

  // POST a JSON payload to the signing backend and return its parsed JSON reply.
  // Throws with the backend's status + body text on any non-2xx response.
  async _callBackend(payload) {
    const res = await fetch(this.signEndpoint, {
      method: 'POST',
      headers: { 'Content-Type': 'application/json' },
      body: JSON.stringify(payload),
    });
    if (!res.ok) {
      const text = await res.text();
      throw new Error(`Backend error (${res.status}): ${text}`);
    }
    return res.json();
  }

  /**
   * PUT a Blob to a pre-signed OSS URL using the fetch API.
   *
   * Upload progress is tracked by wrapping the Blob in a ReadableStream that
   * counts bytes as they pass through, since fetch does not expose an upload
   * progress event like XHR does.
   *
   * OSS S3-compatible PUT requires:
   * - Content-Type header must match the value used when signing
   * - Do NOT send Content-MD5 unless you included it in the signed headers
   *
   * @param {string} signedUrl
   * @param {Blob} blob
   * @param {string} contentType
   * @param {Function} onProgress - Called with bytes uploaded so far
   * @param {AbortSignal|null} signal
   * @param {boolean} returnETag - If true, resolves with the ETag header value
   * @returns {Promise<string|undefined>}
   */
  async _putBlob(signedUrl, blob, contentType, onProgress, signal, returnETag = false) {
    const totalBytes = blob.size;
    let uploadedBytes = 0;

    // Wrap the blob's byte stream so we can intercept each chunk and report progress.
    // fetch() cannot report upload progress on its own — this is the standard workaround.
    const progressStream = new ReadableStream({
      async start(controller) {
        const reader = blob.stream().getReader();
        try {
          while (true) {
            const { done, value } = await reader.read();
            if (done) { controller.close(); break; }
            uploadedBytes += value.byteLength;
            onProgress(uploadedBytes);
            controller.enqueue(value);
          }
        } catch (err) {
          controller.error(err);
        }
      },
    });

    const headers = {};
    if (contentType) headers['Content-Type'] = contentType;

    // Content-Length must be set explicitly — fetch cannot infer it from a
    // ReadableStream, and OSS requires it for multipart PUT requests.
    // NOTE(review): browsers treat Content-Length as a forbidden request
    // header for fetch and may silently drop it — confirm this works in the
    // target browsers, or that the signed URL does not require the header.
    headers['Content-Length'] = String(totalBytes);

    let response;
    try {
      response = await fetch(signedUrl, {
        method: 'PUT',
        headers,
        body: progressStream,
        signal,
        // Required for streaming request bodies in the fetch API (Chrome 105+, Firefox 112+)
        duplex: 'half',
      });
    } catch (err) {
      if (err.name === 'AbortError') throw new DOMException('Upload aborted', 'AbortError');
      throw new Error(`Network error during OSS upload: ${err.message}`);
    }

    if (!response.ok) {
      // OSS returns XML error bodies — surface them for easier debugging
      const text = await response.text().catch(() => '');
      throw new Error(`OSS PUT failed (HTTP ${response.status}): ${text}`);
    }

    return returnETag ? response.headers.get('ETag') : undefined;
  }
}

export default OSSUploader;
@@ -0,0 +1,31 @@
1
+ The only meaningful change was replacing `_putBlob` — everything else stayed identical. Here's what changed and why:
2
+
3
+ **The core swap — `XMLHttpRequest` → `fetch`**
4
+
5
+ The old `xhr.send(blob)` becomes `fetch(url, { method: 'PUT', body: progressStream, duplex: 'half' })`.
6
+
7
+ **The progress tracking problem**
8
+
9
+ `fetch` has no `upload.onprogress` event like XHR does. The workaround is wrapping the blob in a `ReadableStream` that increments a counter as bytes flow through it:
10
+
11
+ ```js
12
+ const progressStream = new ReadableStream({
13
+ async start(controller) {
14
+ const reader = blob.stream().getReader();
15
+ while (true) {
16
+ const { done, value } = await reader.read();
17
+ if (done) { controller.close(); break; }
18
+ uploadedBytes += value.byteLength; // ← count bytes here
19
+ onProgress(uploadedBytes);
20
+ controller.enqueue(value); // ← pass them along unchanged
21
+ }
22
+ }
23
+ });
24
+ ```
25
+
26
+ **Two things required for streaming bodies in fetch:**
27
+
28
+ 1. `duplex: 'half'` — required by the spec when passing a `ReadableStream` as the body (Chrome 105+, Firefox 112+). Without it, some browsers throw a `TypeError`.
29
+ 2. `Content-Length` set explicitly — fetch can't infer the size from a stream, and OSS requires it for part uploads.
30
+
31
+ **Error handling** is cleaner with fetch since `response.ok` + `response.text()` replaces checking `xhr.status` and reading `xhr.responseText`.