@ardrive/turbo-sdk 1.25.0 → 1.26.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (48)
  1. package/README.md +170 -62
  2. package/bundles/web.bundle.min.js +1584 -730
  3. package/lib/cjs/common/events.js +256 -0
  4. package/lib/cjs/common/events.test.js +470 -0
  5. package/lib/cjs/common/http.js +4 -13
  6. package/lib/cjs/common/turbo.js +6 -4
  7. package/lib/cjs/common/upload.js +65 -37
  8. package/lib/cjs/node/signer.js +30 -11
  9. package/lib/cjs/node/upload.js +7 -1
  10. package/lib/cjs/utils/axiosClient.js +3 -0
  11. package/lib/cjs/utils/readableStream.js +15 -0
  12. package/lib/cjs/version.js +1 -1
  13. package/lib/cjs/web/signer.js +55 -28
  14. package/lib/esm/common/events.js +249 -0
  15. package/lib/esm/common/events.test.js +468 -0
  16. package/lib/esm/common/http.js +4 -13
  17. package/lib/esm/common/turbo.js +6 -4
  18. package/lib/esm/common/upload.js +66 -38
  19. package/lib/esm/node/signer.js +30 -11
  20. package/lib/esm/node/upload.js +7 -1
  21. package/lib/esm/utils/axiosClient.js +3 -0
  22. package/lib/esm/utils/readableStream.js +15 -0
  23. package/lib/esm/version.js +1 -1
  24. package/lib/esm/web/signer.js +55 -28
  25. package/lib/types/common/events.d.ts +56 -0
  26. package/lib/types/common/events.d.ts.map +1 -0
  27. package/lib/types/common/events.test.d.ts +2 -0
  28. package/lib/types/common/events.test.d.ts.map +1 -0
  29. package/lib/types/common/http.d.ts +1 -2
  30. package/lib/types/common/http.d.ts.map +1 -1
  31. package/lib/types/common/signer.d.ts +1 -1
  32. package/lib/types/common/signer.d.ts.map +1 -1
  33. package/lib/types/common/turbo.d.ts +4 -4
  34. package/lib/types/common/turbo.d.ts.map +1 -1
  35. package/lib/types/common/upload.d.ts +13 -5
  36. package/lib/types/common/upload.d.ts.map +1 -1
  37. package/lib/types/node/signer.d.ts +1 -1
  38. package/lib/types/node/signer.d.ts.map +1 -1
  39. package/lib/types/node/upload.d.ts.map +1 -1
  40. package/lib/types/types.d.ts +61 -7
  41. package/lib/types/types.d.ts.map +1 -1
  42. package/lib/types/utils/axiosClient.d.ts.map +1 -1
  43. package/lib/types/utils/readableStream.d.ts +0 -1
  44. package/lib/types/utils/readableStream.d.ts.map +1 -1
  45. package/lib/types/version.d.ts +1 -1
  46. package/lib/types/web/signer.d.ts +1 -1
  47. package/lib/types/web/signer.d.ts.map +1 -1
  48. package/package.json +9 -7
package/lib/cjs/common/upload.js
@@ -21,6 +21,7 @@ const plimit_lit_1 = require("plimit-lit");
  const axiosClient_js_1 = require("../utils/axiosClient.js");
  const common_js_1 = require("../utils/common.js");
  const errors_js_1 = require("../utils/errors.js");
+ const events_js_1 = require("./events.js");
  const http_js_1 = require("./http.js");
  const logger_js_1 = require("./logger.js");
  exports.creditSharingTagNames = {
@@ -42,19 +43,40 @@ class TurboUnauthenticatedUploadService {
  });
  this.retryConfig = retryConfig;
  }
- async uploadSignedDataItem({ dataItemStreamFactory, dataItemSizeFactory, signal, }) {
+ async uploadSignedDataItem({ dataItemStreamFactory, dataItemSizeFactory, dataItemOpts, signal, events = {}, }) {
  const fileSize = dataItemSizeFactory();
  this.logger.debug('Uploading signed data item...');
- // TODO: add p-limit constraint or replace with separate upload class
- return this.httpService.post({
+ // create the tapped stream with events
+ const emitter = new events_js_1.TurboEventEmitter(events);
+ // create the stream with upload events
+ const { stream: streamWithUploadEvents, resume } = (0, events_js_1.createStreamWithUploadEvents)({
+ data: dataItemStreamFactory(),
+ dataSize: fileSize,
+ emitter,
+ });
+ const headers = {
+ 'content-type': 'application/octet-stream',
+ 'content-length': `${fileSize}`,
+ };
+ if (dataItemOpts !== undefined && dataItemOpts.paidBy !== undefined) {
+ const paidBy = Array.isArray(dataItemOpts.paidBy)
+ ? dataItemOpts.paidBy
+ : [dataItemOpts.paidBy];
+ // TODO: these should be comma separated values vs. an array of headers
+ if (dataItemOpts.paidBy.length > 0) {
+ headers['x-paid-by'] = paidBy;
+ }
+ }
+ // setup the post request using the stream with upload events
+ const postPromise = this.httpService.post({
  endpoint: `/tx/${this.token}`,
  signal,
- data: dataItemStreamFactory(),
- headers: {
- 'content-type': 'application/octet-stream',
- 'content-length': `${fileSize}`,
- },
+ data: streamWithUploadEvents,
+ headers,
  });
+ // resume the stream so events start flowing to the post
+ resume();
+ return postPromise;
  }
  }
  exports.TurboUnauthenticatedUploadService = TurboUnauthenticatedUploadService;
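
The new `events` option on `uploadSignedDataItem` is wired into a `TurboEventEmitter`, so upload callbacks fire as the tapped stream is consumed by the POST. A hedged caller-side sketch (the `turbo` client and `signedDataItem` buffer are assumed; the callback names mirror the TurboEventEmitter options defined in events.js):

```ts
// Hedged sketch: subscribing to upload events when posting a pre-signed data item.
// `turbo` and `signedDataItem` are assumed to exist; the `events` keys mirror the
// TurboEventEmitter options (onUploadProgress, onUploadError, onUploadSuccess).
const response = await turbo.uploadSignedDataItem({
  dataItemStreamFactory: () => signedDataItem, // Buffer (or stream) of the signed item
  dataItemSizeFactory: () => signedDataItem.byteLength,
  events: {
    onUploadProgress: ({ processedBytes, totalBytes }) => {
      console.log(`uploaded ${processedBytes} of ${totalBytes} bytes`);
    },
    onUploadError: (error) => console.error('upload failed', error),
    onUploadSuccess: () => console.log('upload complete'),
  },
});
```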
@@ -67,7 +89,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  /**
  * Signs and uploads raw data to the Turbo Upload Service.
  */
- upload({ data, dataItemOpts, signal, }) {
+ upload({ data, dataItemOpts, signal, events, }) {
  // This function is intended to be usable in both Node and browser environments.
  if ((0, common_js_1.isBlob)(data)) {
  const streamFactory = () => data.stream();
@@ -77,6 +99,7 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  fileSizeFactory: sizeFactory,
  signal,
  dataItemOpts,
+ events,
  });
  }
  const dataBuffer = (() => {
@@ -93,56 +116,53 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  fileSizeFactory: () => dataBuffer.byteLength,
  signal,
  dataItemOpts,
+ events,
  });
  }
- async uploadFile({ fileStreamFactory, fileSizeFactory, signal, dataItemOpts, }) {
+ async uploadFile({ fileStreamFactory, fileSizeFactory, signal, dataItemOpts, events = {}, }) {
  let retries = 0;
  const maxRetries = this.retryConfig.retries ?? 3;
  const retryDelay = this.retryConfig.retryDelay ??
  ((retryNumber) => retryNumber * 1000);
  let lastError = undefined; // Store the last error for throwing
  let lastStatusCode = undefined; // Store the last status code for throwing
+ const emitter = new events_js_1.TurboEventEmitter(events);
+ // avoid duplicating signing on failures here - these errors will immediately be thrown
+ // TODO: create a SigningError class and throw that instead of the generic Error
+ const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
+ fileStreamFactory,
+ fileSizeFactory,
+ dataItemOpts,
+ emitter,
+ });
+ // TODO: move the retry implementation to the http class, and avoid awaiting here. This will standardize the retry logic across all upload methods.
  while (retries < maxRetries) {
  if (signal?.aborted) {
  throw new axios_1.CanceledError();
  }
- const { dataItemStreamFactory, dataItemSizeFactory } = await this.signer.signDataItem({
- fileStreamFactory,
- fileSizeFactory,
- dataItemOpts,
- });
  try {
  this.logger.debug('Uploading signed data item...');
- // TODO: add p-limit constraint or replace with separate upload class
- const headers = {
- 'content-type': 'application/octet-stream',
- 'content-length': `${dataItemSizeFactory()}`,
- };
- if (dataItemOpts !== undefined && dataItemOpts.paidBy !== undefined) {
- const paidBy = Array.isArray(dataItemOpts.paidBy)
- ? dataItemOpts.paidBy
- : [dataItemOpts.paidBy];
- if (dataItemOpts.paidBy.length > 0) {
- headers['x-paid-by'] = paidBy;
- }
- }
- const data = await this.httpService.post({
- endpoint: `/tx/${this.token}`,
+ // Now that we have the signed data item, we can upload it using the uploadSignedDataItem method
+ // which will create a new emitter with upload events. We await
+ // this result due to the wrapped retry logic of this method.
+ const response = await this.uploadSignedDataItem({
+ dataItemStreamFactory,
+ dataItemSizeFactory,
+ dataItemOpts,
  signal,
- data: dataItemStreamFactory(),
- headers,
+ events,
  });
- return data;
+ return response;
  }
  catch (error) {
  // Store the last encountered error and status for re-throwing after retries
  lastError = error;
- if (error instanceof axios_1.AxiosError) {
- lastStatusCode = error.response?.status;
- }
- else if (error instanceof errors_js_1.FailedRequestError) {
+ if (error instanceof errors_js_1.FailedRequestError) {
  lastStatusCode = error.status;
  }
+ else {
+ lastStatusCode = error.response?.status;
+ }
  if (lastStatusCode !== undefined &&
  lastStatusCode >= 400 &&
  lastStatusCode < 500) {
@@ -202,6 +222,14 @@ class TurboAuthenticatedBaseUploadService extends TurboUnauthenticatedUploadServ
  }
  return this.contentTypeFromFile(file);
  }
+ /**
+ * TODO: add events to the uploadFolder method
+ * could be a predicate with a resolveConfig() function, eg: events: ({...file ctx}) => ({
+ * onProgress: (progress) => {
+ * console.log('progress', progress);
+ * },
+ * })
+ */
  async uploadFolder(params) {
  this.logger.debug('Uploading folder...', { params });
  const { dataItemOpts, signal, manifestOptions = {}, maxConcurrentUploads = 1, throwOnFailure = true, } = params;
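
With signing hoisted out of the retry loop, `uploadFile` now signs once and retries only the upload; the same `events` object carries signing, upload, and combined progress callbacks. A hedged caller-side sketch (assumes an authenticated `turbo` client; the file path is illustrative):

```ts
// Hedged sketch: uploadFile with the new events option. `turbo` is assumed to be
// an authenticated client; callback names mirror the TurboEventEmitter options.
import { createReadStream, statSync } from 'node:fs';

const filePath = 'path/to/file.bin'; // illustrative path
const fileSize = statSync(filePath).size;

await turbo.uploadFile({
  fileStreamFactory: () => createReadStream(filePath),
  fileSizeFactory: () => fileSize,
  events: {
    onSigningProgress: ({ processedBytes, totalBytes }) =>
      console.log(`signing: ${processedBytes}/${totalBytes}`),
    onUploadProgress: ({ processedBytes, totalBytes }) =>
      console.log(`uploading: ${processedBytes}/${totalBytes}`),
    // overall progress counts signing as the first half and upload as the second half
    onProgress: ({ processedBytes, totalBytes, step }) =>
      console.log(`${step}: ${Math.round((processedBytes / totalBytes) * 100)}% overall`),
    onError: (error) => console.error('failed', error),
    onSuccess: () => console.log('done'),
  },
});
```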
package/lib/cjs/node/signer.js
@@ -22,6 +22,7 @@ Object.defineProperty(exports, "ArweaveSigner", { enumerable: true, get: functio
  Object.defineProperty(exports, "EthereumSigner", { enumerable: true, get: function () { return arbundles_1.EthereumSigner; } });
  Object.defineProperty(exports, "HexSolanaSigner", { enumerable: true, get: function () { return arbundles_1.HexSolanaSigner; } });
  const stream_1 = require("stream");
+ const events_js_1 = require("../common/events.js");
  const signer_js_1 = require("../common/signer.js");
  const base64_js_1 = require("../utils/base64.js");
  /**
@@ -31,23 +32,41 @@ class TurboNodeSigner extends signer_js_1.TurboDataItemAbstractSigner {
  constructor(p) {
  super(p);
  }
- async signDataItem({ fileStreamFactory, fileSizeFactory, dataItemOpts, }) {
+ async signDataItem({ fileStreamFactory, fileSizeFactory, dataItemOpts, emitter, }) {
  // TODO: replace with our own signer implementation
  this.logger.debug('Signing data item...');
+ // TODO: we could just use tee or PassThrough rather than require a fileStreamFactory
  let [stream1, stream2] = [fileStreamFactory(), fileStreamFactory()];
  stream1 = stream1 instanceof Buffer ? stream_1.Readable.from(stream1) : stream1;
  stream2 = stream2 instanceof Buffer ? stream_1.Readable.from(stream2) : stream2;
- const signedDataItem = await (0, arbundles_1.streamSigner)(stream1, stream2, this.signer, dataItemOpts);
- this.logger.debug('Successfully signed data item...');
- // TODO: support target, anchor, and tags
- const signedDataItemSize = this.calculateSignedDataHeadersSize({
- dataSize: fileSizeFactory(),
- dataItemOpts,
+ // If we have a signing emitter, wrap the stream with events
+ const fileSize = fileSizeFactory();
+ const { stream: streamWithSigningEvents, resume } = (0, events_js_1.createStreamWithSigningEvents)({
+ data: stream1,
+ dataSize: fileSize,
+ emitter,
  });
- return {
- dataItemStreamFactory: () => signedDataItem,
- dataItemSizeFactory: () => signedDataItemSize,
- };
+ try {
+ const signedDataItemPromise = (0, arbundles_1.streamSigner)(streamWithSigningEvents, // TODO: use generics to avoid this cast
+ stream2, this.signer, dataItemOpts);
+ // resume the stream so bytes start flowing to the streamSigner
+ resume();
+ const signedDataItem = await signedDataItemPromise;
+ this.logger.debug('Successfully signed data item...');
+ const signedDataItemSize = this.calculateSignedDataHeadersSize({
+ dataSize: fileSizeFactory(),
+ dataItemOpts,
+ });
+ return {
+ dataItemStreamFactory: () => signedDataItem,
+ dataItemSizeFactory: () => signedDataItemSize,
+ };
+ }
+ catch (error) {
+ // TODO: create a SigningError class and throw that instead of the generic Error
+ emitter?.emit('signing-error', error);
+ throw error;
+ }
  }
  // TODO: make dynamic that accepts anchor and target and tags to return the size of the headers + data
  // reference https://github.com/ArweaveTeam/arweave-standards/blob/master/ans/ANS-104.md#13-dataitem-format
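
The ordering above matters: the tapped stream stays paused until `streamSigner` is attached, and `resume()` is called only afterwards so no bytes (and no progress events) are lost. A standalone sketch of that pause-then-resume pattern using plain Node streams (not SDK code):

```ts
// Minimal sketch of the pause-then-resume tap used by the signer: progress only
// starts flowing once the consumer is attached and resume() is called.
import { PassThrough, Readable } from 'node:stream';

function tapWithProgress(
  source: Readable,
  totalBytes: number,
  onProgress: (processed: number, total: number) => void,
) {
  const tapped = new PassThrough();
  let processed = 0;
  source.pause(); // hold bytes until the consumer is wired up
  source.on('data', (chunk: Buffer) => {
    processed += chunk.length;
    onProgress(processed, totalBytes);
    tapped.write(chunk);
  });
  source.on('end', () => tapped.end());
  source.on('error', (err) => tapped.destroy(err));
  return { stream: tapped, resume: () => source.resume() };
}

// usage: wire up the consumer first, then let bytes flow
const data = Buffer.from('hello world');
const { stream, resume } = tapWithProgress(Readable.from(data), data.byteLength, console.log);
const chunks: Buffer[] = [];
stream.on('data', (chunk: Buffer) => chunks.push(chunk)); // consumer attached here
resume();
```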
package/lib/cjs/node/upload.js
@@ -24,7 +24,13 @@ const upload_js_1 = require("../common/upload.js");
  const types_js_1 = require("../types.js");
  class TurboAuthenticatedUploadService extends upload_js_1.TurboAuthenticatedBaseUploadService {
  constructor({ url = upload_js_1.defaultUploadServiceURL, retryConfig, signer, logger, token, }) {
- super({ url, retryConfig, logger, token, signer });
+ super({
+ url,
+ retryConfig,
+ logger,
+ token,
+ signer,
+ });
  }
  async getAbsoluteFilePathsFromFolder(folderPath) {
  const absoluteFilePaths = [];
package/lib/cjs/utils/axiosClient.js
@@ -70,9 +70,12 @@ const createAxiosInstance = ({ logger = logger_js_1.TurboWinstonLogger.default,
  ...axiosConfig.headers,
  ...exports.defaultRequestHeaders,
  },
+ adapter: 'fetch',
  validateStatus: () => true, // don't throw on non-200 status codes
  });
  if (retryConfig.retries !== undefined && retryConfig.retries > 0) {
+ // eslint-disable-next-line @typescript-eslint/ban-ts-comment
+ // @ts-ignore
  (0, axios_retry_1.default)(axiosInstance, retryConfig);
  }
  return axiosInstance;
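
The switch to axios's built-in `fetch` adapter (available since axios 1.7) is presumably what allows the event-tapped stream bodies to be sent from browser environments. Shown in isolation:

```ts
// Hedged sketch: opting into axios's fetch adapter (axios >= 1.7).
import axios from 'axios';

const client = axios.create({ adapter: 'fetch' });
```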
package/lib/cjs/utils/readableStream.js
@@ -1,6 +1,21 @@
  "use strict";
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.readableStreamToBuffer = readableStreamToBuffer;
+ /**
+ * Copyright (C) 2022-2024 Permanent Data Solutions, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
  async function readableStreamToBuffer({ stream, size, }) {
  const reader = stream.getReader();
  const buffer = Buffer.alloc(size);
package/lib/cjs/version.js
@@ -17,4 +17,4 @@
  Object.defineProperty(exports, "__esModule", { value: true });
  exports.version = void 0;
  // AUTOMATICALLY GENERATED FILE - DO NOT TOUCH
- exports.version = '1.25.0';
+ exports.version = '1.26.0';
package/lib/cjs/web/signer.js
@@ -42,38 +42,65 @@ class TurboWebArweaveSigner extends signer_js_1.TurboDataItemAbstractSigner {
  await this.setPublicKey();
  return super.getPublicKey();
  }
- async signDataItem({ fileStreamFactory, fileSizeFactory, dataItemOpts, }) {
+ async signDataItem({ fileStreamFactory, fileSizeFactory, dataItemOpts, emitter, }) {
  await this.setPublicKey();
- const fileStream = fileStreamFactory();
- // TODO: converts the readable stream to a buffer bc incrementally signing ReadableStreams is not trivial
- const buffer = fileStream instanceof Buffer
- ? fileStream
- : await (0, readableStream_js_1.readableStreamToBuffer)({
- stream: fileStream,
- size: fileSizeFactory(),
+ // Create signing emitter if events are provided
+ const fileSize = fileSizeFactory();
+ try {
+ const fileStream = fileStreamFactory();
+ // start with 0 progress
+ emitter?.emit('signing-progress', {
+ processedBytes: 0,
+ totalBytes: fileSize,
  });
- let signedDataItem;
- this.logger.debug('Signing data item...');
- if (this.signer instanceof arbundles_1.ArconnectSigner) {
- this.logger.debug('Arconnect signer detected, signing with Arconnect signData Item API...');
- const sign = Buffer.from(await this.signer['signer'].signDataItem({
- data: Uint8Array.from(buffer),
- tags: dataItemOpts?.tags,
- target: dataItemOpts?.target,
- anchor: dataItemOpts?.anchor,
- }));
- signedDataItem = new arbundles_1.DataItem(sign);
+ // TODO: implement streamReadableStreamSigner that incrementally signs the stream with events instead of converting to a buffer
+ const buffer = fileStream instanceof Buffer
+ ? fileStream
+ : await (0, readableStream_js_1.readableStreamToBuffer)({
+ stream: fileStream,
+ size: fileSize,
+ });
+ // TODO: replace this with streamSigner that uses a ReadableStream with events
+ emitter?.emit('signing-progress', {
+ processedBytes: Math.floor(fileSize / 2),
+ totalBytes: fileSize,
+ });
+ let signedDataItem;
+ this.logger.debug('Signing data item...');
+ if (this.signer instanceof arbundles_1.ArconnectSigner) {
+ this.logger.debug('Arconnect signer detected, signing with Arconnect signData Item API...');
+ const sign = Buffer.from(await this.signer['signer'].signDataItem({
+ data: Uint8Array.from(buffer),
+ tags: dataItemOpts?.tags,
+ target: dataItemOpts?.target,
+ anchor: dataItemOpts?.anchor,
+ }));
+ signedDataItem = new arbundles_1.DataItem(sign);
+ }
+ else {
+ signedDataItem = (0, arbundles_1.createData)(Uint8Array.from(buffer), this.signer, dataItemOpts);
+ await signedDataItem.sign(this.signer);
+ }
+ // emit last progress event (100%)
+ emitter?.emit('signing-progress', {
+ processedBytes: fileSize,
+ totalBytes: fileSize,
+ });
+ // emit completion event
+ emitter?.emit('signing-success');
+ this.logger.debug('Successfully signed data item...');
+ return {
+ // while this returns a Buffer - it needs to match our return type for uploading
+ dataItemStreamFactory: () => signedDataItem.getRaw(),
+ dataItemSizeFactory: () => signedDataItem.getRaw().length,
+ };
  }
- else {
- signedDataItem = (0, arbundles_1.createData)(Uint8Array.from(buffer), this.signer, dataItemOpts);
- await signedDataItem.sign(this.signer);
+ catch (error) {
+ // If we have a signing emitter, emit error
+ // TODO: create a SigningError class and throw that instead of the generic Error
+ emitter?.emit('signing-error', error);
+ throw error;
  }
- this.logger.debug('Successfully signed data item...');
- return {
- // while this returns a Buffer - it needs to match our return type for uploading
- dataItemStreamFactory: () => signedDataItem.getRaw(),
- dataItemSizeFactory: () => signedDataItem.getRaw().length,
- };
  }
  async generateSignedRequestHeaders() {
  await this.setPublicKey();
package/lib/esm/common/events.js
@@ -0,0 +1,249 @@
+ /**
+ * Copyright (C) 2022-2024 Permanent Data Solutions, Inc.
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ import { EventEmitter } from 'eventemitter3';
+ import { PassThrough, Readable } from 'stream';
+ /**
+ * Creates a ReadableStream with events that emits progress and error events using the event names map.
+ *
+ * E.g.
+ *
+ * ```ts
+ * const eventNamesMap = {
+ * 'on-progress': 'signing-progress', // emits 'signing-progress' on event progress
+ * 'on-error': 'signing-error', // emits 'signing-error' errors
+ * 'on-end': 'signing-success', // emits 'signing-success' on end
+ * };
+ *
+ * const streamWithEvents = createStreamWithEvents({
+ * data,
+ * dataSize,
+ * emitter,
+ * eventNamesMap,
+ * });
+ * ```
+ */
+ function createReadableStreamWithEvents({ data, dataSize, emitter, eventNamesMap, }) {
+ const originalStream = data instanceof ReadableStream
+ ? data
+ : new ReadableStream({
+ start: (controller) => {
+ controller.enqueue(data);
+ controller.close();
+ },
+ });
+ let processedBytes = 0;
+ let reader;
+ const stream = new ReadableStream({
+ start() {
+ reader = originalStream.getReader();
+ },
+ async pull(controller) {
+ try {
+ const { value, done } = await reader.read();
+ if (done) {
+ emitter.emit(eventNamesMap['on-end']);
+ controller.close();
+ return;
+ }
+ processedBytes += value.length;
+ emitter.emit(eventNamesMap['on-progress'], {
+ processedBytes,
+ totalBytes: dataSize,
+ });
+ controller.enqueue(value);
+ }
+ catch (error) {
+ emitter.emit(eventNamesMap['on-error'], error);
+ controller.error(error);
+ }
+ },
+ cancel(reason) {
+ return reader.cancel(reason);
+ },
+ });
+ return {
+ stream,
+ resume: () => void 0, // not needed for ReadableStreams but stubbed out for type compatibility
+ };
+ }
+ /**
+ * Creates an eventing Readable stream that emits progress and error events.
+ *
+ * NOTE: When dealing with Readable streams, any downstream consumer stream will need to call `resume()` once the consumer is properly set up.
+ * If we were to call it internally here, bytes would start flowing due to the configured 'data' event listener.
+ * For ReadableStreams, this is not a concern, so we stub out the resume function.
+ *
+ * Example usage:
+ *
+ * ```ts
+ * const { stream, resume } = createReadableWithEvents({
+ * data,
+ * dataSize,
+ * emitter,
+ * eventNamesMap,
+ * });
+ *
+ * // setup any promise that will consume the stream (e.g. a POST request)
+ * const promise = new Promise((resolve, reject) => {
+ * stream.on('data', (chunk) => {
+ * resolve(chunk);
+ * });
+ * });
+ *
+ * // allow bytes to start flowing so the promise gets the data
+ * resume();
+ *
+ * // wait for the promise to resolve
+ * const result = await promise;
+ * ```
+ */
+ function createReadableWithEvents({ data, dataSize, emitter, eventNamesMap, }) {
+ const existingStream = data instanceof Readable ? data : Readable.from(data);
+ const eventingStream = new PassThrough();
+ // pause the stream to avoid emitting progress events until the stream is ready
+ existingStream.pause();
+ // add listener to emit progress events as the stream is read
+ let processedBytes = 0;
+ existingStream.on('data', (chunk) => {
+ eventingStream.write(chunk);
+ processedBytes += chunk.length;
+ emitter.emit(eventNamesMap['on-progress'], {
+ processedBytes,
+ totalBytes: dataSize,
+ });
+ });
+ existingStream.on('end', () => {
+ emitter.emit(eventNamesMap['on-end']);
+ eventingStream.end();
+ });
+ existingStream.on('error', (error) => {
+ emitter.emit(eventNamesMap['on-error'], error);
+ eventingStream.destroy(error);
+ });
+ return {
+ stream: eventingStream,
+ // allows bytes to start flowing from the original stream when the consumer is ready
+ resume: () => existingStream.resume(),
+ };
+ }
+ /**
+ * Creates an eventing stream from the input data that emits progress and error events
+ */
+ export function createStreamWithEvents({ data, dataSize, emitter, eventNamesMap, }) {
+ if (data instanceof ReadableStream ||
+ (typeof window !== 'undefined' && data instanceof Buffer)) {
+ return createReadableStreamWithEvents({
+ data,
+ dataSize,
+ emitter,
+ eventNamesMap,
+ });
+ }
+ if (data instanceof Readable || data instanceof Buffer) {
+ return createReadableWithEvents({
+ data,
+ dataSize,
+ emitter,
+ eventNamesMap,
+ });
+ }
+ throw new Error('Invalid data or platform type');
+ }
+ export class TurboEventEmitter extends EventEmitter {
+ constructor({ onProgress, onError, onSuccess, onUploadProgress, onUploadError, onUploadSuccess, onSigningProgress, onSigningError, onSigningSuccess, } = {}) {
+ super();
+ if (onUploadProgress !== undefined) {
+ this.on('upload-progress', onUploadProgress);
+ }
+ if (onUploadError !== undefined) {
+ this.on('upload-error', onUploadError);
+ }
+ if (onUploadSuccess !== undefined) {
+ this.on('upload-success', onUploadSuccess);
+ }
+ if (onSigningProgress !== undefined) {
+ this.on('signing-progress', onSigningProgress);
+ }
+ if (onSigningError !== undefined) {
+ this.on('signing-error', onSigningError);
+ }
+ if (onSigningSuccess !== undefined) {
+ this.on('signing-success', onSigningSuccess);
+ }
+ if (onProgress !== undefined) {
+ this.on('overall-progress', onProgress);
+ }
+ if (onError !== undefined) {
+ this.on('overall-error', onError);
+ }
+ if (onSuccess !== undefined) {
+ this.on('overall-success', onSuccess);
+ }
+ // emit listeners for total events
+ this.on('signing-progress', (event) => {
+ this.emit('overall-progress', {
+ ...event,
+ processedBytes: event.processedBytes / 2, // since the total progress requires 2 passes through the stream, signing progress is only half of the total progress
+ totalBytes: event.totalBytes,
+ step: 'signing',
+ });
+ });
+ this.on('signing-error', (error) => {
+ this.emit('overall-error', error);
+ });
+ this.on('upload-progress', (event) => {
+ this.emit('overall-progress', {
+ ...event,
+ processedBytes: event.totalBytes / 2 + event.processedBytes / 2, // Start at 50% since signing is done, then add half of upload progress
+ totalBytes: event.totalBytes,
+ step: 'upload',
+ });
+ });
+ this.on('upload-error', (error) => {
+ this.emit('overall-error', error);
+ });
+ // NOTE: this is the last event emitted for successful upload,
+ // if another step was added (e.g. verifying optimistic caching)
+ // then this overall-success event will be emitted after that step
+ this.on('upload-success', () => {
+ this.emit('overall-success');
+ });
+ }
+ }
+ export function createStreamWithUploadEvents({ data, dataSize, emitter = new TurboEventEmitter(), }) {
+ return createStreamWithEvents({
+ data,
+ dataSize,
+ emitter,
+ eventNamesMap: {
+ 'on-progress': 'upload-progress',
+ 'on-error': 'upload-error',
+ 'on-end': 'upload-success',
+ },
+ });
+ }
+ export function createStreamWithSigningEvents({ data, dataSize, emitter = new TurboEventEmitter(), }) {
+ return createStreamWithEvents({
+ data,
+ dataSize,
+ emitter,
+ eventNamesMap: {
+ 'on-progress': 'signing-progress',
+ 'on-error': 'signing-error',
+ 'on-end': 'signing-success',
+ },
+ });
+ }
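
For reference, the helpers in events.js can be exercised on their own. A hedged sketch of tapping an arbitrary Node stream with upload events (whether `TurboEventEmitter` and `createStreamWithUploadEvents` are re-exported from the package root is not shown in this diff; the import path is an assumption):

```ts
// Hedged sketch: tapping an arbitrary Node stream with upload events.
// TurboEventEmitter and createStreamWithUploadEvents are defined in events.js above;
// importing them from '@ardrive/turbo-sdk' is an assumption here.
import { Readable } from 'node:stream';
import { TurboEventEmitter, createStreamWithUploadEvents } from '@ardrive/turbo-sdk';

const data = Buffer.from('hello world');
const emitter = new TurboEventEmitter({
  onUploadProgress: ({ processedBytes, totalBytes }) =>
    console.log(`${processedBytes}/${totalBytes} bytes read`),
  onUploadSuccess: () => console.log('stream fully consumed'),
});

const { stream, resume } = createStreamWithUploadEvents({
  data: Readable.from(data),
  dataSize: data.byteLength,
  emitter,
});

stream.on('data', () => void 0); // attach a consumer first
resume(); // then let bytes flow so the progress events fire
```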