roboto-js 1.4.50 → 1.5.1

This diff shows the contents of publicly released package versions as they appear in their public registries and is provided for informational purposes only.
@@ -1,17 +1,16 @@
+ import { Upload } from 'tus-js-client';
  import _ from 'lodash';
  import EventEmitter from 'eventemitter3';
  export default class RbtFile extends EventEmitter {
-   static FRAME_SIZE = 1024 * 1024; // 1MB, for example
-
-   constructor(record, axiosInstance, localDb) {
+   constructor(record, axiosInstance) {
      super(); // Call the constructor of EventEmitter
+
+     this.isRbtFile = true;
      this.id = record.id;
      this._axios = axiosInstance;
      this._internalData = record;
      this._data = record.data ? record.data : record.dataJson ? JSON.parse(record.dataJson) : {};
-     this._localDb = localDb;
      this.progress = 0;
-     this.fileHeader = {};
    }
    get(path) {
      return _.get(this._data, path);
@@ -24,6 +23,13 @@ export default class RbtFile extends EventEmitter {
        ...this._data
      };
    }
+   setData(data) {
+     // Use lodash's merge to deeply merge the incoming data into _data
+     _.merge(this._data, data);
+     return {
+       ...this._data
+     }; // Return the updated _data as a new object
+   }
    toRecord() {
      return {
        ...this._internalData,
@@ -53,7 +59,7 @@ export default class RbtFile extends EventEmitter {
      }
      try {
        const record = this.toRecord();
-       const response = await this._axios.post('/object_service/deleteObject', [record]);
+       const response = await this._axios.delete(`/object_service/deleteObject/${this.id}`);
        if (response.data.ok === false) {
          throw new Error(response.data.message);
        }
@@ -64,175 +70,66 @@ export default class RbtFile extends EventEmitter {
        throw e; // Propagate the error
      }
    }
-   async getFileBlobUrl() {
-     const db = this._localDb;
-     const tx = db.transaction('files', 'readonly');
-     const cursor = await tx.store.openCursor();
-     if (!cursor) return null;
-     const fileData = cursor.value.chunk; // Assuming the first chunk has enough data
-     const blob = new Blob([fileData], {
-       type: 'application/octet-stream'
-     });
-     return URL.createObjectURL(blob);
-   }
-   setProgress(newProgress) {
-     this.progress = newProgress;
-     //console.log(`Progress: ${this.progress * 100}%`);
-     this.emit('progress', this.progress); // Emit a progress event
-   }
-   async readAndStoreFile(file) {
-     let offset = 0;
-     this.fileHeader = {
-       id: this.id,
-       name: file.name,
-       size: file.size,
-       ext: this._extractFileExtension(file.name),
-       mimeType: file.type,
-       numFrames: Math.ceil(file.size / RbtFile.FRAME_SIZE)
-     };
-     while (offset < file.size) {
-       const chunk = file.slice(offset, offset + RbtFile.FRAME_SIZE);
-       await new Promise((resolve, reject) => {
-         const reader = new FileReader();
-         reader.onload = async e => {
-           const arrayBuffer = e.target.result;
-           const frameIndex = offset / RbtFile.FRAME_SIZE;
-           await this._storeChunkInIDB(arrayBuffer, frameIndex, this.fileHeader);
-           this.setProgress((frameIndex + 1) / this.fileHeader.numFrames);
-           resolve();
-         };
-         reader.onerror = () => reject(reader.error);
-         reader.readAsArrayBuffer(chunk);
-       });
-       offset += RbtFile.FRAME_SIZE;
-     }
-   }
-   _extractFileExtension(fileName) {
-     // Find the last dot in the filename
-     const lastDotIndex = fileName.lastIndexOf('.');
-
-     // No dot found, or the dot is the first character (hidden files)
-     if (lastDotIndex === -1 || lastDotIndex === 0) return '';
+   uploadFile(file) {
+     return new Promise((resolve, reject) => {
+       const upload = new Upload(file, {
+         endpoint: this._axios.defaults.baseURL + "/file_service/files",
+         retryDelays: [0, 1000, 3000, 5000],
+         metadata: {
+           rbtfileid: this.id,
+           filename: file.name,
+           filetype: file.type
+         },
+         onError: error => {
+           console.error("Failed because:", error);
+           this.emit('error', error);
+           reject(error); // Reject the promise on error
+         },
+         onProgress: (bytesUploaded, bytesTotal) => {
+           const percentage = (bytesUploaded / bytesTotal * 100).toFixed(2);
+           this.emit('progress', percentage / 100); // Emit normalized progress
+         },
+         onSuccess: () => {
+           //console.log("File uploaded to:", upload.url);
+           const m1 = upload.url.match(/\/([^\/]+)$/);
+           const remoteId = m1 ? m1[1] : null;
+           this.setData({
+             'remoteId': remoteId,
+             'remoteSrc': upload.url,
+             'sourceFile': {
+               name: upload.file.name,
+               size: upload.file.size,
+               type: upload.file.type,
+               lastModified: upload.file.lastModified
+             }
+           });

-     // Extract the extension
-     return fileName.substring(lastDotIndex + 1);
-   }
-   async _storeChunkInIDB(chunk, frameIndex, fileHeader) {
-     const db = this._localDb;
-     if (!db) {
-       console.error('Database not initialized');
-       return;
-     }
-     const tx = db.transaction('files', 'readwrite');
-     const frameKey = `${this.id}_${frameIndex}`; // Unique key combining id and frameIndex
-     await tx.store.put({
-       key: frameKey,
-       chunk,
-       fileHeader
-     }, frameKey);
-     await tx.done;
-   }
-   async _readFrameFromIDB(frameIndex) {
-     const db = this._localDb;
-     if (!db) {
-       console.error('Database not initialized');
-       return null;
-     }
-     const frameKey = `${this.id}_${frameIndex}`; // Same key as used in _storeChunkInIDB
-     const tx = db.transaction('files', 'readonly');
-     const frame = await tx.store.get(frameKey);
-     return frame ? frame.chunk : null;
-   }
-   async uploadFile() {
-     if (!this.fileHeader || !this.fileHeader.numFrames) {
-       throw new Error("File not ready for upload.");
-     }
-     this.fileRecord = this.toRecord();
-     let frameRes;
-     for (let frameIndex = 0; frameIndex < this.fileHeader.numFrames; frameIndex++) {
-       try {
-         const frameData = await this._readFrameFromIDB(frameIndex);
-         if (!frameData) {
-           throw new Error(`Failed to read frame ${frameIndex}`);
+           // Ensure save is called and finished before resolving the promise
+           this.save().then(() => {
+             this.emit('success', upload.url);
+             resolve(); // Resolve the promise after save is complete
+           }).catch(error => {
+             console.error("Save failed:", error);
+             reject(error); // Reject the promise if save fails
+           });
          }
-         frameRes = await this._uploadFrameToServer(frameData, frameIndex, this.fileHeader, this.fileRecord);
-
-         // Update progress after each frame is uploaded
-         this.setProgress((frameIndex + 1) / this.fileHeader.numFrames);
-       } catch (error) {
-         console.error(`Error uploading frame ${frameIndex}:`, error);
-         throw error; // Stop the upload process if an error occurs
-       }
-     }
-
-     //
-     // DONE UPLOADING, PROCESSING
-     //
-
-     try {
-       const statusRes = await this._checkUploadStatusUntilDone(this.fileRecord);
-       this.set('remoteSrc', statusRes.remoteSrc);
-       this.set('progress', 1);
-       await this.save();
-     } catch (error) {
-       console.error('Error during upload status check:', error);
-       // Handle error, possibly update UI or retry logic
-     }
-   }
-   async _uploadFrameToServer(frameData, frameIndex, fileHeader, fileRecord) {
-     const base64Data = this.convertToBase64(frameData);
-     const payload = ["@filekit.ffs_server_receiveAndStore_toS3_flow", {
-       "fileObject": fileRecord,
-       // using fileRecord from newFile.toRecord()
-       "fileHeader": fileHeader,
-       "frameIndex": frameIndex,
-       "frameData": `rpcBase64:${base64Data}`,
-       "timeout": 86400000
-     }];
+       });

-     //console.log(payload);
+       // Check if there are any previous uploads to continue.
+       // upload.findPreviousUploads().then(function (previousUploads) {
+       //   // Found previous uploads so we select the first one.
+       //   if (previousUploads.length) {
+       //     upload.resumeFromPreviousUpload(previousUploads[0])
+       //   }

-     const response = await this._axios.post('/file_service/ffs_runFlow', payload);
-     if (!response || response.status != 200) {
-       throw new Error('Error uploading frame to server');
-     }
-     return response;
-   }
-   async _checkUploadStatusUntilDone(fileRecord) {
-     return new Promise((resolve, reject) => {
-       const intervalId = setInterval(async () => {
-         try {
-           const statusRes = await this._uploadGetStatusUpdate(fileRecord);
-           if (statusRes.data && statusRes.data.fileHeader && statusRes.data.fileHeader.status === 'DONE') {
-             clearInterval(intervalId); // Stop the interval
-             resolve(statusRes.data.fileHeader); // Resolve the promise with the final status
-           } else {
-             // Optionally update progress or handle other status cases here
-             //this.emit('progress', statusRes.progress); // Emit progress updates if available
-           }
-         } catch (error) {
-           clearInterval(intervalId); // Stop the interval on error
-           reject(error); // Reject the promise if there's an error
-         }
-       }, 2000); // Check every 2 seconds
+       //   // Start the upload
+       //   upload.start();
+       // })
+       upload.start(); // Start the upload
      });
    }
-   async _uploadGetStatusUpdate(fileRecord) {
-     const payload = ["@filekit.ffs_server_receiveAndStore_toS3_flow", {
-       "fileObject": fileRecord,
-       // using fileRecord from newFile.toRecord()
-       "getStatusUpdate": true
-     }];
-
-     //console.log(payload);
-
-     const response = await this._axios.post('/file_service/ffs_runFlow', payload);
-     if (!response || response.status != 200) {
-       throw new Error('Error getting upload status');
-     }
-     return response;
-   }
-   convertToBase64(buffer) {
-     return btoa(new Uint8Array(buffer).reduce((data, byte) => data + String.fromCharCode(byte), ''));
+   setProgress(newProgress) {
+     this.progress = newProgress;
+     this.emit('progress', this.progress); // Emit a progress event
    }
  }
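
Note that the new setData relies on lodash's _.merge, so incoming keys are deep-merged into the existing _data rather than replacing it, and the method returns a shallow copy of the result. A minimal sketch of that behavior (the record values and the axiosInstance variable are illustrative, not part of the package):

const file = new RbtFile({ id: 'f1', data: { sourceFile: { name: 'a.png' } } }, axiosInstance);
file.setData({ remoteId: 'abc', sourceFile: { size: 2048 } });
file.get('sourceFile'); // { name: 'a.png', size: 2048 } (existing nested keys survive the merge)
file.get('remoteId');   // 'abc'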
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
    "name": "roboto-js",
-   "version": "1.4.50",
+   "version": "1.5.1",
    "type": "module",
    "description": "",
    "main": "dist/cjs/index.cjs",
@@ -25,7 +25,8 @@
    "crypto-js": "^4.1.1",
    "eventemitter3": "^5.0.1",
    "idb": "^8.0.0",
-   "lodash": "^4.17.21"
+   "lodash": "^4.17.21",
+   "tus-js-client": "^4.2.3"
  },
  "devDependencies": {
    "@babel/cli": "^7.23.9",
package/src/index.js CHANGED
@@ -132,6 +132,9 @@ export default class Roboto{
    async loadFile(id){
      return this.api.loadFile(id);
    }
+   async loadFiles(ids){
+     return this.api.loadFiles(ids);
+   }

    //
    //
package/src/rbt_api.js CHANGED
@@ -403,6 +403,27 @@ export default class RbtApi {

    }

+   async loadFiles(ids) {
+     try {
+       // Use the bulk load method with the provided IDs
+       const responses = await this.load('<@filekit.file>', ids);
+
+       if (!responses || !Array.isArray(responses)) {
+         return []; // Return an empty array if no responses or invalid data
+       }
+
+       // Map over the responses to create RbtFile instances
+       return responses.map(response => {
+         const record = response.toRecord();
+         return new RbtFile(record, this.axios, this.localDb);
+       });
+     } catch (e) {
+       this._handleError(e); // Handle errors (log or process as needed)
+       return []; // Return an empty array on failure
+     }
+   }
+
+

    /**
     * Creates a new object of the given type.
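
loadFiles is the bulk counterpart of loadFile: it loads all records in one call and wraps each one in an RbtFile. A hedged usage sketch (the api variable stands for an already-initialized RbtApi instance; the IDs are placeholders):

const files = await api.loadFiles(['fileId1', 'fileId2']);
files.forEach(f => {
  console.log(f.isRbtFile, f.get('remoteSrc')); // each entry is an RbtFile instance
});
// On a failed request or a non-array response, loadFiles resolves to [] instead of throwing.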
package/src/rbt_file.js CHANGED
@@ -1,22 +1,18 @@
-
+ import { Upload } from 'tus-js-client';
  import _ from 'lodash';
  import EventEmitter from 'eventemitter3';

- export default class RbtFile extends EventEmitter{
-
-   static FRAME_SIZE = 1024 * 1024; // 1MB, for example
-
-   constructor(record, axiosInstance, localDb) {
+ export default class RbtFile extends EventEmitter {

+   constructor(record, axiosInstance) {
      super(); // Call the constructor of EventEmitter
+
+     this.isRbtFile = true;
      this.id = record.id;
      this._axios = axiosInstance;
      this._internalData = record;
      this._data = record.data ? record.data : (record.dataJson ? JSON.parse(record.dataJson) : {});
-     this._localDb = localDb;
      this.progress = 0;
-
-     this.fileHeader = {};
    }

    get(path) {
@@ -31,6 +27,12 @@ export default class RbtFile extends EventEmitter{
      return { ...this._data };
    }

+   setData(data) {
+     // Use lodash's merge to deeply merge the incoming data into _data
+     _.merge(this._data, data);
+     return { ...this._data }; // Return the updated _data as a new object
+   }
+
    toRecord() {
      return {
        ...this._internalData,
@@ -52,7 +54,6 @@ export default class RbtFile extends EventEmitter{
      }

      this._internalData = response.data;
-
      return this;

    } catch (e) {
@@ -68,7 +69,7 @@ export default class RbtFile extends EventEmitter{

    try {
      const record = this.toRecord();
-     const response = await this._axios.post('/object_service/deleteObject', [record]);
+     const response = await this._axios.delete(`/object_service/deleteObject/${this.id}`);

      if (response.data.ok === false) {
        throw new Error(response.data.message);
@@ -83,210 +84,71 @@ export default class RbtFile extends EventEmitter{
      }
    }

-   async getFileBlobUrl() {
-     const db = this._localDb;
-     const tx = db.transaction('files', 'readonly');
-     const cursor = await tx.store.openCursor();
-     if (!cursor) return null;
-     const fileData = cursor.value.chunk; // Assuming the first chunk has enough data
-     const blob = new Blob([fileData], { type: 'application/octet-stream' });
-     return URL.createObjectURL(blob);
-   }
-
-   setProgress(newProgress) {
-     this.progress = newProgress;
-     //console.log(`Progress: ${this.progress * 100}%`);
-     this.emit('progress', this.progress); // Emit a progress event
-
-   }
-
-
-   async readAndStoreFile(file) {
-     let offset = 0;
-     this.fileHeader = {
-       id: this.id,
-       name: file.name,
-       size: file.size,
-       ext: this._extractFileExtension(file.name),
-       mimeType: file.type,
-       numFrames: Math.ceil(file.size / RbtFile.FRAME_SIZE)
-     };
-
-     while (offset < file.size) {
-       const chunk = file.slice(offset, offset + RbtFile.FRAME_SIZE);
-       await new Promise((resolve, reject) => {
-         const reader = new FileReader();
-         reader.onload = async (e) => {
-           const arrayBuffer = e.target.result;
-           const frameIndex = offset / RbtFile.FRAME_SIZE;
-           await this._storeChunkInIDB(arrayBuffer, frameIndex, this.fileHeader);
-           this.setProgress((frameIndex + 1) / this.fileHeader.numFrames);
-           resolve();
-         };
-         reader.onerror = () => reject(reader.error);
-         reader.readAsArrayBuffer(chunk);
-       });
-       offset += RbtFile.FRAME_SIZE;
-     }
-   }
-
-   _extractFileExtension(fileName) {
-     // Find the last dot in the filename
-     const lastDotIndex = fileName.lastIndexOf('.');
-
-     // No dot found, or the dot is the first character (hidden files)
-     if (lastDotIndex === -1 || lastDotIndex === 0) return '';
-
-     // Extract the extension
-     return fileName.substring(lastDotIndex + 1);
-   }
-
-   async _storeChunkInIDB(chunk, frameIndex, fileHeader) {
-     const db = this._localDb;
-     if (!db) {
-       console.error('Database not initialized');
-       return;
-     }
-     const tx = db.transaction('files', 'readwrite');
-     const frameKey = `${this.id}_${frameIndex}`; // Unique key combining id and frameIndex
-     await tx.store.put({ key: frameKey, chunk, fileHeader }, frameKey);
-     await tx.done;
-   }
-
-   async _readFrameFromIDB(frameIndex) {
-     const db = this._localDb;
-     if (!db) {
-       console.error('Database not initialized');
-       return null;
-     }
-     const frameKey = `${this.id}_${frameIndex}`; // Same key as used in _storeChunkInIDB
-     const tx = db.transaction('files', 'readonly');
-     const frame = await tx.store.get(frameKey);
-
-     return frame ? frame.chunk : null;
-   }
-
-   async uploadFile() {
-     if (!this.fileHeader || !this.fileHeader.numFrames) {
-       throw new Error("File not ready for upload.");
-     }
-
-     this.fileRecord = this.toRecord();
-
-     let frameRes;
-
-     for (let frameIndex = 0; frameIndex < this.fileHeader.numFrames; frameIndex++) {
-       try {
-         const frameData = await this._readFrameFromIDB(frameIndex);
-         if (!frameData) {
-           throw new Error(`Failed to read frame ${frameIndex}`);
+   uploadFile(file) {
+     return new Promise((resolve, reject) => {
+       const upload = new Upload(file, {
+         endpoint: this._axios.defaults.baseURL + "/file_service/files",
+         retryDelays: [0, 1000, 3000, 5000],
+         metadata: {
+           rbtfileid: this.id,
+           filename: file.name,
+           filetype: file.type,
+         },
+         onError: error => {
+           console.error("Failed because:", error);
+           this.emit('error', error);
+           reject(error); // Reject the promise on error
+         },
+         onProgress: (bytesUploaded, bytesTotal) => {
+           const percentage = (bytesUploaded / bytesTotal * 100).toFixed(2);
+           this.emit('progress', percentage / 100); // Emit normalized progress
+         },
+         onSuccess: () => {
+           //console.log("File uploaded to:", upload.url);
+           const m1 = upload.url.match(/\/([^\/]+)$/);
+           const remoteId = m1 ? m1[1] : null;
+
+           this.setData({
+             'remoteId': remoteId,
+             'remoteSrc': upload.url,
+             'sourceFile': {
+               name: upload.file.name,
+               size: upload.file.size,
+               type: upload.file.type,
+               lastModified: upload.file.lastModified,
+             }
+           });
+
+           // Ensure save is called and finished before resolving the promise
+           this.save().then(() => {
+             this.emit('success', upload.url);
+             resolve(); // Resolve the promise after save is complete
+           }).catch(error => {
+             console.error("Save failed:", error);
+             reject(error); // Reject the promise if save fails
+           });
          }
+       });

-         frameRes = await this._uploadFrameToServer(frameData, frameIndex, this.fileHeader, this.fileRecord);
-
-         // Update progress after each frame is uploaded
-         this.setProgress((frameIndex + 1) / this.fileHeader.numFrames);
-       } catch (error) {
-         console.error(`Error uploading frame ${frameIndex}:`, error);
-         throw error; // Stop the upload process if an error occurs
-       }
-     }
-
-
-     //
-     // DONE UPLOADING, PROCESSING
-     //
-
-     try {
-       const statusRes = await this._checkUploadStatusUntilDone(this.fileRecord);
-
-       this.set('remoteSrc', statusRes.remoteSrc);
-       this.set('progress', 1);
-       await this.save();
-     } catch (error) {
-       console.error('Error during upload status check:', error);
-       // Handle error, possibly update UI or retry logic
-     }
-
-   }
-
-   async _uploadFrameToServer(frameData, frameIndex, fileHeader, fileRecord) {
-
-     const base64Data = this.convertToBase64(frameData);
-     const payload = [
-       "@filekit.ffs_server_receiveAndStore_toS3_flow",
-       {
-         "fileObject": fileRecord, // using fileRecord from newFile.toRecord()
-         "fileHeader": fileHeader,
-         "frameIndex": frameIndex,
-         "frameData": `rpcBase64:${base64Data}`,
-         "timeout": 86400000
-       }
-     ];
-
-     //console.log(payload);
-
-     const response = await this._axios.post('/file_service/ffs_runFlow', payload);
-
-     if (!response || response.status != 200) {
-       throw new Error('Error uploading frame to server');
-     }
-
-     return response;
-   }
-
-
-   async _checkUploadStatusUntilDone(fileRecord) {

-     return new Promise((resolve, reject) => {
-       const intervalId = setInterval(async () => {
-         try {
-           const statusRes = await this._uploadGetStatusUpdate(fileRecord);
-           if (statusRes.data && statusRes.data.fileHeader && statusRes.data.fileHeader.status === 'DONE') {
-             clearInterval(intervalId); // Stop the interval
-             resolve(statusRes.data.fileHeader); // Resolve the promise with the final status
-           } else {
-             // Optionally update progress or handle other status cases here
-             //this.emit('progress', statusRes.progress); // Emit progress updates if available
-           }
-         } catch (error) {
-           clearInterval(intervalId); // Stop the interval on error
-           reject(error); // Reject the promise if there's an error
-         }
-       }, 2000); // Check every 2 seconds
-     });
+       // Check if there are any previous uploads to continue.
+       // upload.findPreviousUploads().then(function (previousUploads) {
+       //   // Found previous uploads so we select the first one.
+       //   if (previousUploads.length) {
+       //     upload.resumeFromPreviousUpload(previousUploads[0])
+       //   }

+       //   // Start the upload
+       //   upload.start();
+       // })
+       upload.start(); // Start the upload
+     });
    }



-   async _uploadGetStatusUpdate(fileRecord){
-
-     const payload = [
-       "@filekit.ffs_server_receiveAndStore_toS3_flow",
-       {
-         "fileObject": fileRecord, // using fileRecord from newFile.toRecord()
-         "getStatusUpdate": true
-       }
-     ];
-
-     //console.log(payload);
-
-     const response = await this._axios.post('/file_service/ffs_runFlow', payload);
-
-     if (!response || response.status != 200) {
-       throw new Error('Error getting upload status');
-     }
-
-     return response;
-
-
-   }
-
-   convertToBase64(buffer) {
-     return btoa(
-       new Uint8Array(buffer)
-         .reduce((data, byte) => data + String.fromCharCode(byte), '')
-     );
+   setProgress(newProgress) {
+     this.progress = newProgress;
+     this.emit('progress', this.progress); // Emit a progress event
    }
  }
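
Taken together, 1.5.1 swaps the old IndexedDB frame pipeline (readAndStoreFile, _storeChunkInIDB, and the ffs_runFlow polling) for a single resumable tus upload. A hedged end-to-end sketch of the new flow; how the RbtFile instance and the browser File object are obtained here is illustrative, while the event names, the uploadFile signature, and the remoteSrc key come from the diff above:

const rbtFile = await roboto.loadFile('someFileId'); // assumes a configured roboto-js client
rbtFile.on('progress', p => console.log(`upload ${Math.round(p * 100)}%`)); // normalized 0..1
rbtFile.on('error', err => console.error('upload failed', err));

await rbtFile.uploadFile(browserFile); // e.g. a File taken from an <input type="file">
console.log(rbtFile.get('remoteSrc')); // the tus upload URL persisted via setData() and save()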