@heyputer/puter.js 2.1.6 → 2.1.8
This diff reflects the changes between publicly released versions of the package as they appear in the supported public registries, and is provided for informational purposes only.
- package/dist/puter.cjs +2 -2
- package/index.d.ts +103 -626
- package/package.json +1 -1
- package/src/index.js +91 -91
- package/src/lib/APICallLogger.js +20 -21
- package/src/lib/EventListener.js +10 -10
- package/src/lib/filesystem/APIFS.js +11 -19
- package/src/lib/filesystem/CacheFS.js +25 -25
- package/src/lib/filesystem/PostMessageFS.js +11 -11
- package/src/lib/filesystem/definitions.js +11 -10
- package/src/lib/path.js +505 -446
- package/src/lib/polyfills/fileReaderPoly.js +40 -0
- package/src/lib/polyfills/localStorage.js +30 -33
- package/src/lib/polyfills/xhrshim.js +206 -207
- package/src/lib/utils.js +160 -151
- package/src/lib/xdrpc.js +9 -9
- package/src/modules/AI.js +416 -290
- package/src/modules/Apps.js +56 -56
- package/src/modules/Auth.js +17 -17
- package/src/modules/Debug.js +1 -1
- package/src/modules/Drivers.js +41 -41
- package/src/modules/FSItem.js +64 -62
- package/src/modules/FileSystem/index.js +22 -23
- package/src/modules/FileSystem/operations/copy.js +7 -7
- package/src/modules/FileSystem/operations/deleteFSEntry.js +14 -12
- package/src/modules/FileSystem/operations/getReadUrl.js +16 -14
- package/src/modules/FileSystem/operations/mkdir.js +11 -11
- package/src/modules/FileSystem/operations/move.js +12 -12
- package/src/modules/FileSystem/operations/read.js +10 -10
- package/src/modules/FileSystem/operations/readdir.js +28 -28
- package/src/modules/FileSystem/operations/rename.js +11 -11
- package/src/modules/FileSystem/operations/sign.js +33 -30
- package/src/modules/FileSystem/operations/space.js +7 -7
- package/src/modules/FileSystem/operations/stat.js +25 -25
- package/src/modules/FileSystem/operations/symlink.js +15 -17
- package/src/modules/FileSystem/operations/upload.js +151 -122
- package/src/modules/FileSystem/operations/write.js +16 -12
- package/src/modules/FileSystem/utils/getAbsolutePathForApp.js +10 -6
- package/src/modules/Hosting.js +29 -29
- package/src/modules/KV.js +23 -23
- package/src/modules/OS.js +15 -15
- package/src/modules/Perms.js +19 -21
- package/src/modules/PuterDialog.js +46 -48
- package/src/modules/Threads.js +17 -20
- package/src/modules/UI.js +156 -156
- package/src/modules/Util.js +3 -3
- package/src/modules/Workers.js +52 -49
- package/src/modules/networking/PSocket.js +38 -38
- package/src/modules/networking/PTLS.js +54 -47
- package/src/modules/networking/PWispHandler.js +49 -47
- package/src/modules/networking/parsers.js +110 -108
- package/src/modules/networking/requests.js +67 -78
- package/src/services/APIAccess.js +9 -9
- package/src/services/FSRelay.js +6 -6
- package/src/services/Filesystem.js +8 -8
- package/src/services/NoPuterYet.js +2 -2
- package/src/services/XDIncoming.js +1 -1
package/src/modules/FileSystem/operations/upload.js

@@ -1,19 +1,22 @@
-import path from
+import path from '../../../lib/path.js';
 import * as utils from '../../../lib/utils.js';
 import getAbsolutePathForApp from '../utils/getAbsolutePathForApp.js';
 
-const upload = async function(items, dirPath, options = {}){
+const upload = async function (items, dirPath, options = {}) {
     return new Promise(async (resolve, reject) => {
-        const DataTransferItem = globalThis.DataTransfer || (class DataTransferItem {
-
-        const
-
-
+        const DataTransferItem = globalThis.DataTransfer || (class DataTransferItem {
+        });
+        const FileList = globalThis.FileList || (class FileList {
+        });
+        const DataTransferItemList = globalThis.DataTransferItemList || (class DataTransferItemList {
+        });
+
+        // If auth token is not provided and we are in the web environment,
         // try to authenticate with Puter
-        if(!puter.authToken && puter.env === 'web'){
-            try{
+        if ( !puter.authToken && puter.env === 'web' ) {
+            try {
                 await puter.ui.authenticateWithPuter();
-            }catch(e){
+            } catch (e) {
                 // if authentication fails, throw an error
                 reject(e);
             }
@@ -21,8 +24,10 @@ const upload = async function(items, dirPath, options = {}){
 
         const error = (e) => {
             // if error callback is provided, call it
-            if(options.error && typeof options.error === 'function')
+            if ( options.error && typeof options.error === 'function' )
+            {
                 options.error(e);
+            }
             return reject(e);
         };
 
@@ -30,8 +35,10 @@ const upload = async function(items, dirPath, options = {}){
         let xhr = new XMLHttpRequest();
 
         // Can not write to root
-        if(dirPath === '/')
+        if ( dirPath === '/' )
+        {
             return error('Can not upload to root directory.');
+        }
 
         // If dirPath is not provided or it's not starting with a slash, it means it's a relative path
         // in that case, we need to prepend the app's root directory to it
@@ -44,7 +51,7 @@ const upload = async function(items, dirPath, options = {}){
 
         // Call 'init' callback if provided
         // init is basically a hook that allows the user to get the operation ID and the XMLHttpRequest object
-        if(options.init && typeof options.init === 'function'){
+        if ( options.init && typeof options.init === 'function' ) {
             options.init(operation_id, xhr);
         }
 
@@ -63,66 +70,74 @@ const upload = async function(items, dirPath, options = {}){
         let file_count = 0;
 
         let seemsToBeParsedDataTransferItems = false;
-        if(Array.isArray(items) && items.length > 0){
-            for(let i=0; i<items.length; i++){
-                if(items[i] instanceof DataTransferItem || items[i] instanceof DataTransferItemList){
+        if ( Array.isArray(items) && items.length > 0 ) {
+            for ( let i = 0; i < items.length; i++ ) {
+                if ( items[i] instanceof DataTransferItem || items[i] instanceof DataTransferItemList ) {
                     seemsToBeParsedDataTransferItems = true;
                 }
             }
         }
 
         // DataTransferItemList
-        if(items instanceof DataTransferItemList || items instanceof DataTransferItem
+        if ( items instanceof DataTransferItemList || items instanceof DataTransferItem || items[0] instanceof DataTransferItem || options.parsedDataTransferItems ) {
             // if parsedDataTransferItems is true, it means the user has already parsed the DataTransferItems
-            if(options.parsedDataTransferItems)
+            if ( options.parsedDataTransferItems )
+            {
                 entries = items;
+            }
             else
+            {
                 entries = await puter.ui.getEntriesFromDataTransferItems(items);
+            }
 
             // Sort entries by size ascending
             entries.sort((entry_a, entry_b) => {
-                if ( entry_a.isDirectory && !
-                if ( !
+                if ( entry_a.isDirectory && !entry_b.isDirectory ) return -1;
+                if ( !entry_a.isDirectory && entry_b.isDirectory ) return 1;
                 if ( entry_a.isDirectory && entry_b.isDirectory ) return 0;
-
+
                 return entry_a.size - entry_b.size;
             });
         }
         // FileList/File
-        else if(items instanceof File || items[0] instanceof File || items instanceof FileList || items[0] instanceof FileList){
-            if(!Array.isArray(items))
+        else if ( items instanceof File || items[0] instanceof File || items instanceof FileList || items[0] instanceof FileList ) {
+            if ( ! Array.isArray(items) )
+            {
                 entries = items instanceof FileList ? Array.from(items) : [items];
+            }
             else
+            {
                 entries = items;
+            }
 
             // Sort entries by size ascending
             entries.sort((entry_a, entry_b) => {
                 return entry_a.size - entry_b.size;
-            })
+            });
             // add FullPath property to each entry
-            for(let i=0; i<entries.length; i++){
+            for ( let i = 0; i < entries.length; i++ ) {
                 entries[i].filepath = entries[i].name;
                 entries[i].fullPath = entries[i].name;
             }
         }
         // blob
-        else if(items instanceof Blob){
+        else if ( items instanceof Blob ) {
             // create a File object from the blob
-            let file = new File([items], options.name, { type:
+            let file = new File([items], options.name, { type: 'application/octet-stream' });
             entries = [file];
             // add FullPath property to each entry
-            for(let i=0; i<entries.length; i++){
+            for ( let i = 0; i < entries.length; i++ ) {
                 entries[i].filepath = entries[i].name;
                 entries[i].fullPath = entries[i].name;
             }
         }
         // String
-        else if(typeof items === 'string'){
+        else if ( typeof items === 'string' ) {
             // create a File object from the string
-            let file = new File([items], 'default.txt', { type:
+            let file = new File([items], 'default.txt', { type: 'text/plain' });
             entries = [file];
             // add FullPath property to each entry
-            for(let i=0; i<entries.length; i++){
+            for ( let i = 0; i < entries.length; i++ ) {
                 entries[i].filepath = entries[i].name;
                 entries[i].fullPath = entries[i].name;
             }
@@ -134,50 +149,54 @@ const upload = async function(items, dirPath, options = {}){
 
         // Will hold directories and files to be uploaded
         let dirs = [];
-        let uniqueDirs = {}
+        let uniqueDirs = {};
         let files = [];
 
         // Separate files from directories
-        for(let i=0; i<entries.length; i++){
+        for ( let i = 0; i < entries.length; i++ ) {
             // skip empty entries
-            if(!entries[i])
+            if ( ! entries[i] )
+            {
                 continue;
+            }
             //collect dirs
-            if(entries[i].isDirectory)
-
+            if ( entries[i].isDirectory )
+            {
+                dirs.push({ path: path.join(dirPath, entries[i].finalPath ? entries[i].finalPath : entries[i].fullPath) });
+            }
             // also files
-            else{
+            else {
                 // Dragged and dropped files do not have a finalPath property and hence the fileItem will go undefined.
                 // In such cases, we need default to creating the files as uploaded by the user.
                 let fileItem = entries[i].finalPath ? entries[i].finalPath : entries[i].fullPath;
-                let [dirLevel, fileName] = [fileItem?.slice(0, fileItem?.lastIndexOf(
-
+                let [dirLevel, fileName] = [fileItem?.slice(0, fileItem?.lastIndexOf('/')), fileItem?.slice(fileItem?.lastIndexOf('/') + 1)];
+
                 // If file name is blank then we need to create only an empty directory.
                 // On the other hand if the file name is not blank(could be undefined), we need to create the file.
-                fileName !=
-                if (options.createFileParent && fileItem.includes('/')) {
+                fileName != '' && files.push(entries[i]);
+                if ( options.createFileParent && fileItem.includes('/') ) {
                     let incrementalDir;
                     dirLevel.split('/').forEach((directory) => {
-                        incrementalDir = incrementalDir ? incrementalDir
-                        let filePath = path.join(dirPath, incrementalDir)
+                        incrementalDir = incrementalDir ? `${incrementalDir }/${ directory}` : directory;
+                        let filePath = path.join(dirPath, incrementalDir);
                         // Prevent duplicate parent directory creation
-                        if(!uniqueDirs[filePath]){
+                        if ( ! uniqueDirs[filePath] ) {
                             uniqueDirs[filePath] = true;
-                            dirs.push({path: filePath});
+                            dirs.push({ path: filePath });
                         }
-                    })
+                    });
                 }
             }
             // stats about the upload to come
-            if(entries[i].size !== undefined){
+            if ( entries[i].size !== undefined ) {
                 total_size += (entries[i].size);
                 file_count++;
             }
         }
 
         // Continue only if there are actually any files/directories to upload
-        if(dirs.length === 0 && files.length === 0){
-            return error({code: 'EMPTY_UPLOAD', message: 'No files or directories to upload.'});
+        if ( dirs.length === 0 && files.length === 0 ) {
+            return error({ code: 'EMPTY_UPLOAD', message: 'No files or directories to upload.' });
         }
 
         // Check storage capacity.
@@ -188,56 +207,58 @@ const upload = async function(items, dirPath, options = {}){
         //
         // Space check in 'web' environment is currently not supported since it requires permissions.
         let storage;
-        if(puter.env !== 'web'){
-            try{
+        if ( puter.env !== 'web' ) {
+            try {
                 storage = await this.space();
-                if(storage.capacity - storage.used < total_size){
-                    return error({code: 'NOT_ENOUGH_SPACE', message: 'Not enough storage space available.'});
+                if ( storage.capacity - storage.used < total_size ) {
+                    return error({ code: 'NOT_ENOUGH_SPACE', message: 'Not enough storage space available.' });
                 }
-            }catch(e){
+            } catch (e) {
                 // Ignored
             }
         }
-
+
         // total size of the upload is doubled because we will be uploading the files to the server
         // and then the server will upload them to the cloud
         total_size = total_size * 2;
 
         // holds the data to be sent to the server
         const fd = new FormData();
-
+
         //-------------------------------------------------
-        // Generate the requests to create all the
+        // Generate the requests to create all the
         // folders in this upload
         //-------------------------------------------------
         dirs.sort((a, b) => b.path.length - a.path.length);
         let mkdir_requests = [];
-
-        for(let i=0; i < dirs.length; i++){
+
+        for ( let i = 0; i < dirs.length; i++ ) {
             // update all file paths under this folder if dirname was changed
-            for(let j=0; j<files.length; j++){
+            for ( let j = 0; j < files.length; j++ ) {
                 // if file is in this folder and has not been processed yet
-                if(!files[j].puter_path_param && path.join(dirPath, files[j].filepath).startsWith(
-                    files[j].puter_path_param = `$dir_${i}
+                if ( !files[j].puter_path_param && path.join(dirPath, files[j].filepath).startsWith(`${dirs[i].path }/`) ) {
+                    files[j].puter_path_param = `$dir_${i}/${ path.basename(files[j].filepath)}`;
                 }
             }
-
+
             // update all subdirs under this dir
-            for(let k=0; k < dirs.length; k++){
-                if(!dirs[k].puter_path_param && dirs[k].path.startsWith(dirs[i].path
-                    dirs[k].puter_path_param = `$dir_${i}
+            for ( let k = 0; k < dirs.length; k++ ) {
+                if ( !dirs[k].puter_path_param && dirs[k].path.startsWith(`${dirs[i].path }/`) ) {
+                    dirs[k].puter_path_param = `$dir_${i}/${ path.basename(dirs[k].path)}`;
                 }
             }
         }
 
-        for(let i=0; i < dirs.length; i++){
+        for ( let i = 0; i < dirs.length; i++ ) {
             let parent_path = path.dirname(dirs[i].puter_path_param || dirs[i].path);
             let dir_path = dirs[i].puter_path_param || dirs[i].path;
-
+
             // remove parent path from the beginning of path since path is relative to parent
-            if(parent_path !== '/')
+            if ( parent_path !== '/' )
+            {
                 dir_path = dir_path.replace(parent_path, '');
-
+            }
+
             mkdir_requests.push({
                 op: 'mkdir',
                 parent: parent_path,
@@ -248,23 +269,23 @@ const upload = async function(items, dirPath, options = {}){
                 as: `dir_${i}`,
             });
         }
-
+
         // inverse mkdir_requests so that the root folder is created first
         // and then go down the tree
         mkdir_requests.reverse();
-
+
         fd.append('operation_id', operation_id);
         fd.append('socket_id', this.socket.id);
         fd.append('original_client_socket_id', this.socket.id);
 
         // Append mkdir operations to upload request
-        for(let i=0; i<mkdir_requests.length; i++){
-            fd.append('operation', JSON.stringify(mkdir_requests[i]));
+        for ( let i = 0; i < mkdir_requests.length; i++ ) {
+            fd.append('operation', JSON.stringify(mkdir_requests[i]));
         }
-
+
         // Append file metadata to upload request
-        if(!options.shortcutTo){
-            for(let i=0; i<files.length; i++){
+        if ( ! options.shortcutTo ) {
+            for ( let i = 0; i < files.length; i++ ) {
                 fd.append('fileinfo', JSON.stringify({
                     name: files[i].name,
                     type: files[i].type,
@@ -273,12 +294,12 @@ const upload = async function(items, dirPath, options = {}){
             }
         }
         // Append write operations for each file
-        for(let i=0; i<files.length; i++){
+        for ( let i = 0; i < files.length; i++ ) {
             fd.append('operation', JSON.stringify({
                 op: options.shortcutTo ? 'shortcut' : 'write',
                 dedupe_name: options.dedupeName ?? true,
                 overwrite: options.overwrite ?? false,
-                create_missing_ancestors: (options.createMissingAncestors ||
+                create_missing_ancestors: (options.createMissingAncestors || options.createMissingParents),
                 operation_id: operation_id,
                 path: (
                     files[i].puter_path_param &&
@@ -294,102 +315,108 @@ const upload = async function(items, dirPath, options = {}){
                 app_uid: options.appUID,
             }));
         }
-
+
         // Append files to upload
-        if(!options.shortcutTo){
-            for(let i=0; i<files.length; i++){
+        if ( ! options.shortcutTo ) {
+            for ( let i = 0; i < files.length; i++ ) {
                 fd.append('file', files[i] ?? '');
             }
         }
-
+
         const progress_handler = (msg) => {
-            if(msg.operation_id === operation_id){
-                bytes_uploaded_to_cloud += msg.loaded_diff
+            if ( msg.operation_id === operation_id ) {
+                bytes_uploaded_to_cloud += msg.loaded_diff;
             }
-        }
+        };
 
         // Handle upload progress events from server
         this.socket.on('upload.progress', progress_handler);
 
         // keeps track of the amount of data uploaded to the server
         let previous_chunk_uploaded = null;
-
+
         // open request to server
-        xhr.open(
+        xhr.open('post', (`${this.APIOrigin }/batch`), true);
         // set auth header
-        xhr.setRequestHeader(
+        xhr.setRequestHeader('Authorization', `Bearer ${ this.authToken}`);
 
         // -----------------------------------------------
         // Upload progress: client -> server
         // -----------------------------------------------
-        xhr.upload.addEventListener('progress', function(e){
+        xhr.upload.addEventListener('progress', function (e) {
             // update operation tracker
             let chunk_uploaded;
-            if(previous_chunk_uploaded === null){
+            if ( previous_chunk_uploaded === null ) {
                 chunk_uploaded = e.loaded;
                 previous_chunk_uploaded = 0;
-            }else{
+            } else {
                 chunk_uploaded = e.loaded - previous_chunk_uploaded;
             }
             previous_chunk_uploaded += chunk_uploaded;
             bytes_uploaded_to_server += chunk_uploaded;
 
             // overall operation progress
-            let op_progress = ((bytes_uploaded_to_cloud + bytes_uploaded_to_server)/total_size * 100).toFixed(2);
+            let op_progress = ((bytes_uploaded_to_cloud + bytes_uploaded_to_server) / total_size * 100).toFixed(2);
             op_progress = op_progress > 100 ? 100 : op_progress;
 
             // progress callback function
-            if(options.progress && typeof options.progress === 'function')
+            if ( options.progress && typeof options.progress === 'function' )
+            {
                 options.progress(operation_id, op_progress);
-
-
+            }
+        });
+
         // -----------------------------------------------
         // Upload progress: server -> cloud
         // the following code will check the progress of the upload every 100ms
         // -----------------------------------------------
-        let cloud_progress_check_interval = setInterval(function() {
+        let cloud_progress_check_interval = setInterval(function () {
            // operation progress
-            let op_progress = ((bytes_uploaded_to_cloud + bytes_uploaded_to_server)/total_size * 100).toFixed(2);
-
+            let op_progress = ((bytes_uploaded_to_cloud + bytes_uploaded_to_server) / total_size * 100).toFixed(2);
+
             op_progress = op_progress > 100 ? 100 : op_progress;
-            if(options.progress && typeof options.progress === 'function')
+            if ( options.progress && typeof options.progress === 'function' )
+            {
                 options.progress(operation_id, op_progress);
+            }
         }, 100);
-
+
         // -----------------------------------------------
         // onabort
         // -----------------------------------------------
-        xhr.onabort = ()=>{
+        xhr.onabort = () => {
             // stop the cloud upload progress tracker
             clearInterval(cloud_progress_check_interval);
             // remove progress handler
             this.socket.off('upload.progress', progress_handler);
             // if an 'abort' callback is provided, call it
-            if(options.abort && typeof options.abort === 'function')
+            if ( options.abort && typeof options.abort === 'function' )
+            {
                 options.abort(operation_id);
-
+            }
+        };
 
         // -----------------------------------------------
         // on success/error
         // -----------------------------------------------
-        xhr.onreadystatechange = async (e)=>{
-            if (xhr.readyState === 4) {
+        xhr.onreadystatechange = async (e) => {
+            if ( xhr.readyState === 4 ) {
                 const resp = await utils.parseResponse(xhr);
-                // Error
-                if((xhr.status >= 400 && xhr.status < 600) || (options.strict && xhr.status === 218)) {
+                // Error
+                if ( (xhr.status >= 400 && xhr.status < 600) || (options.strict && xhr.status === 218) ) {
                     // stop the cloud upload progress tracker
                     clearInterval(cloud_progress_check_interval);
 
                     // remove progress handler
                     this.socket.off('upload.progress', progress_handler);
 
-                    // If this is a 'strict' upload (i.e. status code is 218), we need to find out which operation failed
+                    // If this is a 'strict' upload (i.e. status code is 218), we need to find out which operation failed
                     // and call the error callback with that operation.
-                    if(options.strict && xhr.status === 218){
+                    if ( options.strict && xhr.status === 218 ) {
                         // find the operation that failed
                         let failed_operation;
-                        for(let i=0; i<resp.results?.length; i++){
-                            if(resp.results[i].status !== 200){
+                        for ( let i = 0; i < resp.results?.length; i++ ) {
+                            if ( resp.results[i].status !== 200 ) {
                                 failed_operation = resp.results[i];
                                 break;
                             }
@@ -400,18 +427,20 @@ const upload = async function(items, dirPath, options = {}){
                     return error(resp);
                 }
                 // Success
-                else{
-                    if(!resp || !resp.results || resp.results.length === 0){
+                else {
+                    if ( !resp || !resp.results || resp.results.length === 0 ) {
                         // no results
-                        if(puter.debugMode)
+                        if ( puter.debugMode )
+                        {
                             console.log('no results');
+                        }
                     }
-
+
                     let items = resp.results;
                     items = items.length === 1 ? items[0] : items;
 
                     // if success callback is provided, call it
-                    if(options.success && typeof options.success === 'function'){
+                    if ( options.success && typeof options.success === 'function' ) {
                         options.success(items);
                     }
                     // stop the cloud upload progress tracker
@@ -422,16 +451,16 @@ const upload = async function(items, dirPath, options = {}){
                     return resolve(items);
                 }
             }
-        }
-
+        };
+
         // Fire off the 'start' event
-        if(options.start && typeof options.start === 'function'){
+        if ( options.start && typeof options.start === 'function' ) {
             options.start();
         }
 
         // send request
         xhr.send(fd);
-    })
-}
+    });
+};
 
 export default upload;
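Most of the hunks above are formatting changes (spacing, braces, semicolons) plus the new fallback classes for DataTransfer/FileList/DataTransferItemList, but they also show the caller-facing options the operation reads: init, start, progress, abort, success, error, strict, dedupeName, overwrite, createMissingAncestors/createMissingParents, createFileParent, parsedDataTransferItems, shortcutTo, appUID. A minimal usage sketch follows, assuming the operation is exposed as puter.fs.upload() like the rest of the FileSystem module; the input element, target path, and logging are illustrative, not part of the package:

// Hedged sketch: exercising the options handled by the upload operation above.
const picked = document.querySelector('input[type="file"]').files; // FileList
puter.fs.upload(picked, '/myuser/Documents', {
    createMissingParents: true,  // sent as create_missing_ancestors in the batch request
    dedupeName: true,            // let the server rename on name collisions
    overwrite: false,
    strict: true,                // treat an HTTP 218 partial failure as an error
    init: (operation_id, xhr) => console.log('operation', operation_id, 'started'),
    progress: (operation_id, pct) => console.log(`upload ${pct}%`),
    success: (items) => console.log('uploaded', items),
    error: (err) => console.error('upload failed', err),
})
    .then((items) => console.log('resolved with', items))
    .catch((err) => console.error(err));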
package/src/modules/FileSystem/operations/write.js

@@ -1,13 +1,13 @@
-import path from
+import path from '../../../lib/path.js';
 import getAbsolutePathForApp from '../utils/getAbsolutePathForApp.js';
 
 const write = async function (targetPath, data, options = {}) {
     // targetPath is required
-    if(!targetPath){
+    if ( ! targetPath ) {
         throw new Error({ code: 'NO_TARGET_PATH', message: 'No target path provided.' });
     }
     // if targetPath is a File
-    if(targetPath instanceof File && data === undefined){
+    if ( targetPath instanceof File && data === undefined ) {
         data = targetPath;
         targetPath = data.name;
     }
@@ -21,8 +21,10 @@ const write = async function (targetPath, data, options = {}) {
     options.overwrite = options.overwrite ?? true;
 
     // if overwrite is true and dedupeName is not provided, set dedupeName to false
-    if(options.overwrite && options.dedupeName === undefined)
+    if ( options.overwrite && options.dedupeName === undefined )
+    {
         options.dedupeName = false;
+    }
 
     // if targetPath is not provided or it's not starting with a slash, it means it's a relative path
     // in that case, we need to prepend the app's root directory to it
@@ -35,28 +37,30 @@ const write = async function (targetPath, data, options = {}) {
     const parent = path.dirname(targetPath);
 
     // if data is a string, convert it to a File object
-    if(typeof data === 'string'){
-        data = new File([data ?? ''], filename ?? 'Untitled.txt', { type:
+    if ( typeof data === 'string' ) {
+        data = new File([data ?? ''], filename ?? 'Untitled.txt', { type: 'text/plain' });
     }
     // blob
-    else if(data instanceof Blob){
+    else if ( data instanceof Blob ) {
         data = new File([data ?? ''], filename ?? 'Untitled', { type: data.type });
     }
     // typed arrays (Uint8Array, Int8Array, etc.) and ArrayBuffer
-    else if(data instanceof ArrayBuffer || ArrayBuffer.isView(data)){
-        data = new File([data], filename ?? 'Untitled', { type:
+    else if ( data instanceof ArrayBuffer || ArrayBuffer.isView(data) ) {
+        data = new File([data], filename ?? 'Untitled', { type: 'application/octet-stream' });
     }
 
-    if(!data)
+    if ( ! data )
+    {
         data = new File([data ?? ''], filename);
+    }
 
     // data should be a File now. If it's not, it's an unsupported type
-    if (!(data instanceof File)) {
+    if ( ! (data instanceof File) ) {
         throw new Error({ code: 'field_invalid', message: 'write() data parameter is an invalid type' });
     }
 
     // perform upload
     return this.upload(data, parent, options);
-}
+};
 
 export default write;
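The changes to write() are likewise mostly stylistic; the behaviour kept is wrapping strings, Blobs, typed arrays and ArrayBuffers into File objects, defaulting overwrite to true (and dedupeName to false unless set), then delegating to upload() with the parent directory. A short sketch, assuming the operation is exposed as puter.fs.write(); the paths are illustrative:

// Hedged sketch, not part of the package.
await puter.fs.write('notes/hello.txt', 'Hello, Puter!');              // string -> text/plain File
await puter.fs.write('notes/data.bin', new Uint8Array([0x50, 0x4b]));  // typed array -> octet-stream File
const blob = new Blob(['hi'], { type: 'text/plain' });
await puter.fs.write('notes/copy.txt', blob, { overwrite: false, dedupeName: true });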
package/src/modules/FileSystem/utils/getAbsolutePathForApp.js

@@ -1,21 +1,25 @@
-import path from
+import path from '../../../lib/path.js';
 
-const getAbsolutePathForApp = (relativePath)=>{
+const getAbsolutePathForApp = (relativePath) => {
     // if we are in the gui environment, return the relative path as is
-    if(puter.env === 'gui')
+    if ( puter.env === 'gui' )
+    {
         return relativePath;
+    }
 
     // if no relative path is provided, use the current working directory
-    if(!relativePath)
+    if ( ! relativePath )
+    {
         relativePath = '.';
+    }
 
     // If relativePath is not provided, or it's not starting with a slash or tilde,
     // it means it's a relative path. In that case, prepend the app's root directory.
-    if (!relativePath || (!relativePath.startsWith('/') && !relativePath.startsWith('~') && puter.appID)) {
+    if ( !relativePath || (!relativePath.startsWith('/') && !relativePath.startsWith('~') && puter.appID) ) {
         relativePath = path.join('~/AppData', puter.appID, relativePath);
     }
 
     return relativePath;
-}
+};
 
 export default getAbsolutePathForApp;
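The resolution rules preserved here: in the gui environment the path is returned unchanged; otherwise a missing path defaults to the working directory, and any path that does not start with '/' or '~' is rooted under the app's data directory. A hedged sketch of the expected results, using an illustrative appID:

// Assuming puter.env !== 'gui' and puter.appID === 'my-app' (illustrative values).
getAbsolutePathForApp('settings.json');   // -> '~/AppData/my-app/settings.json'
getAbsolutePathForApp();                  // -> '~/AppData/my-app' (defaults to '.')
getAbsolutePathForApp('/shared/a.txt');   // -> '/shared/a.txt'   (absolute, unchanged)
getAbsolutePathForApp('~/Desktop/b.txt'); // -> '~/Desktop/b.txt' (home-relative, unchanged)
// With puter.env === 'gui', every input is returned as-is.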