@engine9-io/input-tools 1.8.4 → 1.8.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/file/FileUtilities.js +76 -5
- package/file/S3.js +114 -63
- package/index.js +10 -1
- package/package.json +2 -1
package/file/FileUtilities.js
CHANGED
@@ -12,6 +12,7 @@ const debug = require('debug')('@engine9-io/file');
 const { getXlsxStream } = require('xlstream');
 const csv = require('csv');
 const JSON5 = require('json5');
+const { default: pLimit } = require('p-limit');
 const languageEncoding = require('detect-file-encoding-and-language');
 const R2Worker = require('./R2');
 const S3Worker = require('./S3');
@@ -712,21 +713,77 @@ Worker.prototype.list.metadata = {
   }
 };

-Worker.prototype.listAll = async function ({ directory }) {
+Worker.prototype.listAll = async function ({ directory, start: s, end: e }) {
   if (!directory) throw new Error('directory is required');
+  let start = null;
+  let end = null;
+  if (s) start = relativeDate(s).getTime();
+  if (e) end = relativeDate(e).getTime();
   if (directory.startsWith('s3://') || directory.startsWith('r2://')) {
     const worker = new (directory.startsWith('r2://') ? R2Worker : S3Worker)(this);
     return worker.listAll({ directory });
   }
   const a = await fsp.readdir(directory, { recursive: true });

-
+  let files = a.map((f) => `${directory}/${f}`);
+  if (!start && !end) {
+    return files;
+  }
+
+  const limitedMethod = pLimit(10);
+  const filesWithinLimit = [];
+
+  await Promise.all(
+    files.map((filename) =>
+      limitedMethod(async () => {
+        const stats = await fsp.stat(filename);
+        if (start && stats.mtime < start) {
+          //do not include
+        } else if (end && stats.mtime > end) {
+          //do nothing
+        } else {
+          filesWithinLimit.push({
+            name: filename,
+            type: stats.isDirectory() ? 'directory' : 'file',
+            modifiedAt: new Date(stats.mtime).toISOString()
+          });
+        }
+      })
+    )
+  );
+  return filesWithinLimit;
 };
 Worker.prototype.listAll.metadata = {
   options: {
     directory: { required: true }
   }
 };
+Worker.prototype.moveAll = async function (options) {
+  const { directory, targetDirectory } = options;
+  if (!directory) throw new Error('directory is required');
+  if (directory.startsWith('s3://') || directory.startsWith('r2://')) {
+    const worker = new (directory.startsWith('r2://') ? R2Worker : S3Worker)(this);
+    return worker.moveAll(options);
+  }
+  const a = await this.listAll(options);
+
+  let configs = a.map((f) => {
+    let filename = typeof f === 'string' ? f : f.filename;
+    return {
+      filename,
+      target: filename.replace(directory, targetDirectory)
+    };
+  });
+
+  const limitedMethod = pLimit(10);
+
+  return Promise.all(configs.map(({ filename, target }) => limitedMethod(async () => this.move({ filename, target }))));
+};
+Worker.prototype.moveAll.metadata = {
+  options: {
+    directory: { required: true }
+  }
+};

 Worker.prototype.empty = async function ({ directory }) {
   if (!directory) throw new Error('directory is required');
@@ -772,7 +829,7 @@ Worker.prototype.remove.metadata = {
   }
 };

-Worker.prototype.move = async function ({ filename, target }) {
+Worker.prototype.move = async function ({ filename, target, remove = true }) {
   if (!target) throw new Error('target is required');
   if (typeof target !== 'string') throw new Error(`target isn't a string:${JSON.stringify(target)}`);
   if (target.startsWith('s3://') || target.startsWith('r2://')) {
@@ -793,14 +850,18 @@ Worker.prototype.move = async function ({ filename, target }) {
     if (filename.startsWith('s3://') || filename.startsWith('r2://')) {
       // We need to copy and delete
       const output = await worker.copy({ filename, target });
-      await worker.remove({ filename });
+      if (remove) await worker.remove({ filename });
       return output;
     }
     const parts = target.split('/');
     return worker.put({ filename, directory: parts.slice(0, -1).join('/'), file: parts.slice(-1)[0] });
   }
   await fsp.mkdir(path.dirname(target), { recursive: true });
-
+  if (remove) {
+    await fsp.rename(filename, target);
+  } else {
+    await fsp.copyFile(filename, target);
+  }
   return { filename: target };
 };
 Worker.prototype.move.metadata = {
@@ -810,6 +871,16 @@ Worker.prototype.move.metadata = {
   }
 };

+Worker.prototype.copy = async function (opts) {
+  return this.move({ ...opts, remove: false });
+};
+Worker.prototype.copy.metadata = {
+  options: {
+    filename: {},
+    target: {}
+  }
+};
+
 Worker.prototype.stat = async function ({ filename }) {
   if (!filename) throw new Error('filename is required');
   const output = {};
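
In plain terms, FileUtilities gains a date-filtered listAll (start/end are run through relativeDate), a bulk moveAll, an optional remove flag on move, and a copy method that is simply move with remove: false. A rough usage sketch, not taken from the package docs: the require path mirrors this diff's file layout, and the directories and the '-1d' relative-date shorthand are hypothetical examples.

// Illustrative sketch only; paths and option values are placeholders.
const FileWorker = require('@engine9-io/input-tools/file/FileUtilities');

async function archiveRecent() {
  const worker = new FileWorker();

  // With start/end, listAll returns { name, type, modifiedAt } objects instead of plain paths
  const recent = await worker.listAll({ directory: '/tmp/input', start: '-1d' });
  console.log(recent.length, 'entries modified since the start date');

  // copy() is move() with remove: false, so the source file is left in place
  await worker.copy({ filename: '/tmp/input/a.csv', target: '/tmp/backup/a.csv' });

  // moveAll() lists the directory and moves each file, at most 10 at a time via p-limit
  await worker.moveAll({ directory: '/tmp/input', targetDirectory: '/tmp/archive' });
}
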
package/file/S3.js
CHANGED
@@ -7,13 +7,15 @@ const {
   DeleteObjectCommand,
   GetObjectCommand,
   HeadObjectCommand,
-GetObjectAttributesCommand,
-
+  GetObjectAttributesCommand,
+  PutObjectCommand,
+  ListObjectsV2Command
 } = require('@aws-sdk/client-s3');
 const { getTempFilename } = require('./tools');
+const { default: pLimit } = require('p-limit');

 function Worker() {
-  this.prefix='s3';
+  this.prefix = 's3';
 }

 function getParts(filename) {
@@ -35,18 +37,20 @@ Worker.prototype.getMetadata = async function ({ filename }) {
   const s3Client = this.getClient();
   const { Bucket, Key } = getParts(filename);

-  const resp = await s3Client.send(
-
-
-
-
+  const resp = await s3Client.send(
+    new GetObjectAttributesCommand({
+      Bucket,
+      Key,
+      ObjectAttributes: ['ETag', 'Checksum', 'ObjectParts', 'StorageClass', 'ObjectSize']
+    })
+  );

   return resp;
 };
 Worker.prototype.getMetadata.metadata = {
   options: {
-filename: {}
-}
+    filename: {}
+  }
 };

 Worker.prototype.stream = async function ({ filename }) {
@@ -64,12 +68,16 @@ Worker.prototype.stream = async function ({ filename }) {
 };
 Worker.prototype.stream.metadata = {
   options: {
-filename: {}
-}
+    filename: {}
+  }
 };

 Worker.prototype.copy = async function ({ filename, target }) {
-  if (
+  if (filename.startsWith('s3://') || filename.startsWith('r2://')) {
+    //we're fine
+  } else {
+    throw new Error('Cowardly not copying a file not from s3 -- use put instead');
+  }
   const s3Client = this.getClient();
   const { Bucket, Key } = getParts(target);

@@ -78,7 +86,7 @@ Worker.prototype.copy = async function ({ filename, target }) {
   const command = new CopyObjectCommand({
     CopySource: filename.slice(4), // remove the s3:/
     Bucket,
-Key
+    Key
   });

   return s3Client.send(command);
@@ -87,8 +95,19 @@ Worker.prototype.copy = async function ({ filename, target }) {
 Worker.prototype.copy.metadata = {
   options: {
     filename: {},
-target: {}
-}
+    target: {}
+  }
+};
+Worker.prototype.move = async function ({ filename, target }) {
+  await this.copy({ filename, target });
+  await this.remove({ filename });
+  return { filename: target };
+};
+Worker.prototype.move.metadata = {
+  options: {
+    filename: {},
+    target: {}
+  }
 };

 Worker.prototype.remove = async function ({ filename }) {
@@ -99,8 +118,8 @@ Worker.prototype.remove = async function ({ filename }) {
 };
 Worker.prototype.remove.metadata = {
   options: {
-filename: {}
-}
+    filename: {}
+  }
 };

 Worker.prototype.download = async function ({ filename }) {
@@ -125,15 +144,15 @@ Worker.prototype.download = async function ({ filename }) {
 };
 Worker.prototype.download.metadata = {
   options: {
-filename: {}
-}
+    filename: {}
+  }
 };

 Worker.prototype.put = async function (options) {
   const { filename, directory } = options;
   if (!filename) throw new Error('Local filename required');
-  if (directory?.indexOf('s3://') !== 0
-
+  if (directory?.indexOf('s3://') !== 0 && directory?.indexOf('r2://') !== 0)
+    throw new Error(`directory path must start with s3:// or r2://, is ${directory}`);

   const file = options.file || filename.split('/').pop();
   const parts = directory.split('/');
@@ -147,7 +166,10 @@ Worker.prototype.put = async function (options) {
   const s3Client = this.getClient();

   const command = new PutObjectCommand({
-Bucket,
+    Bucket,
+    Key,
+    Body,
+    ContentType
   });

   return s3Client.send(command);
@@ -156,8 +178,8 @@ Worker.prototype.put.metadata = {
   options: {
     filename: {},
     directory: { description: 'Directory to put file, e.g. s3://foo-bar/dir/xyz' },
-file: { description: 'Name of file, defaults to the filename' }
-}
+    file: { description: 'Name of file, defaults to the filename' }
+  }
 };

 Worker.prototype.write = async function (options) {
@@ -175,7 +197,10 @@ Worker.prototype.write = async function (options) {
   const ContentType = mime.lookup(file);

   const command = new PutObjectCommand({
-Bucket,
+    Bucket,
+    Key,
+    Body,
+    ContentType
   });

   return s3Client.send(command);
@@ -184,11 +209,11 @@ Worker.prototype.write.metadata = {
   options: {
     directory: { description: 'Directory to put file, e.g. s3://foo-bar/dir/xyz' },
     file: { description: 'Name of file, defaults to the filename' },
-content: { description: 'Contents of file' }
-}
+    content: { description: 'Contents of file' }
+  }
 };

-Worker.prototype.list = async function ({ directory, start,end,raw }) {
+Worker.prototype.list = async function ({ directory, start, end, raw }) {
   if (!directory) throw new Error('directory is required');
   let dir = directory;
   while (dir.slice(-1) === '/') dir = dir.slice(0, -1);
@@ -197,59 +222,67 @@ Worker.prototype.list = async function ({ directory, start,end,raw }) {
   const command = new ListObjectsV2Command({
     Bucket,
     Prefix: `${Prefix}/`,
-Delimiter: '/'
+    Delimiter: '/'
   });

   const { Contents: files, CommonPrefixes } = await s3Client.send(command);
   if (raw) return files;
   // debug('Prefixes:', { CommonPrefixes });
-  const output = []
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+  const output = []
+    .concat(
+      (CommonPrefixes || []).map((f) => ({
+        name: f.Prefix.slice(Prefix.length + 1, -1),
+        type: 'directory'
+      }))
+    )
+    .concat(
+      (files || [])
+        .filter(({ LastModified }) => {
+          if (start && new Date(LastModified) < start) {
+            return false;
+          } else if (end && new Date(LastModified) > end) {
+            return false;
+          } else {
+            return true;
+          }
+        })
+        .map(({ Key, Size, LastModified }) => ({
+          name: Key.slice(Prefix.length + 1),
+          type: 'file',
+          size: Size,
+          modifiedAt: new Date(LastModified).toISOString()
+        }))
+    );

   return output;
 };
 Worker.prototype.list.metadata = {
   options: {
-directory: { required: true }
-}
+    directory: { required: true }
+  }
 };
 /* List everything with the prefix */
 Worker.prototype.listAll = async function ({ directory }) {
   if (!directory) throw new Error('directory is required');
   let dir = directory;
   while (dir.slice(-1) === '/') dir = dir.slice(0, -1);
-  const { Bucket, Key
+  const { Bucket, Key } = getParts(dir);
   const s3Client = this.getClient();
   const files = [];
   let ContinuationToken = null;
+  let Prefix = null;
+  if (Key) Prefix = `${Key}/`;
   do {
     const command = new ListObjectsV2Command({
       Bucket,
-      Prefix
-ContinuationToken
+      Prefix,
+      ContinuationToken
       // Delimiter: '/',
     });
     debug(`Sending List command with prefix ${Prefix} with ContinuationToken ${ContinuationToken}`);
-
+
     const result = await s3Client.send(command);
-    const newFiles =
+    const newFiles = result.Contents?.map((d) => `${this.prefix}://${Bucket}/${d.Key}`) || [];
     debug(`Retrieved ${newFiles.length} new files, total ${files.length},sample ${newFiles.slice(0, 3).join(',')}`);
     files.push(...newFiles);
     ContinuationToken = result.NextContinuationToken;
@@ -257,9 +290,28 @@ Worker.prototype.listAll = async function ({ directory }) {
   return files;
 };
 Worker.prototype.listAll.metadata = {
+  options: {
+    directory: { required: true }
+  }
+};
+
+Worker.prototype.moveAll = async function ({ directory, targetDirectory }) {
+  if (!directory || !targetDirectory) throw new Error('directory and targetDirectory required');
+  const files = await this.listAll({ directory });
+  const configs = files.map((d) => ({
+    filename: d,
+    target: d.replace(directory, targetDirectory)
+  }));
+
+  const limitedMethod = pLimit(10);
+
+  return Promise.all(configs.map(({ filename, target }) => limitedMethod(async () => this.move({ filename, target }))));
+};
+Worker.prototype.moveAll.metadata = {
   options: {
     directory: { required: true },
-
+    targetDirectory: { required: true }
+  }
 };

 Worker.prototype.stat = async function ({ filename }) {
@@ -275,26 +327,25 @@ Worker.prototype.stat = async function ({ filename }) {
     ContentLength, // : "3191",
     ContentType, // : "image/jpeg",
     // ETag": "\"6805f2cfc46c0f04559748bb039d69ae\"",
-    LastModified
+    LastModified // : "2016-12-15T01:19:41.000Z",
     // Metadata": {},
     // VersionId": "null"
-
   } = response;
   const modifiedAt = new Date(LastModified);
-  const createdAt = modifiedAt
+  const createdAt = modifiedAt; // Same for S3
   const size = parseInt(ContentLength, 10);

   return {
     createdAt,
     modifiedAt,
     contentType: ContentType,
-size
+    size
   };
 };
 Worker.prototype.stat.metadata = {
   options: {
-filename: {}
-}
+    filename: {}
+  }
 };

 module.exports = Worker;
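
On the S3/R2 side, copy now refuses non-S3 sources, move is implemented as copy plus remove, listAll pages through ListObjectsV2 with a proper Prefix, and the new moveAll relocates every object under a prefix with the same pLimit(10) concurrency cap. A minimal sketch, assuming AWS credentials are already configured and that the worker can be constructed directly (FileUtilities passes the calling worker to the constructor); the bucket and prefix names below are placeholders.

// Hypothetical usage; bucket and prefix names are placeholders.
const S3Worker = require('@engine9-io/input-tools/file/S3');

async function archiveIncoming() {
  const worker = new S3Worker();

  // listAll pages through ListObjectsV2 and returns fully qualified paths
  // such as 's3://my-bucket/incoming/file.csv'
  const files = await worker.listAll({ directory: 's3://my-bucket/incoming' });

  // moveAll copies each object to the new prefix, then deletes the original,
  // running at most 10 moves at a time
  await worker.moveAll({
    directory: 's3://my-bucket/incoming',
    targetDirectory: 's3://my-bucket/archive'
  });
  return files.length;
}
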
package/index.js
CHANGED
@@ -216,12 +216,21 @@ function getInputUUID(a, b) {
   return uuidv5(`${pluginId}:${rid}`, '3d0e5d99-6ba9-4fab-9bb2-c32304d3df8e');
 }

+const timestampMatch = /^\d{13}$/;
+function dateFromString(s) {
+  if (typeof s === 'number') return new Date(s);
+  if (typeof s === 'string') {
+    if (s.match(timestampMatch)) return new Date(parseInt(s));
+  }
+  return new Date(s);
+}
+
 function getUUIDv7(date, inputUuid) {
   /* optional date and input UUID */
   const uuid = inputUuid || uuidv7();
   const bytes = Buffer.from(uuid.replace(/-/g, ''), 'hex');
   if (date !== undefined) {
-    const d =
+    const d = dateFromString(date);
     // isNaN behaves differently than Number.isNaN -- we're actually going for the
     // attempted conversion here

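
The new dateFromString helper lets getUUIDv7 accept epoch-millisecond timestamps (13-digit strings or plain numbers) in addition to anything new Date() can parse. The calls below only illustrate its behavior with made-up values; the helper is module-internal to index.js and is not necessarily exported.

// How dateFromString interprets different inputs (illustrative values):
dateFromString(1734220800000);   // number                              -> new Date(1734220800000)
dateFromString('1734220800000'); // 13-digit string matching /^\d{13}$/ -> new Date(1734220800000)
dateFromString('2024-12-15');    // anything else                       -> new Date('2024-12-15')
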
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@engine9-io/input-tools",
-  "version": "1.8.4",
+  "version": "1.8.6",
   "description": "Tools for dealing with Engine9 inputs",
   "main": "index.js",
   "scripts": {
@@ -26,6 +26,7 @@
     "json5": "^2.2.3",
     "mime-type": "^5.0.3",
     "mkdirp": "^3.0.1",
+    "p-limit": "^7.1.1",
     "parallel-transform": "^1.2.0",
     "throttle-debounce": "^5.0.2",
     "unzipper": "^0.12.1",
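
The only dependency change is the p-limit addition that backs the bounded-concurrency pattern used by listAll, moveAll, and the S3 worker above. A generic sketch of that pattern, assuming a Node version that supports require() of ES modules (p-limit v7 is presumably ESM-only, which would explain why the diff destructures its default export); the statAll helper and the file list are made-up examples.

const { default: pLimit } = require('p-limit'); // ESM package, so grab the default export
const fsp = require('node:fs/promises');

// Stat a list of files with at most 10 stat() calls in flight at once,
// mirroring the pLimit(10) usage in FileUtilities.listAll above.
async function statAll(filenames) {
  const limit = pLimit(10);
  return Promise.all(filenames.map((f) => limit(() => fsp.stat(f))));
}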