@nxtedition/lib 19.0.27 → 19.0.29

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/deepstream.js CHANGED
@@ -119,6 +119,25 @@ function get(ds, name, ...args) {
119
119
  )
120
120
  }
121
121
 
122
// Resolve a deepstream record handle by name, optionally appending a query
// string. If the first extra argument is `null` or an object it is treated as
// the query and serialized with `qs` (null values skipped); any remaining
// arguments are forwarded to `ds.record.getRecord` untouched.
function getRecord(ds, name, ...args) {
  let query = null
  if (args.length > 0 && (args[0] == null || typeof args[0] === 'object')) {
    query = args.shift()
  }

  // Coerce non-string names (e.g. numbers) to strings.
  name = `${name}`

  let recordName = name
  if (query && Object.keys(query).length > 0) {
    // Avoid a double '?' when the caller already ended the name with one.
    const separator = name.endsWith('?') ? '' : '?'
    recordName += separator + qs.stringify(query, { skipNulls: true })
  }

  return ds.record.getRecord(recordName, ...args)
}
140
+
122
141
  function query(ds, designId, options) {
123
142
  const next = (startkey, prevRows, limit) =>
124
143
  Number.isFinite(limit) && limit <= 0
@@ -164,6 +183,7 @@ export function makeDeepstream(ds) {
164
183
  query: (...args) => query(ds, ...args),
165
184
  set: (...args) => ds.record.set(...args),
166
185
  get: (...args) => get(ds, ...args),
186
+ getRecord: (...args) => getRecord(ds, ...args),
167
187
  update: (...args) => ds.record.update(...args),
168
188
  },
169
189
  }
@@ -177,11 +197,13 @@ Object.assign(makeDeepstream, {
177
197
  observe2,
178
198
  query,
179
199
  get,
200
+ getRecord,
180
201
  record: {
181
202
  provide,
182
203
  observe,
183
204
  observe2,
184
205
  query,
185
206
  get,
207
+ getRecord,
186
208
  },
187
209
  })
package/errors.js CHANGED
@@ -47,7 +47,7 @@ export function serializeError(error) {
47
47
  }
48
48
 
49
49
  if (typeof error === 'string') {
50
- return serializeError({ message: error })
50
+ return [serializeError({ message: error })]
51
51
  }
52
52
 
53
53
  if (Buffer.isBuffer(error)) {
@@ -56,7 +56,7 @@ export function serializeError(error) {
56
56
 
57
57
  if (Array.isArray(error)) {
58
58
  const errors = error.map(serializeError).filter(Boolean)
59
- return errors.length === 0 ? null : errors.length === 1 ? errors[0] : errors
59
+ return errors
60
60
  }
61
61
 
62
62
  if (Object.prototype.hasOwnProperty.call(error, kSeen)) {
@@ -105,26 +105,28 @@ export function serializeError(error) {
105
105
  signalCode = SIGNALS[signalCode] ?? signalCode
106
106
  }
107
107
 
108
- errors = Array.isArray(errors) ? errors.map(serializeError) : undefined
108
+ errors = Array.isArray(errors) ? errors.map(serializeError).filter(Boolean) : undefined
109
109
  cause = cause ? serializeError(cause) : undefined
110
110
 
111
111
  delete error[kSeen]
112
112
 
113
- return JSON.parse(
114
- JSON.stringify({
115
- ...properties,
116
- message,
117
- type,
118
- code,
119
- exitCode,
120
- signalCode,
121
- statusCode,
122
- headers,
123
- data,
124
- cause,
125
- errors,
126
- }),
127
- )
113
+ return [
114
+ JSON.parse(
115
+ JSON.stringify({
116
+ ...properties,
117
+ message,
118
+ type,
119
+ code,
120
+ exitCode,
121
+ signalCode,
122
+ statusCode,
123
+ headers,
124
+ data,
125
+ cause,
126
+ errors,
127
+ }),
128
+ ),
129
+ ]
128
130
  }
129
131
 
130
132
  // TODO (fix): Recursion guard?
package/http.js CHANGED
@@ -23,6 +23,9 @@ function genReqId() {
23
23
  return `req-${nextReqId.toString(36)}`
24
24
  }
25
25
 
26
+ let reqTimeoutError
27
+ let resTimeoutError
28
+
26
29
  export async function request(ctx, next) {
27
30
  const { req, res, logger } = ctx
28
31
  const startTime = performance.now()
@@ -62,14 +65,14 @@ export async function request(ctx, next) {
62
65
  new Promise((resolve, reject) => {
63
66
  req
64
67
  .on('timeout', function () {
65
- this.destroy(new createError.RequestTimeout())
68
+ this.destroy((reqTimeoutError ??= new createError.RequestTimeout()))
66
69
  })
67
70
  .on('error', function (err) {
68
71
  this.log.error({ err }, 'request error')
69
72
  })
70
73
  res
71
74
  .on('timeout', function () {
72
- this.destroy(new createError.RequestTimeout())
75
+ this.destroy((resTimeoutError ??= new createError.RequestTimeout()))
73
76
  })
74
77
  .on('error', function (err) {
75
78
  reject(err)
package/merge-ranges.js CHANGED
@@ -29,6 +29,10 @@ export default function mergeRanges(ranges) {
29
29
  const range = ranges[n]
30
30
  const top = stack[stack.length - 1]
31
31
 
32
+ if (range.length !== 2 || !Number.isFinite(range[0]) || !Number.isFinite(range[1])) {
33
+ continue
34
+ }
35
+
32
36
  if (top[1] < range[0]) {
33
37
  // No overlap, push range onto stack
34
38
  stack.push([range[0], range[1]])
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nxtedition/lib",
3
- "version": "19.0.27",
3
+ "version": "19.0.29",
4
4
  "license": "MIT",
5
5
  "author": "Robert Nagy <robert.nagy@boffins.se>",
6
6
  "type": "module",
@@ -16,6 +16,7 @@
16
16
  "elasticsearch.js",
17
17
  "merge-ranges.js",
18
18
  "http.js",
19
+ "s3.js",
19
20
  "deepstream.js",
20
21
  "logger.js",
21
22
  "mime.js",
@@ -35,7 +36,7 @@
35
36
  "prepare": "husky install",
36
37
  "prepublishOnly": "pinst --disable",
37
38
  "postpublish": "pinst --enable",
38
- "test": "node --test __tests__ && npx tap test"
39
+ "test": "node --test"
39
40
  },
40
41
  "lint-staged": {
41
42
  "*.{js,jsx,md,ts}": [
@@ -74,6 +75,7 @@
74
75
  "/__tests__"
75
76
  ],
76
77
  "dependencies": {
78
+ "@aws-sdk/client-s3": "^3.540.0",
77
79
  "@elastic/elasticsearch": "^8.12.2",
78
80
  "@elastic/transport": "^8.4.1",
79
81
  "@nxtedition/nxt-undici": "^2.0.41",
@@ -89,6 +91,7 @@
89
91
  "nconf": "^0.12.1",
90
92
  "nested-error-stacks": "^2.1.1",
91
93
  "object-hash": "^3.0.0",
94
+ "p-queue": "^8.0.1",
92
95
  "pino": "^8.19.0",
93
96
  "pino-std-serializers": "^6.2.2",
94
97
  "qs": "^6.12.0",
@@ -116,8 +119,7 @@
116
119
  "pinst": "^3.0.0",
117
120
  "prettier": "^3.2.5",
118
121
  "rxjs": "^7.5.6",
119
- "send": "^0.18.0",
120
- "tap": "^18.7.1"
122
+ "send": "^0.18.0"
121
123
  },
122
124
  "peerDependencies": {
123
125
  "@elastic/elasticsearch": "^8.6.0",
package/s3.js ADDED
@@ -0,0 +1,258 @@
1
+ import crypto from 'node:crypto'
2
+ import stream from 'node:stream'
3
+ import path from 'node:path'
4
+ import os from 'node:os'
5
+ import fs from 'node:fs'
6
+ import assert from 'node:assert'
7
+ import AWS from '@aws-sdk/client-s3'
8
+ import PQueue from 'p-queue'
9
+
10
// Hex-encoded MD5 digest: exactly 32 hex characters (case-insensitive).
// Matches the `ContentMD5` value callers may pass to `upload` for verification.
const CONTENT_MD5_EXPR = /^[A-F0-9]{32}$/i
// Non-negative integer string; validates a caller-supplied `ContentLength`.
const CONTENT_LENGTH_EXPR = /^\d+$/i

// Shared no-op used as the idle write callback in PartUploader.
const noop = (arg0) => {}
14
+
15
+ class PartUploader {
16
+ #number
17
+ #path
18
+ #size
19
+ #writable
20
+ #callback
21
+ #hasher
22
+ #signal
23
+
24
+ constructor(dir, number, signal) {
25
+ this.#writable = null
26
+ this.#callback = noop
27
+ this.#hasher = crypto.createHash('md5')
28
+ this.#size = 0
29
+ this.#signal = signal
30
+ this.#number = number
31
+ this.#path = path.join(dir, `${this.#number}.part`)
32
+ }
33
+
34
+ get size() {
35
+ return this.#size
36
+ }
37
+
38
+ async write(chunk) {
39
+ this.#writable ??= fs
40
+ .createWriteStream(this.#path, { signal: this.#signal })
41
+ .on('drain', () => {
42
+ this.#callback(null)
43
+ this.#callback = noop
44
+ })
45
+ .on('error', (err) => {
46
+ this.#callback(err)
47
+ this.callback = noop
48
+ })
49
+
50
+ if (this.#writable.errored) {
51
+ throw this.#writable.errored
52
+ }
53
+
54
+ this.#size += chunk.byteLength
55
+ this.#hasher.update(chunk)
56
+
57
+ if (!this.#writable.write(chunk)) {
58
+ await new Promise((resolve, reject) => {
59
+ this.#callback = (err) => (err ? reject(err) : resolve(null))
60
+ })
61
+ this.#signal.throwIfAborted()
62
+ }
63
+ }
64
+
65
+ async end(s3, params) {
66
+ try {
67
+ if (!this.#writable) {
68
+ throw new Error('No data to send')
69
+ }
70
+
71
+ if (this.#writable.errored) {
72
+ throw this.#writable.errored
73
+ }
74
+
75
+ this.#writable.end()
76
+ await stream.promises.finished(this.#writable)
77
+
78
+ assert(this.#writable.bytesWritten === this.#size, 'Expected size to match bytesWritten')
79
+
80
+ const { ETag } = await s3.send(
81
+ new AWS.UploadPartCommand({
82
+ ...params,
83
+ ContentMD5: this.#hasher.digest('base64'),
84
+ ContentLength: this.#size,
85
+ PartNumber: this.#number,
86
+ Body: fs.createReadStream(this.#path, { signal: this.#signal }),
87
+ }),
88
+ )
89
+
90
+ return { part: { ETag, PartNumber: this.#number } }
91
+ } catch (err) {
92
+ return { error: err }
93
+ } finally {
94
+ await fs.promises.unlink(this.#writable.path)
95
+ }
96
+ }
97
+ }
98
+
99
/**
 * Multipart upload of an async-iterable `Body` to S3. Chunks are buffered to
 * part files under a fresh temp directory and uploaded with up to `queueSize`
 * concurrent UploadPart requests; on success the multipart upload is
 * completed, on failure it is aborted (unless `leavePartsOnError`).
 *
 * @param {object} opts
 * @param {object} opts.client - AWS S3 client (must support `.send`)
 * @param {AbortSignal} [opts.signal] - outer cancellation signal
 * @param {string} [opts.tmpdir] - base directory for part files
 * @param {number} [opts.partSize=64e6] - minimum bytes per part before flush
 * @param {number} [opts.queueSize=4] - concurrent part uploads
 * @param {boolean} [opts.leavePartsOnError=false] - skip AbortMultipartUpload on failure
 * @param {object} target - { Body, Key, Bucket, ContentMD5?, ContentLength? };
 *   ContentMD5 is a 32-char hex digest and ContentLength a decimal string/number,
 *   both verified against the uploaded data when supplied.
 * @returns {Promise<{size: number, hash: string, output: object, parts: object[]}>}
 * @throws {AggregateError|Error} on validation, upload or verification failure
 */
export async function upload(
  {
    client: s3,
    signal: outerSignal,
    tmpdir = os.tmpdir(),
    partSize = 64e6,
    queueSize = 4,
    leavePartsOnError = false,
  },
  { Body, Key, Bucket, ContentMD5, ContentLength },
) {
  if (s3 == null) {
    throw new Error('Invalid client')
  }

  if (!Number.isFinite(partSize) || partSize <= 0) {
    throw new Error('Invalid partSize')
  }

  if (!Number.isFinite(queueSize) || queueSize <= 0) {
    throw new Error('Invalid queueSize')
  }

  if (ContentMD5 != null && !CONTENT_MD5_EXPR.test(ContentMD5)) {
    throw new Error(`Invalid ContentMD5: ${ContentMD5}`)
  }

  if (ContentLength != null && !CONTENT_LENGTH_EXPR.test(ContentLength)) {
    throw new Error(`Invalid ContentLength: ${ContentLength}`)
  }

  const dir = await fs.promises.mkdtemp(path.join(tmpdir, 's3-upload-'))
  outerSignal?.throwIfAborted()

  await fs.promises.stat(dir)
  outerSignal?.throwIfAborted()

  const queue = new PQueue({ concurrency: queueSize })
  const promises = []
  const ac = new AbortController()
  const signal = ac.signal

  // Propagate outer aborts to the inner controller that the part streams use.
  const abort = () => ac.abort()
  outerSignal?.addEventListener('abort', abort)

  let uploadId
  try {
    const multipartUploadOutput = await s3.send(
      new AWS.CreateMultipartUploadCommand({
        Bucket,
        Key,
      }),
    )
    uploadId = multipartUploadOutput.UploadId
    signal.throwIfAborted()

    const uploader = {
      size: 0,
      hasher: crypto.createHash('md5'), // whole-object MD5, hex-compared to ContentMD5
      part: new PartUploader(dir, 1, signal),
      number: 1,
    }

    // Enqueue the current part for upload once it reaches minSize (or
    // unconditionally when minSize is omitted, for the final flush).
    const maybeFlush = (minSize) => {
      if (uploader.part.size && (minSize == null || uploader.part.size >= minSize)) {
        const part = uploader.part
        uploader.part = new PartUploader(dir, ++uploader.number, signal)

        const promise = queue.add(() => part.end(s3, { Bucket, Key, UploadId: uploadId }))
        promises.push(promise)
      }
    }

    for await (const chunk of Body) {
      signal.throwIfAborted()

      uploader.hasher.update(chunk)
      uploader.size += chunk.byteLength

      // write() is async and may suspend on disk backpressure.
      // (Simplified from `const thenable = …; if (thenable)` — an async call
      // always returns a promise, so the guard was dead code.)
      await uploader.part.write(chunk)
      signal.throwIfAborted()

      maybeFlush(partSize)
    }
    maybeFlush()

    // part.end() never rejects; it resolves to { part } or { error }.
    const parts = []
    const errors = []
    for (const { part, error } of await Promise.all(promises)) {
      if (error) {
        errors.push(error)
      } else {
        parts.push(part)
      }
    }
    signal.throwIfAborted()

    if (errors.length > 0) {
      throw new AggregateError(errors, 'upload failed')
    }

    if (parts.length === 0) {
      throw new Error('upload empty')
    }

    const uploadOutput = await s3.send(
      new AWS.CompleteMultipartUploadCommand({
        Bucket,
        Key,
        UploadId: uploadId,
        MultipartUpload: { Parts: parts },
      }),
    )
    signal.throwIfAborted()

    const result = {
      size: uploader.size,
      hash: uploader.hasher.digest('hex'),
      output: uploadOutput,
      parts,
    }

    // Verify caller-supplied expectations against what was actually uploaded.
    const size = ContentLength != null ? Number(ContentLength) : null
    const hash = ContentMD5

    if (size != null && size !== result.size) {
      throw new Error(`Expected size ${size} but got ${result.size}`)
    }

    if (hash != null && hash !== result.hash) {
      throw new Error(`Expected hash ${hash} but got ${result.hash}`)
    }

    return result
  } catch (err) {
    ac.abort(err)

    if (uploadId && !leavePartsOnError) {
      try {
        await s3.send(
          new AWS.AbortMultipartUploadCommand({
            Bucket,
            Key,
            UploadId: uploadId,
          }),
        )
      } catch (er) {
        throw new AggregateError([err, er])
      }
    }

    throw err
  } finally {
    outerSignal?.removeEventListener('abort', abort)
    // BUG FIX: fs.promises.rmdir's `recursive` option is deprecated (DEP0147)
    // and slated to throw in future Node.js versions — use fs.promises.rm.
    await fs.promises.rm(dir, { recursive: true, force: true })
  }
}
@@ -26,7 +26,6 @@ class TimerEntry {
26
26
 
27
27
  dispose() {
28
28
  clearTimeout(this.timer)
29
-
30
29
  this.timer = null
31
30
  }
32
31
  }
@@ -232,6 +231,7 @@ export default function ({ ds, proxify, compiler }) {
232
231
  this._subscription = null
233
232
  this._args = kEmpty
234
233
  this._wrap = null
234
+ this._suspended = false
235
235
 
236
236
  if (rxjs.isObservable(args)) {
237
237
  this._subscription = args.subscribe({
@@ -262,32 +262,40 @@ export default function ({ ds, proxify, compiler }) {
262
262
  throw kSuspend
263
263
  }
264
264
 
265
- fetch(url, init, suspend = true) {
266
- return this._getFetch(url, init, suspend)
265
+ fetch(resource, options, suspend = true) {
266
+ return this._getFetch(resource, options, suspend)
267
267
  }
268
268
 
269
269
  observe(observable, suspend = true) {
270
270
  return this._getObservable(observable, suspend)
271
271
  }
272
272
 
273
- wait(promise, suspend = true) {
274
- return this._getWait(promise, suspend)
273
+ then(promise, suspend = true) {
274
+ return this._getPromise(promise, suspend)
275
275
  }
276
276
 
277
277
  ds(id, state, suspend = true) {
278
278
  return this._getRecord(id, state, suspend)
279
279
  }
280
280
 
281
- _ds(key, postfix, state, suspend = true) {
282
- return !key || typeof key !== 'string'
283
- ? null
284
- : this._getRecord(postfix ? key + postfix : key, state, suspend)
281
+ timer(dueTime, dueValue = dueTime, suspend = true) {
282
+ return this._getTimer(dueTime, dueValue, suspend)
285
283
  }
286
284
 
287
285
  asset(id, type, state, suspend = true) {
288
286
  return this._getHasRawAssetType(id, type, state, suspend)
289
287
  }
290
288
 
289
+ hash(value) {
290
+ return objectHash(value)
291
+ }
292
+
293
+ _ds(key, postfix, state, suspend) {
294
+ return !key || typeof key !== 'string'
295
+ ? null
296
+ : this._getRecord(postfix ? key + postfix : key, state, suspend)
297
+ }
298
+
291
299
  _asset(id, type, state, suspend) {
292
300
  if (!type || typeof type !== 'string') {
293
301
  throw new Error(`invalid argument: type (${type})`)
@@ -298,14 +306,6 @@ export default function ({ ds, proxify, compiler }) {
298
306
  : this._getHasRawAssetType(id, type, state, suspend)
299
307
  }
300
308
 
301
- timer(dueTime, dueValue, suspend = true) {
302
- return this._getTimer(dueTime, dueValue, suspend)
303
- }
304
-
305
- hash(value) {
306
- return objectHash(value)
307
- }
308
-
309
309
  _destroy() {
310
310
  this._destroyed = true
311
311
  this._subscription?.unsubscribe()
@@ -341,7 +341,12 @@ export default function ({ ds, proxify, compiler }) {
341
341
  compiler.current = self
342
342
 
343
343
  try {
344
+ assert(self._suspended === false)
344
345
  const value = self._script.runInContext(self._context)
346
+ if (self._suspended) {
347
+ return
348
+ }
349
+
345
350
  if (value !== self._value) {
346
351
  self._value = value
347
352
  self._observer.next(value)
@@ -363,6 +368,7 @@ export default function ({ ds, proxify, compiler }) {
363
368
  self._context.$ = null
364
369
  self._context.nxt = null
365
370
 
371
+ self._suspended = false
366
372
  self._disposing = true
367
373
 
368
374
  if (self._entries) {
@@ -419,6 +425,7 @@ export default function ({ ds, proxify, compiler }) {
419
425
  }
420
426
 
421
427
  if (!entry.status) {
428
+ this._suspended = true
422
429
  if (suspend ?? true) {
423
430
  throw kSuspend
424
431
  } else {
@@ -445,6 +452,7 @@ export default function ({ ds, proxify, compiler }) {
445
452
  }
446
453
 
447
454
  if (entry.value === kEmpty) {
455
+ this._suspended = true
448
456
  if (suspend ?? true) {
449
457
  throw kSuspend
450
458
  } else {
@@ -455,7 +463,7 @@ export default function ({ ds, proxify, compiler }) {
455
463
  return entry.value
456
464
  }
457
465
 
458
- _getWait(promise, suspend) {
466
+ _getPromise(promise, suspend) {
459
467
  if (typeof promise?.then !== 'function') {
460
468
  throw new Error(`invalid argument: Promise (${promise})`)
461
469
  }
@@ -471,6 +479,7 @@ export default function ({ ds, proxify, compiler }) {
471
479
  }
472
480
 
473
481
  if (entry.value === kEmpty) {
482
+ this._suspended = true
474
483
  if (suspend ?? true) {
475
484
  throw kSuspend
476
485
  } else {
@@ -489,12 +498,12 @@ export default function ({ ds, proxify, compiler }) {
489
498
  const timeout = dueTime - Date.now()
490
499
 
491
500
  if (Number.isFinite(dueTime) && timeout > 0) {
501
+ this._suspended = true
492
502
  this._getEntry(key, TimerEntry, timeout)
493
-
494
503
  if (suspend ?? true) {
495
504
  throw kSuspend
496
505
  } else {
497
- return null
506
+ return dueValue
498
507
  }
499
508
  }
500
509
 
@@ -521,10 +530,11 @@ export default function ({ ds, proxify, compiler }) {
521
530
  const entry = this._getEntry(key, RecordEntry, ds)
522
531
 
523
532
  if (entry.record.state < state) {
533
+ this._suspended = true
524
534
  if (suspend ?? true) {
525
535
  throw kSuspend
526
536
  } else {
527
- return null
537
+ return entry.record.data
528
538
  }
529
539
  }
530
540
 
@@ -540,11 +550,7 @@ export default function ({ ds, proxify, compiler }) {
540
550
  throw new Error(`invalid argument: type (${type})`)
541
551
  }
542
552
 
543
- const data = this._getRecord(
544
- id + ':asset.rawTypes?',
545
- state ?? ds.record.PROVIDER,
546
- suspend ?? true,
547
- )
553
+ const data = this._getRecord(id + ':asset.rawTypes?', state ?? ds.record.PROVIDER, suspend)
548
554
  return data && Array.isArray(data.value) && data.value.includes(type) ? id : null
549
555
  }
550
556
  }