@nxtedition/lib 19.0.37 → 19.0.40

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3)
  1. package/package.json +13 -13
  2. package/s3.js +14 -8
  3. package/serializers.js +2 -8
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nxtedition/lib",
3
- "version": "19.0.37",
3
+ "version": "19.0.40",
4
4
  "license": "MIT",
5
5
  "author": "Robert Nagy <robert.nagy@boffins.se>",
6
6
  "type": "module",
@@ -75,11 +75,11 @@
75
75
  "/__tests__"
76
76
  ],
77
77
  "dependencies": {
78
- "@aws-sdk/client-s3": "^3.540.0",
79
- "@elastic/elasticsearch": "^8.12.2",
80
- "@elastic/transport": "^8.4.1",
81
- "@nxtedition/nxt-undici": "^2.0.41",
82
- "date-fns": "^3.4.0",
78
+ "@aws-sdk/client-s3": "^3.552.0",
79
+ "@elastic/elasticsearch": "^8.13.1",
80
+ "@elastic/transport": "^8.5.0",
81
+ "@nxtedition/nxt-undici": "^2.0.45",
82
+ "date-fns": "^3.6.0",
83
83
  "fast-querystring": "^1.1.1",
84
84
  "hasha": "^6.0.0",
85
85
  "http-errors": "^2.0.0",
@@ -92,7 +92,7 @@
92
92
  "nested-error-stacks": "^2.1.1",
93
93
  "object-hash": "^3.0.0",
94
94
  "p-queue": "^8.0.1",
95
- "pino": "^8.19.0",
95
+ "pino": "^8.20.0",
96
96
  "pino-std-serializers": "^6.2.2",
97
97
  "qs": "^6.12.0",
98
98
  "request-target": "^1.0.2",
@@ -100,18 +100,18 @@
100
100
  "split-string": "^6.0.0",
101
101
  "split2": "^4.2.0",
102
102
  "toobusy-js": "^0.5.1",
103
- "undici": "^6.7.1",
103
+ "undici": "^6.12.0",
104
104
  "url-join": "^5.0.0"
105
105
  },
106
106
  "devDependencies": {
107
- "@nxtedition/deepstream.io-client-js": ">=24.1.6",
108
- "@types/lodash": "^4.14.202",
109
- "@types/node": "^20.11.26",
110
- "eslint": "^8.57.0",
107
+ "@nxtedition/deepstream.io-client-js": ">=24.1.20",
108
+ "@types/lodash": "^4.17.0",
109
+ "@types/node": "^20.12.7",
110
+ "eslint": "^8.0.0",
111
111
  "eslint-config-prettier": "^9.1.0",
112
112
  "eslint-config-standard": "^17.0.0",
113
113
  "eslint-plugin-import": "^2.29.1",
114
- "eslint-plugin-n": "^16.6.1",
114
+ "eslint-plugin-n": "^17.1.0",
115
115
  "eslint-plugin-node": "^11.1.0",
116
116
  "eslint-plugin-promise": "^6.0.0",
117
117
  "husky": "^9.0.11",
package/s3.js CHANGED
@@ -1,5 +1,6 @@
1
1
  import crypto from 'node:crypto'
2
2
  import stream from 'node:stream'
3
+ import assert from 'node:assert'
3
4
  import AWS from '@aws-sdk/client-s3'
4
5
  import PQueue from 'p-queue'
5
6
 
@@ -22,7 +23,7 @@ export async function upload({
22
23
  throw new Error('Invalid partSize')
23
24
  }
24
25
 
25
- if (!Number.isFinite(queueSize) || queueSize <= 0) {
26
+ if (!Number.isFinite(queueSize) || queueSize <= 0 || queueSize > 32) {
26
27
  throw new Error('Invalid queueSize')
27
28
  }
28
29
 
@@ -43,6 +44,8 @@ export async function upload({
43
44
  const queue = new PQueue({ concurrency: queueSize })
44
45
  const promises = []
45
46
 
47
+ assert(queue.concurrency > 0 && queue.concurrency <= 32)
48
+
46
49
  let uploadId
47
50
  try {
48
51
  const multipartUploadOutput = await s3.send(
@@ -51,7 +54,7 @@ export async function upload({
51
54
  )
52
55
  uploadId = multipartUploadOutput.UploadId
53
56
  logger = logger?.child({ uploadId })
54
- logger?.debug('created multipart upload')
57
+ logger?.debug('multipart upload created')
55
58
 
56
59
  const uploader = {
57
60
  size: 0,
@@ -113,7 +116,11 @@ export async function upload({
113
116
  logger?.debug({ number, size, etag: ETag }, 'part upload completed')
114
117
  return { part: { ETag, PartNumber: number } }
115
118
  } catch (err) {
116
- logger?.debug({ err }, 'part upload failed')
119
+ if (err.name === 'AbortError') {
120
+ logger?.debug({ err }, 'part upload aborted')
121
+ } else {
122
+ logger?.warn({ err }, 'part upload failed')
123
+ }
117
124
  return { error: err }
118
125
  }
119
126
  },
@@ -122,7 +129,7 @@ export async function upload({
122
129
  .catch((err) => ({ error: err })),
123
130
  )
124
131
 
125
- return queue.onEmpty()
132
+ return queue.size > 0 ? queue.onEmpty() : null
126
133
  }
127
134
 
128
135
  for await (const chunk of Body) {
@@ -154,7 +161,7 @@ export async function upload({
154
161
  signal?.throwIfAborted()
155
162
 
156
163
  if (errors.length > 0) {
157
- throw new AggregateError(errors, 'upload failed')
164
+ throw new AggregateError(errors, 'multipart upload failed')
158
165
  }
159
166
 
160
167
  if (parts.length === 0) {
@@ -190,16 +197,15 @@ export async function upload({
190
197
  throw new Error(`Expected hash ${hash} but got ${result.hash}`)
191
198
  }
192
199
 
193
- logger?.debug(result, 'completed multipart upload')
200
+ logger?.debug(result, 'multipart upload completed')
194
201
 
195
202
  return result
196
203
  } catch (err) {
197
- logger?.error({ err }, 'failed multipart upload')
204
+ logger?.error({ err }, 'multipart upload failed')
198
205
 
199
206
  if (uploadId) {
200
207
  try {
201
208
  await s3.send(new AWS.AbortMultipartUploadCommand({ Bucket, Key, UploadId: uploadId }))
202
- logger?.warn('aborted multipart upload')
203
209
  } catch (er) {
204
210
  throw new AggregateError([err, er])
205
211
  }
package/serializers.js CHANGED
@@ -1,5 +1,6 @@
1
1
  import serializers from 'pino-std-serializers'
2
2
  import { SIGNALS } from './platform.js'
3
+ import { util } from 'undici'
3
4
 
4
5
  function getHeader(obj, key) {
5
6
  return obj?.headers?.get?.(key) || obj?.getHeader?.(key) || obj?.headers?.[key]
@@ -11,14 +12,7 @@ function getHeaders(obj) {
11
12
  }
12
13
 
13
14
  if (Array.isArray(obj.headers)) {
14
- const src = obj.headers
15
- const dst = {}
16
- for (let n = 0; n < src.length; n += 2) {
17
- const key = src[n].toString().toLowerCase()
18
- const val = src[n + 1].toString()
19
- dst[key] = dst[key] ? `${dst[key]},${val}` : val
20
- }
21
- return dst
15
+ return util.parseHeaders(obj.headers)
22
16
  }
23
17
 
24
18
  return (