@nxtedition/lib 26.8.7 → 27.0.0-alpha.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
package/rxjs/firstValueFrom.js DELETED
@@ -1,18 +0,0 @@
- import * as rxjs from 'rxjs'
- import withAbortSignal from './withAbortSignal.js'
-
- export default function firstValueFrom(x$, config) {
-   const hasConfig = config && typeof config === 'object'
-   const signal = hasConfig ? config.signal : undefined
-   const timeout = hasConfig ? config.timeout : undefined
-
-   if (signal) {
-     x$ = x$.pipe(withAbortSignal(signal))
-   }
-
-   if (timeout) {
-     x$ = x$.pipe(rxjs.timeout(timeout))
-   }
-
-   return rxjs.firstValueFrom(x$, config)
- }
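
Note: the deleted firstValueFrom wrapped rxjs.firstValueFrom with optional abort and timeout handling. A minimal usage sketch, assuming the file lived at package/rxjs/firstValueFrom.js (the import path and the observable are illustrative):

import { interval } from 'rxjs'
import firstValueFrom from './rxjs/firstValueFrom.js' // assumed path

const ac = new AbortController()

// Resolves with the first emission; rejects with signal.reason (or the
// package's AbortError) on abort, or with rxjs's TimeoutError after 5 s of
// silence. The config object is also forwarded to rxjs.firstValueFrom, so
// options such as defaultValue behave as in stock rxjs.
const value = await firstValueFrom(interval(1000), { signal: ac.signal, timeout: 5000 })
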
package/rxjs/lastValueFrom.js DELETED
@@ -1,14 +0,0 @@
- import { EMPTY, fromEvent, takeUntil, throwIfEmpty, last } from 'rxjs'
- import { AbortError } from '../errors'
-
- export default function lastValueFrom(x$, config) {
-   const hasConfig = config && typeof config === 'object'
-   const signal = hasConfig ? config.signal : undefined
-
-   if (signal) {
-     x$ = signal.aborted ? EMPTY : x$.pipe(takeUntil(fromEvent(signal, 'abort')))
-     x$ = x$.pipe(throwIfEmpty(() => new AbortError()))
-   }
-
-   return x$.pipe(last(hasConfig ? config.defaultValue : undefined)).toPromise()
- }
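
Note: the deleted lastValueFrom resolved with the final emission instead, mapping an abort to AbortError via throwIfEmpty. A sketch under the same path assumption:

import { of } from 'rxjs'
import lastValueFrom from './rxjs/lastValueFrom.js' // assumed path

const ac = new AbortController()

// Resolves with 3; if ac aborts first, takeUntil cuts the stream short and
// throwIfEmpty rejects the promise with AbortError.
const value = await lastValueFrom(of(1, 2, 3), { signal: ac.signal })
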
package/rxjs/retry.js DELETED
@@ -1,112 +0,0 @@
- import { operate } from 'rxjs/internal/util/lift'
- import { createOperatorSubscriber } from 'rxjs/internal/operators/OperatorSubscriber'
- import { identity, timer } from 'rxjs'
- import { innerFrom } from 'rxjs/internal/observable/innerFrom'
-
- // This is from https://github.com/ReactiveX/rxjs/blob/7.x/src/internal/operators/retry.ts
- // But with some custom code added to emit a value on each retry attempt.
- // The default delay function is also changed to use exponential backoff, capped at 1 minute.
- //
- // It uses internals from rxjs, but otherwise the code would have to be modified more heavily.
-
- const DEFAULT_DELAY = (err, retryCount) => {
-   return timer(Math.min(2 ** (retryCount - 1) * 1000, 60_000))
- }
-
- export function retry(configOrCount) {
-   let config
-   if (configOrCount && typeof configOrCount === 'object') {
-     config = configOrCount
-   } else {
-     config = {
-       count: configOrCount,
-     }
-   }
-   const { count = Infinity, delay = DEFAULT_DELAY, resetOnSuccess = false, emitOnRetry } = config
-
-   return count <= 0
-     ? identity
-     : operate((source, subscriber) => {
-         let soFar = 0
-         let innerSub
-         const subscribeForRetry = () => {
-           let syncUnsub = false
-           innerSub = source.subscribe(
-             createOperatorSubscriber(
-               subscriber,
-               (value) => {
-                 // If we're resetting on success
-                 if (resetOnSuccess) {
-                   soFar = 0
-                 }
-                 subscriber.next(value)
-               },
-               // Completions are passed through to consumer.
-               undefined,
-               (err) => {
-                 if (soFar++ < count) {
-                   // We are still under our retry count
-                   const resub = () => {
-                     if (innerSub) {
-                       innerSub.unsubscribe()
-                       innerSub = null
-                       subscribeForRetry()
-                     } else {
-                       syncUnsub = true
-                     }
-                   }
-
-                   // ------- START CUSTOM CODE -------
-                   if (emitOnRetry) {
-                     try {
-                       subscriber.next(emitOnRetry(err, soFar))
-                     } catch (e) {
-                       subscriber.error(e)
-                       return
-                     }
-                   }
-                   // ------- END CUSTOM CODE -------
-
-                   if (delay != null) {
-                     // The user specified a retry delay.
-                     // They gave us a number, use a timer, otherwise, it's a function,
-                     // and we're going to call it to get a notifier.
-                     const notifier =
-                       typeof delay === 'number' ? timer(delay) : innerFrom(delay(err, soFar))
-                     const notifierSubscriber = createOperatorSubscriber(
-                       subscriber,
-                       () => {
-                         // After we get the first notification, we
-                         // unsubscribe from the notifier, because we don't want anymore
-                         // and we resubscribe to the source.
-                         notifierSubscriber.unsubscribe()
-                         resub()
-                       },
-                       () => {
-                         // The notifier completed without emitting.
-                         // The author is telling us they want to complete.
-                         subscriber.complete()
-                       },
-                     )
-                     notifier.subscribe(notifierSubscriber)
-                   } else {
-                     // There was no notifier given. Just resub immediately.
-                     resub()
-                   }
-                 } else {
-                   // We're past our maximum number of retries.
-                   // Just send along the error.
-                   subscriber.error(err)
-                 }
-               },
-             ),
-           )
-           if (syncUnsub) {
-             innerSub.unsubscribe()
-             innerSub = null
-             subscribeForRetry()
-           }
-         }
-         subscribeForRetry()
-       })
- }
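
Note: this fork of rxjs retry changes two things: the default delay becomes exponential backoff (1 s, 2 s, 4 s, ..., capped at 60 s), and the emitOnRetry hook can emit a status value downstream before each resubscription. A usage sketch (the flaky source is invented):

import { defer } from 'rxjs'
import { retry } from './rxjs/retry.js'

let attempts = 0
const flaky$ = defer(() => {
  if (++attempts < 3) throw new Error('transient') // defer turns this into an error notification
  return [attempts]
})

flaky$
  .pipe(
    retry({
      count: 5,
      // Emitted downstream before each retry attempt.
      emitOnRetry: (err, attempt) => ({ retrying: true, attempt, reason: err.message }),
      // delay omitted: the capped exponential backoff above applies.
    }),
  )
  .subscribe(console.log) // { retrying: true, attempt: 1, ... }, { ..., attempt: 2 }, 3
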
package/rxjs/withAbortSignal.js DELETED
@@ -1,31 +0,0 @@
- import { Observable } from 'rxjs'
- import { AbortError } from '../errors.js'
-
- function withAbortSignalImpl(signal) {
-   return new Observable((o) => {
-     o.add(this.subscribe(o))
-
-     if (!signal) {
-       return
-     }
-
-     const onAbort = () => {
-       o.error(signal.reason ?? new AbortError())
-     }
-
-     if (signal.aborted) {
-       onAbort()
-     } else {
-       signal.addEventListener('abort', onAbort)
-       o.add(() => {
-         signal.removeEventListener('abort', onAbort)
-       })
-     }
-   })
- }
-
- Observable.prototype.withAbortSignal = withAbortSignalImpl
-
- export default function withAbortSignal(signal) {
-   return (o) => withAbortSignalImpl.call(o, signal)
- }
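
Note: withAbortSignal was exposed both as a pipeable operator and, as an import side effect, as an Observable.prototype method; both error the stream with signal.reason (falling back to AbortError) once the signal aborts. A sketch under the same path assumption:

import { interval } from 'rxjs'
import withAbortSignal from './rxjs/withAbortSignal.js' // assumed path

const ac = new AbortController()
setTimeout(() => ac.abort(new Error('gave up')), 3000)

// Pipeable form:
interval(1000)
  .pipe(withAbortSignal(ac.signal))
  .subscribe({ next: console.log, error: console.error }) // 0, 1, then Error: gave up

// Prototype form (patched onto Observable when the module is imported):
interval(1000).withAbortSignal(ac.signal).subscribe({ error: console.error })
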
package/s3.js DELETED
@@ -1,339 +0,0 @@
- import crypto from 'node:crypto'
- import tp from 'node:timers/promises'
- import AWS from '@aws-sdk/client-s3'
- import PQueue from 'p-queue'
- import { NodeHttpHandler } from '@smithy/node-http-handler'
- import { request as undiciRequest, parseHeaders } from '@nxtedition/nxt-undici'
-
- const QUEUE = new PQueue({ concurrency: 8 })
- const MD5_HEX_EXPR = /^[A-F0-9]{32}$/i
-
- /**
-  * @import { Dispatcher } from 'undici'
-  * @import { S3ClientConfig, CreateMultipartUploadRequest } from '@aws-sdk/client-s3'
-  */
-
- export class S3Client extends AWS.S3Client {
-   /**
-    * @param {S3ClientConfig & { dispatcher?: Dispatcher }} config
-    */
-   constructor(config) {
-     const { dispatcher, ...options } = config
-     super({
-       requestHandler: new UndiciRequestHandler(dispatcher),
-       ...options,
-     })
-   }
- }
-
- class UndiciRequestHandler extends NodeHttpHandler {
-   #dispatcher
-
-   /**
-    * @param {Dispatcher=} dispatcher
-    */
-   constructor(dispatcher) {
-     super()
-     this.#dispatcher = dispatcher
-   }
-
-   /**
-    * @param {import('@smithy/protocol-http').HttpRequest} request
-    * @param {import('@smithy/types').HttpHandlerOptions=} options
-    */
-   async handle(request, options) {
-     const abortSignal = options?.abortSignal
-
-     const { protocol, hostname, port, path, ...requestOptions } = request
-     // NOTE: Expect & Transfer-Encoding header is not supported by undici
-     const {
-       expect,
-       'transfer-encoding': transferEncoding,
-       ...headers
-     } = parseHeaders(request.headers)
-     const typedMethod = /** @type {import('undici').Dispatcher.HttpMethod} */ (request.method)
-
-     const url = `${request.protocol}//${request.hostname}${request.port ? `:${request.port}` : ''}${request.path}`
-
-     const response = await undiciRequest(url, {
-       ...requestOptions,
-       method: typedMethod,
-       signal: abortSignal,
-       dispatcher: this.#dispatcher,
-       headers,
-       body: request.body,
-       // nxt-undici
-       error: false,
-       dns: false,
-       retry: false,
-       follow: false,
-     })
-
-     return {
-       response: {
-         ...response,
-         headers: getTransformedHeaders(response.headers),
-       },
-     }
-   }
- }
-
- /**
-  * Uploads a file to S3 using multipart upload.
-  *
-  * @param {Object} options - The options for the upload.
-  * @param {AWS.S3} options.client - The S3 client.
-  * @param {AbortSignal} [options.signal] - The signal to abort the upload.
-  * @param {Object} [options.logger] - The logger to use.
-  * @param {number} [options.partSize=16e6] - The size of each part in the multipart upload.
-  * @param {PQueue} [options.queue] - The queue to use for part uploads.
-  * @param {CreateMultipartUploadRequest & {
-  *   Body: stream.Readable,
-  *   ContentMD5?: string,
-  *   ContentLength?: number,
-  * }} options.params - The parameters for the upload.
-  * @returns {Promise<Object>} The result of the upload.
-  */
- export async function upload({
-   client: s3,
-   queue = QUEUE,
-   signal,
-   logger,
-   partSize = 16e6,
-   params,
- }) {
-   if (s3 == null) {
-     throw new Error('Invalid client')
-   }
-
-   if (!Number.isFinite(partSize) || partSize <= 0) {
-     throw new Error('Invalid partSize')
-   }
-
-   if (params == null || typeof params !== 'object') {
-     throw new Error('Invalid params')
-   }
-
-   const { Body, Key, Bucket, ContentMD5, ContentLength, ...createMultipartParams } = params
-
-   const size = ContentLength != null ? Number(ContentLength) : null
-
-   if (size != null && (!Number.isFinite(size) || size < 0)) {
-     throw new Error('Invalid params.ContentLength')
-   }
-
-   const hash = ContentMD5 != null ? Buffer.from(ContentMD5, 'base64').toString('hex') : null
-   if (hash != null && !MD5_HEX_EXPR.test(hash)) {
-     throw new Error('Invalid params.ContentMD5')
-   }
-
-   const promises = []
-
-   const ac = new AbortController()
-   const onAbort = () => {
-     ac.abort()
-   }
-   signal?.addEventListener('abort', onAbort)
-
-   let uploadId
-   try {
-     const uploader = {
-       size: 0,
-       hasher: crypto.createHash('md5'),
-       ac,
-       signal: ac.signal,
-       part: {
-         /** @type {Array<Buffer>} **/ chunks: [],
-         hasher: crypto.createHash('md5'),
-         size: 0,
-         number: 1,
-       },
-     }
-
-     const multipartUploadOutput = await s3.send(
-       new AWS.CreateMultipartUploadCommand({ ...createMultipartParams, Bucket, Key }),
-       { abortSignal: uploader.signal },
-     )
-     uploader.signal.throwIfAborted()
-
-     uploadId = multipartUploadOutput.UploadId
-     logger = logger?.child({ uploadId })
-     logger?.debug('multipart upload created')
-
-     const maybeFlush = (minSize) => {
-       const { part } = uploader
-
-       if (!part.size) {
-         return
-       }
-
-       if (minSize != null && part.size < minSize) {
-         return
-       }
-
-       const chunks = part.chunks
-       const number = part.number
-       const size = part.size
-       const hash = part.hasher.digest('hex')
-
-       part.chunks = []
-       part.number += 1
-       part.size = 0
-       part.hasher = crypto.createHash('md5')
-
-       promises.push(
-         queue
-           .add(
-             async () => {
-               for (let retryCount = 0; true; retryCount++) {
-                 logger?.debug({ number, size }, 'part upload started')
-                 try {
-                   const { ETag } = await s3.send(
-                     new AWS.UploadPartCommand({
-                       Bucket,
-                       Key,
-                       UploadId: uploadId,
-                       ContentMD5: Buffer.from(hash, 'hex').toString('base64'),
-                       ContentLength: size,
-                       PartNumber: number,
-                       Body: Buffer.concat(chunks),
-                     }),
-                   )
-                   logger?.debug({ number, size, etag: ETag }, 'part upload completed')
-                   return { part: { ETag, PartNumber: number } }
-                 } catch (err) {
-                   logger?.warn({ err }, 'part upload failed')
-
-                   if (retryCount < 16 && err.name !== 'NotImplemented') {
-                     logger?.debug({ retryCount }, 'part upload retry')
-                     await tp.setTimeout(retryCount * retryCount * 1e3, undefined, {
-                       signal: uploader.signal,
-                     })
-                   } else {
-                     uploader.ac.abort(err)
-                     return { error: err }
-                   }
-                 }
-               }
-             },
-             { signal: uploader.signal },
-           )
-           .catch((err) => ({ error: err })),
-       )
-
-       return queue.size > 0 ? queue.onEmpty() : null
-     }
-
-     for await (const chunk of Body) {
-       uploader.signal.throwIfAborted()
-
-       uploader.hasher.update(chunk)
-       uploader.size += chunk.byteLength
-
-       uploader.part.hasher.update(chunk)
-       uploader.part.chunks.push(chunk)
-       uploader.part.size += chunk.byteLength
-
-       const thenable = maybeFlush(partSize)
-       if (thenable) {
-         await thenable
-         uploader.signal.throwIfAborted()
-       }
-     }
-     await maybeFlush()
-     uploader.signal.throwIfAborted()
-
-     const parts = []
-     const errors = []
-     for (const { part, error } of await Promise.all(promises)) {
-       if (error) {
-         errors.push(error)
-       } else if (part) {
-         parts.push(part)
-       }
-     }
-     uploader.signal.throwIfAborted()
-
-     if (errors.length > 0) {
-       throw new AggregateError(errors, 'multipart upload failed')
-     }
-
-     if (parts.length === 0) {
-       throw new Error('upload empty')
-     }
-
-     const uploadOutput = await s3.send(
-       new AWS.CompleteMultipartUploadCommand({
-         Bucket,
-         Key,
-         UploadId: uploadId,
-         MultipartUpload: { Parts: parts },
-       }),
-     )
-     uploader.signal.throwIfAborted()
-
-     const result = {
-       size: uploader.size,
-       hash: uploader.hasher.digest('hex'),
-       output: uploadOutput,
-       parts,
-     }
-
-     if (size != null && size !== result.size) {
-       throw new Error(`Expected size ${size} but got ${result.size}`)
-     }
-
-     if (hash != null && hash !== result.hash) {
-       throw new Error(`Expected hash ${hash} but got ${result.hash}`)
-     }
-
-     logger?.debug(result, 'multipart upload completed')
-
-     return result
-   } catch (err) {
-     logger?.error({ err }, 'multipart upload failed')
-
-     if (uploadId) {
-       try {
-         await s3.send(new AWS.AbortMultipartUploadCommand({ Bucket, Key, UploadId: uploadId }))
-       } catch (er) {
-         throw new AggregateError([err, er])
-       }
-     }
-
-     throw err
-   } finally {
-     signal?.removeEventListener('abort', onAbort)
-   }
- }
-
- /**
-  * @see https://github.com/smithy-lang/smithy-typescript/blob/main/packages/node-http-handler/src/get-transformed-headers.ts
-  *
-  * @param {import('http2').IncomingHttpHeaders} headers
-  * @returns {import('@smithy/types').HeaderBag}
-  */
- function getTransformedHeaders(headers) {
-   /**
-    * @type {import('@smithy/types').HeaderBag}
-    */
-   const transformedHeaders = {}
-
-   const isRangedResponse = headers['content-range'] != null
-
-   for (const name of Object.keys(headers)) {
-     // Google Cloud S3 returns checksums for the full object
-     // even with range requests, which causes checksum mismatches.
-     // I assume the checksum headers shouldn't be sent with ranged responses.
-     if (isRangedResponse && /^x-amz-checksum-/i.test(name)) {
-       continue
-     }
-
-     const headerValues = headers[name]
-     transformedHeaders[name] = Array.isArray(headerValues)
-       ? headerValues.join(',')
-       : (headerValues ?? '')
-   }
-
-   return transformedHeaders
- }
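
Note: upload() streams a Readable into a multipart upload in partSize chunks (16 MB by default), MD5-hashing each part, retrying failed parts up to 16 times with quadratic backoff, and aborting the multipart upload on failure. A hedged usage sketch (bucket, key, file name, and region are placeholders):

import fs from 'node:fs'
import { S3Client, upload } from './s3.js'

const client = new S3Client({ region: 'us-east-1' }) // placeholder config

const result = await upload({
  client,
  params: {
    Bucket: 'my-bucket', // placeholder
    Key: 'media/clip.bin', // placeholder
    Body: fs.createReadStream('clip.bin'), // placeholder file
  },
})
// result.size and result.hash are accumulated while streaming and, when
// params.ContentLength / params.ContentMD5 are supplied, verified against them.
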
package/timeline.js DELETED
@@ -1,115 +0,0 @@
- import fp from 'lodash/fp.js'
-
- const STOP_TIME = 5
- const PRELOAD_TIME = 5
-
- // Events must have start/end/source/data, and should be sorted by start
- export function eventsToTimeline(events) {
-   const timeline = []
-   events = [...events]
-   let active = []
-
-   while (active.length || events.length) {
-     const activeEnd = Math.min(...active.map((x) => x.end))
-     if (active.length && (!events.length || activeEnd < events.at(0).start)) {
-       // current has ended
-       const prev = active.at(-1)
-       active = active.filter((e) => e.end > activeEnd)
-       if (active.length) {
-         // reverting to previously active
-         timeline.push({
-           ...fp.omit(['start', 'end'], active.at(-1)),
-           data: Object.assign({}, ...active.map((x) => x.data)),
-           time: activeEnd,
-         })
-       } else {
-         // stop active
-         if (activeEnd < Infinity) {
-           timeline.push({ ...fp.omit(['start', 'end', 'data'], prev), time: activeEnd })
-         }
-         // clear
-         const clearTime = activeEnd + STOP_TIME
-         if (!events.length || clearTime < events.at(0).start) {
-           timeline.push({ ...fp.omit(['start', 'end', 'source', 'data'], prev), time: clearTime })
-         }
-       }
-     } else {
-       // start next
-       const nextStart = events.at(0).start
-       let next
-       do {
-         next = events.shift()
-         active.push(next)
-       } while (events.length && nextStart === events.at(0).start)
-
-       timeline.push({
-         ...fp.omit(['start', 'end'], next),
-         data: Object.assign({}, ...active.map((x) => x.data)),
-         time: next.start,
-       })
-     }
-   }
-   return timeline
- }
-
- export function timelineToCommands(timeline) {
-   const commands = []
-   let current = { time: -Infinity }
-   for (let i = 0; i < timeline.length; i++) {
-     const next = timeline[i]
-     const canPreload =
-       next.source && next.data && current.source == null && current.time < next.time
-     if (canPreload) {
-       const loadTime = Math.max(current.time, next.time - PRELOAD_TIME)
-       commands.push(
-         {
-           command: 'load',
-           ...next,
-           time: loadTime,
-         },
-         {
-           command: 'play',
-           ...fp.omit(['source', 'data'], next),
-         },
-       )
-     } else if (next.source) {
-       if (next.source !== current.source || !current.data) {
-         commands.push({
-           command: 'play',
-           ...next,
-         })
-       } else if (!fp.isEqual(current.data, next.data)) {
-         commands.push({
-           command: next.data ? 'update' : 'stop',
-           ...fp.omit(['source'], next),
-         })
-       }
-     } else {
-       commands.push({
-         command: 'clear',
-         ...next,
-       })
-     }
-     current = next
-   }
-
-   return commands
- }
-
- export function pickLayer(source, min = 11, max = 99999) {
-   if (!source) {
-     return min
-   }
-
-   let hash = 0
-
-   for (let i = 0; i < source.length; i++) {
-     const chr = source.charCodeAt(i)
-     hash = (hash << 5) - hash + chr
-     hash |= 0
-   }
-
-   hash = hash < 0 ? -hash : hash
-
-   return min + (hash % (max - min + 1))
- }
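
Note: eventsToTimeline folds overlapping events into a flat list of timed states, which timelineToCommands then diffs into load/play/update/stop/clear commands; pickLayer hashes a source string to a stable layer number. A worked sketch (the events are invented):

import { eventsToTimeline, timelineToCommands, pickLayer } from './timeline.js'

const events = [
  { start: 0, end: 10, source: 'graphicA', data: { text: 'Hello' } },
  { start: 4, end: 8, source: 'graphicB', data: { text: 'World' } },
]

// Timeline states: graphicA at 0, graphicB at 4 (data merged, later events
// win), back to graphicA at 8, a stop at 10, and a clear at 15 (STOP_TIME
// seconds after the last event ends).
const timeline = eventsToTimeline(events)
const commands = timelineToCommands(timeline)

// Stable layer in [11, 99999] derived from the source string's hash.
console.log(pickLayer('graphicA'))
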
@@ -1,67 +0,0 @@
- import type { FirstValueFromConfig } from 'rxjs/internal/firstValueFrom'
- import type * as rx from 'rxjs'
- import type { DeepstreamClient } from '@nxtedition/deepstream.io-client-js'
- import type { Logger } from '../../logger.js'
-
- export interface MakeTemplateCompilerParams {
-   // eslint-disable-next-line @typescript-eslint/no-explicit-any
-   ds?: DeepstreamClient<any, any>
-   proxify?: unknown
-   logger?: Logger
-   platform?: unknown
- }
-
- export declare function makeTemplateCompiler(params: MakeTemplateCompilerParams): TemplateCompiler
-
- export interface TemplateCompiler {
-   current: null
-
-   resolveTemplate: <ReturnValue, Context extends Record<string, unknown>>(
-     template: Nxtpression<ReturnValue, Context> | string,
-     args$?: Context | rx.Observable<Context>,
-     options?: FirstValueFromConfig<ReturnValue>,
-   ) => Promise<unknown>
-
-   onResolveTemplate: <ReturnValue, Context extends Record<string, unknown>>(
-     template: Nxtpression<ReturnValue, Context> | string,
-     args$?: Context | rx.Observable<Context>,
-   ) => rx.Observable<unknown>
-
-   compileTemplate: <ReturnValue, Context extends Record<string, unknown>>(
-     template: Nxtpression<ReturnValue, Context> | string,
-   ) => null | ((args$?: Context | rx.Observable<Context>) => rx.Observable<unknown>)
-
-   isTemplate: (value: unknown) => value is Nxtpression
- }
-
- export type Nxtpression<ReturnValue = unknown, Context extends object = object> =
-   | {
-       /**
-        * TS-HACK: this property doesn't really exist on the nxtpression string,
-        * it is only here to make sure the generic Context won't get stripped.
-        */
-       __context: Context
-
-       /**
-        * TS-HACK: this property doesn't really exist on the nxtpression string,
-        * it is only here to make sure the generic Context won't get stripped.
-        */
-       __returnValue: ReturnValue
-     }
-   | string
-   | ReturnValue
-
- export type ResolveNxtpressionDeep<T> =
-   ExtractReturn<T> extends never
-     ? T extends Array<infer U>
-       ? ResolveNxtpressionDeep<U>
-       : T extends object
-         ? {
-             [K in keyof T]: ResolveNxtpressionDeep<T[K]>
-           }
-         : T
-     : ExtractReturn<T>
- type TypeWithReturn<X> = {
-   __returnValue: X
- }
- type ExtractReturn<Type> = Type extends TypeWithReturn<infer X> ? X : never
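
Note: this last hunk is a deleted TypeScript declaration file for the template compiler (its path is not shown in the diff header). A hedged JS-side sketch of the declared surface; the template string and arguments are illustrative:

// import { makeTemplateCompiler } from '…' — module path not shown in this diff
const compiler = makeTemplateCompiler({}) // all params are optional per the interface

if (compiler.isTemplate('{{foo.bar}}')) {
  // One-shot resolution to a promise:
  const value = await compiler.resolveTemplate('{{foo.bar}}', { foo: { bar: 1 } })

  // Reactive resolution: an observable of results, driven by args$ (a plain
  // context object or an rx.Observable of contexts).
  compiler.onResolveTemplate('{{foo.bar}}', { foo: { bar: 1 } }).subscribe(console.log)
}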