@nxtedition/lib 26.0.21 → 26.0.23
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/app.js +24 -13
- package/cache.js +54 -16
- package/http.js +10 -0
- package/package.json +3 -2
- package/util/template/index-common.js +1 -1
package/app.js
CHANGED
@@ -142,17 +142,32 @@ export function makeApp(appConfig, onTerminate) {
     process.title = userAgent
   }

-  const dailyOffpeakTime =
+  const dailyOffpeakTime =
+    config.dailyOffpeakTime ?? (isMainThread ? getUTCRangeForLocalTime('00:00-04:00') : null)

   if (dailyOffpeakTime) {
-
+    const offPeakId = (process.pid << 16) | threadId
+    const offPeakBC = new BroadcastChannel('nxt:offPeak').unref()
+
+    let lastOffPeak = 0
+    offPeakBC.onmessage = ({ data }) => {
+      const now = Date.now()
+      logger.debug({ data, elapsedTime: now - lastOffPeak }, 'offpeak')
+      lastOffPeak = now
+    }

     const [start, end] = dailyOffpeakTime.split('-')
     let wasOffpeak = null
     setInterval(() => {
       if (isTimeBetween(new Date(), start, end)) {
-        if (!wasOffpeak
-
+        if (!wasOffpeak) {
+          if (Date.now() - lastOffPeak > 60e3) {
+            offPeakBC.postMessage({ id: offPeakId, value: dailyOffpeakTime })
+          }
+
+          if (typeof global.gc === 'function') {
+            global.gc()
+          }
         }
         wasOffpeak = true
       } else {
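The off-peak block coordinates across the worker threads of a process over a `nxt:offPeak` BroadcastChannel: a thread only announces the window if no announcement has been seen in the last 60 s, and each thread opportunistically runs `global.gc()` when Node was started with `--expose-gc`. Below is a minimal standalone sketch of that coordination pattern; the channel name and payload shape come from the diff, while the interval and `console.debug` are stand-ins for the library's own scheduling and logger (`getUTCRangeForLocalTime`, `isTimeBetween`, `config`, and `logger` are not reproduced here).

```js
// Sketch: announce "off-peak" over a BroadcastChannel at most once per minute
// across the worker threads of a process. Illustrative only.
import { BroadcastChannel, threadId } from 'node:worker_threads'

const offPeakId = (process.pid << 16) | threadId // sender id: pid in the high bits, thread id low
const offPeakBC = new BroadcastChannel('nxt:offPeak')
offPeakBC.unref() // don't keep the event loop alive for this channel alone

let lastOffPeak = 0
offPeakBC.onmessage = ({ data }) => {
  // Announcements from other threads reset the 60 s suppression window.
  lastOffPeak = Date.now()
  console.debug('offpeak announced by', data.id)
}

function onOffPeakTick(dailyOffpeakTime) {
  if (Date.now() - lastOffPeak > 60e3) {
    offPeakBC.postMessage({ id: offPeakId, value: dailyOffpeakTime })
  }
  if (typeof global.gc === 'function') {
    global.gc() // only defined when Node runs with --expose-gc
  }
}

setInterval(() => onOffPeakTick('00:00-04:00'), 10e3).unref()
```

Because the GC call is gated on `typeof global.gc === 'function'`, the whole block is a no-op unless the process was started with `--expose-gc`.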
@@ -295,8 +310,8 @@ export function makeApp(appConfig, onTerminate) {
     const histogram = monitorEventLoopDelay({ resolution })
     histogram.enable()

-    const
-    lagBC.unref()
+    const lagId = (process.pid << 16) | threadId
+    const lagBC = new BroadcastChannel('nxt:lag').unref()

     if (isMainThread && isPrimary) {
       const lagMap = new Map()
@@ -322,7 +337,6 @@ export function makeApp(appConfig, onTerminate) {
       }, 500).unref()
     }

-    const lagId = (process.pid << 16) | threadId
     setInterval(() => {
       let currentLag = Math.max(0, histogram.mean / 1e6 - resolution)
       if (Number.isNaN(currentLag)) {
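Both the lag reporter and the off-peak announcer identify the sender as `(process.pid << 16) | threadId`, packing the thread id into the low 16 bits. A small worked example of packing and unpacking such an id follows; the library only ever sends the packed value, and `unpackId` is hypothetical (it assumes the pid fits in 15 bits so the 32-bit shift does not overflow).

```js
// Pack a pid and a worker threadId into one integer the way app.js builds
// lagId/offPeakId, then split it back apart for illustration.
function packId(pid, threadId) {
  return (pid << 16) | threadId // threadId occupies the low 16 bits
}

function unpackId(id) {
  return { pid: id >>> 16, threadId: id & 0xffff }
}

console.log(packId(1234, 7)) // 80871431
console.log(unpackId(80871431)) // { pid: 1234, threadId: 7 }
```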
@@ -564,8 +578,7 @@ export function makeApp(appConfig, onTerminate) {
     stats$ = rxjs.timer(0, 10e3).pipe(rx.map(() => ({})))
   }

-  const memoryUsageBC = new BroadcastChannel('nxt:memoryUsage')
-  memoryUsageBC.unref()
+  const memoryUsageBC = new BroadcastChannel('nxt:memoryUsage').unref()

   let memoryUsageMap
   if (isMainThread) {
@@ -953,8 +966,7 @@ export function makeApp(appConfig, onTerminate) {
   if (appConfig.inspect !== false && !cluster.isWorker) {
     // TODO (fix): What about cluster?

-    const inspectBC = new BroadcastChannel('nxt:inspect')
-    inspectBC.unref()
+    const inspectBC = new BroadcastChannel('nxt:inspect').unref()

     let inspectOpen = false

@@ -1095,8 +1107,7 @@ export function makeApp(appConfig, onTerminate) {
   if (appConfig.utils !== false && !cluster.isWorker) {
     // TODO (fix): What about cluster?

-    const utilsBC = new BroadcastChannel('nxt:utils')
-    utilsBC.unref()
+    const utilsBC = new BroadcastChannel('nxt:utils').unref()

     function writeHeapSnapshot() {
       const snapshotPath = `${os.tmpdir()}/heap-${Date.now()}-${serviceName}-${serviceModule}-${serviceInstanceId}-${serviceWorkerId}-${threadId}.heapsnapshot`
package/cache.js
CHANGED
@@ -4,6 +4,21 @@ import { fastNow } from './time.js'

 function noop() {}

+const dbs = new Set()
+
+{
+  const offPeakBC = new BroadcastChannel('nxt:offPeak').unref()
+  offPeakBC.onmessage = () => {
+    for (const db of dbs) {
+      try {
+        db.purgeStale()
+      } catch (err) {
+        process.emitWarning(err)
+      }
+    }
+  }
+}
+
 /**
  * @template [V=unknown]
  * @typedef {object} AsyncCacheOptions
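cache.js now keeps a module-level registry of open caches and listens on the same `nxt:offPeak` channel, so every registered cache purges its stale entries when an off-peak announcement arrives. The sketch below strips that pattern to its essentials; `SimpleCache` and its millisecond deadlines are illustrative stand-ins for the real `AsyncCache` with its LRU and SQLite layers, and only the registry wiring mirrors the diff.

```js
// Sketch: a module-level Set of caches, purged on every nxt:offPeak message.
import { BroadcastChannel } from 'node:worker_threads'

const dbs = new Set()

const offPeakBC = new BroadcastChannel('nxt:offPeak')
offPeakBC.unref()
offPeakBC.onmessage = () => {
  for (const db of dbs) {
    try {
      db.purgeStale()
    } catch (err) {
      process.emitWarning(err) // one failing cache must not stop the others
    }
  }
}

class SimpleCache {
  #entries = new Map() // key -> { value, stale } where stale is an epoch-ms deadline

  constructor() {
    dbs.add(this) // register so off-peak purges reach this instance
  }

  set(key, value, staleMs) {
    this.#entries.set(key, { value, stale: Date.now() + staleMs })
  }

  purgeStale() {
    const now = Date.now()
    for (const [key, entry] of this.#entries) {
      if (entry.stale <= now) this.#entries.delete(key)
    }
  }

  close() {
    dbs.delete(this) // deregister, mirroring AsyncCache's close()
  }
}
```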
@@ -98,11 +113,14 @@ export class AsyncCache {
           );
         `)

-        this.#getQuery = this.#db.prepare(
+        this.#getQuery = this.#db.prepare(
+          `SELECT val, ttl, stale FROM cache WHERE key = ? AND stale > ?`,
+        )
         this.#setQuery = this.#db.prepare(
           `INSERT OR REPLACE INTO cache (key, val, ttl, stale) VALUES (?, ?, ?, ?)`,
         )
         this.#delQuery = this.#db.prepare(`DELETE FROM cache WHERE key = ?`)
+        this.#purgeStaleQuery = this.#db.prepare(`DELETE FROM cache WHERE stale <= ?`)
         break
       } catch (err) {
         if (n >= 8) {
@@ -118,15 +136,18 @@ export class AsyncCache {
         }
       }
     }
+
+    dbs.add(this)
   }

   close() {
+    dbs.delete(this)
     this.#db?.close()
   }

   /**
    * @param {...any} args
-   * @returns {{ value: V|Promise<V>, async:
+   * @returns {{ value: V, async: false } | { value: Promise<V>, async: true }}
    */
   get(...args) {
     return this.#load(args, true)
@@ -134,7 +155,7 @@ export class AsyncCache {

   /**
    * @param {...any} args
-   * @returns {{ value: V|Promise<V>, async:
+   * @returns {{ value: V, async: false } | { value: Promise<V>, async: true }}
    */
   peek(...args) {
     return this.#load(args, false)
@@ -148,10 +169,17 @@ export class AsyncCache {
     return this.#refresh(args)
   }

+  purgeStale() {
+    try {
+      this.#lru?.purgeStale()
+      this.#purgeStaleQuery?.run(fastNow())
+    } catch {}
+  }
+
   /**
    * @param {any[]} args
    * @param {boolean} refresh
-   * @returns {{ value: V|Promise<V>, async:
+   * @returns {{ value: V, async: false } | { value: Promise<V>, async: true }}
    */
   #load(args, refresh) {
     const key = this.#keySelector(...args)
@@ -166,7 +194,7 @@ export class AsyncCache {

     if (cached === undefined) {
       try {
-        const ret = this.#getQuery?.get(key)
+        const ret = this.#getQuery?.get(key, now)
         if (ret !== undefined) {
           cached = {
             ttl: ret.ttl,
@@ -214,11 +242,13 @@ export class AsyncCache {
     if (promise === undefined && this.#valueSelector) {
       promise = this.#valueSelector(...args).then(
         (value) => {
-          this.
+          if (this.#dedupe.delete(key)) {
+            this.#set(key, value)
+          }
           return value
         },
         (err) => {
-          this
+          this.#delete(key)
           throw err
         },
       )
@@ -233,23 +263,31 @@ export class AsyncCache {
    * @param {string} key
    * @param {V} value
    */
-  set(key, value) {
+  #set(key, value) {
    if (typeof key !== 'string' || key.length === 0) {
      throw new TypeError('key must be a non-empty string')
    }

    const now = fastNow()
-
-
-
-
+
+    const ttl = now + Math.max(0, this.#ttl(value, key) ?? 0)
+    if (!Number.isFinite(ttl)) {
+      throw new TypeError('ttl must be a finite number')
+    }
+
+    const stale = now + Math.max(ttl, this.#stale(value, key) ?? 0)
+    if (!Number.isFinite(stale)) {
+      throw new TypeError('stale must be a finite number')
+    }
+
+    if (stale <= now) {
+      return
    }

-    this.#lru?.set(key,
-    this.#dedupe.delete(key)
+    this.#lru?.set(key, { ttl, stale, value })

    try {
-      this.#setQuery?.run(key, JSON.stringify(value),
+      this.#setQuery?.run(key, JSON.stringify(value), ttl, stale)
    } catch {
      // Do nothing...
    }
@@ -258,7 +296,7 @@ export class AsyncCache {
   /**
    * @param {string} key
    */
-  delete(key) {
+  #delete(key) {
    if (typeof key !== 'string' || key.length === 0) {
      throw new TypeError('key must be a non-empty string')
    }
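The new `#set()` derives two absolute deadlines from `fastNow()`: `ttl`, after which an entry is considered old, and `stale`, after which it is no longer returned at all (reads now filter on `stale > now`, and `purgeStale()` deletes rows with `stale <= now`). The sketch below normalises both inputs to millisecond offsets and shows the resulting three states; treating `ttl <= now < stale` as "returned but due for refresh" is an assumption about the read path, which the diff does not show in full.

```js
// Worked example of the two-deadline bookkeeping: absolute `ttl` and `stale`
// timestamps computed from "now". classify() reflects an assumed read policy.
function makeEntry(value, ttlMs, staleMs, now = Date.now()) {
  const ttl = now + Math.max(0, ttlMs ?? 0)
  const stale = now + Math.max(ttlMs ?? 0, staleMs ?? 0) // never earlier than ttl
  return { value, ttl, stale }
}

function classify(entry, at = Date.now()) {
  if (entry.stale <= at) return 'expired' // filtered out by `stale > ?`, removed by purgeStale()
  if (entry.ttl <= at) return 'stale' // still served, refresh candidate (assumed)
  return 'fresh'
}

const now = Date.now()
const entry = makeEntry({ hello: 'world' }, 1_000, 10_000, now)
console.log(classify(entry, now + 500)) // 'fresh'
console.log(classify(entry, now + 5_000)) // 'stale'
console.log(classify(entry, now + 20_000)) // 'expired'
```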
package/http.js
CHANGED
@@ -13,6 +13,8 @@ export const kAbortController = Symbol('abortController')
 const ERR_HEADER_EXPR =
   /^(content-length|content-type|te|host|upgrade|trailers|connection|keep-alive|http2-settings|transfer-encoding|proxy-connection|proxy-authenticate|proxy-authorization)$/i

+const pending = (globalThis._nxt_lib_http_pending = new Set())
+
 // https://github.com/fastify/fastify/blob/main/lib/reqIdGenFactory.js
 // 2,147,483,647 (2^31 − 1) stands for max SMI value (an internal optimization of V8).
 // With this upper bound, if you'll be generating 1k ids/sec, you're going to hit it in ~25 days.
@@ -145,6 +147,7 @@ export async function upgradeMiddleware(ctx, next) {
   })

   const reqLogger = ctx.logger?.child({ req })
+  pending.add(ctx)
   try {
     const isHealthcheck = req.url === '/healthcheck' || req.url === '/_up'
     if (!isHealthcheck) {
@@ -191,6 +194,7 @@ export async function upgradeMiddleware(ctx, next) {
     }
     socket.destroy(err)
   } finally {
+    pending.delete(ctx)
     if (!socket.writableEnded && !socket.destroyed) {
       socket.destroy()
       reqLogger?.warn('socket destroyed')
@@ -203,6 +207,7 @@ export async function requestMiddleware(ctx, next) {
   const startTime = performance.now()

   const reqLogger = ctx.logger?.child({ req })
+  pending.add(ctx)
   try {
     const isHealthcheck = req.url === '/healthcheck' || req.url === '/_up'
     if (!isHealthcheck) {
@@ -334,6 +339,7 @@ export async function requestMiddleware(ctx, next) {
       res.destroy(err)
     }
   } finally {
+    pending.delete(ctx)
     if (res.writableEnded || res.destroyed || res.stream?.destroyed) {
       // Do nothing..
     } else {
@@ -728,6 +734,7 @@ export async function request(ctx, next) {

   let reqLogger = logger

+  pending.add(ctx)
   try {
     ctx.url = requestTarget(req)
     if (!ctx.url) {
@@ -837,6 +844,7 @@ export async function request(ctx, next) {
       }
     }
   } finally {
+    pending.delete(ctx)
     if (!res.writableEnded) {
       res.destroy()
       logger.debug('request destroyed')
@@ -914,6 +922,7 @@ export async function upgrade(ctx, next) {

   let aborted = false
   let reqLogger = logger
+  pending.add(ctx)
   try {
     ctx.url = requestTarget(req)
     if (!ctx.url) {
@@ -978,6 +987,7 @@ export async function upgrade(ctx, next) {

     socket.destroy(err)
   } finally {
+    pending.delete(ctx)
     queueMicrotask(() => ac.abort())
   }
 }
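http.js now registers every request context in a module-level `pending` set for the lifetime of the request and exposes it as `globalThis._nxt_lib_http_pending`, making in-flight requests observable from outside the module. The diff does not show a consumer; the sketch below demonstrates the add/delete discipline and one plausible, purely hypothetical consumer that waits for in-flight requests to drain at shutdown.

```js
// Sketch: track in-flight request contexts in a Set, mirroring http.js.
// drainPending() is a hypothetical consumer, not part of the library.
const pending = (globalThis._nxt_lib_http_pending = new Set())

async function withTracking(ctx, next) {
  pending.add(ctx) // visible to anyone holding the Set while the request runs
  try {
    return await next()
  } finally {
    pending.delete(ctx) // always removed, even when next() throws
  }
}

async function drainPending(timeoutMs = 5_000) {
  const deadline = Date.now() + timeoutMs
  while (pending.size > 0 && Date.now() < deadline) {
    await new Promise((resolve) => setTimeout(resolve, 100))
  }
  return pending.size // 0 means all in-flight requests completed in time
}
```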
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@nxtedition/lib",
-  "version": "26.0.21",
+  "version": "26.0.23",
   "license": "MIT",
   "author": "Robert Nagy <robert.nagy@boffins.se>",
   "type": "module",
@@ -77,7 +77,7 @@
     "http-errors": "^2.0.0",
     "json5": "^2.2.3",
     "lodash": "^4.17.21",
-    "lru-cache": "^11.1
+    "lru-cache": "^11.2.1",
     "mime": "^4.0.7",
     "mitata": "^1.0.34",
     "moment-timezone": "^0.5.48",
@@ -109,6 +109,7 @@
     "lint-staged": "^16.1.5",
     "prettier": "^3.6.2",
     "rxjs": "^7.8.2",
+    "typescript": "^5.9.2",
     "typescript-eslint": "^8.40.0"
   },
   "peerDependencies": {
package/util/template/index-common.js
CHANGED
@@ -209,7 +209,7 @@ export function makeTemplateCompiler({ ds, proxify, logger, platform }) {
     return resolver ? (args$) => resolver(template, args$) : null
   }

-  function resolveTemplate(template, args$, options) {
+  async function resolveTemplate(template, args$, options) {
     const expr = _compileTemplate(template)
     return expr ? firstValueFrom(expr(template, args$), options) : template
   }
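Declaring `resolveTemplate` as `async` changes its contract on the fast path: when `_compileTemplate` returns `null`, the template used to be returned synchronously and is now wrapped in a resolved Promise. A minimal before/after illustration follows, with simplified stand-ins for `_compileTemplate` and the resolver; only the sync-versus-async contract is the point.

```js
// Simplified stand-in: templates containing '{{' compile to an async resolver,
// everything else passes through unchanged.
const _compileTemplate = (template) => (template.includes('{{') ? async () => 'resolved' : null)

// Before the change: plain templates came back synchronously.
function resolveTemplateBefore(template) {
  const expr = _compileTemplate(template)
  return expr ? expr(template) : template // string OR Promise, depending on input
}

// After the change: callers always receive a Promise.
async function resolveTemplateAfter(template) {
  const expr = _compileTemplate(template)
  return expr ? expr(template) : template
}

console.log(typeof resolveTemplateBefore('plain')) // 'string'
console.log(resolveTemplateAfter('plain') instanceof Promise) // true
```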