@nxtedition/lib 26.0.22 → 26.0.24

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (3):
  1. package/app.js +24 -13
  2. package/cache.js +55 -16
  3. package/package.json +3 -2
package/app.js CHANGED
@@ -142,17 +142,32 @@ export function makeApp(appConfig, onTerminate) {
142
142
  process.title = userAgent
143
143
  }
144
144
 
145
- const dailyOffpeakTime = config.dailyOffpeakTime ?? getUTCRangeForLocalTime('00:00-04:00')
145
+ const dailyOffpeakTime =
146
+ config.dailyOffpeakTime ?? (isMainThread ? getUTCRangeForLocalTime('00:00-04:00') : null)
146
147
 
147
148
  if (dailyOffpeakTime) {
148
- // TODO (fix): This is ugly...
149
+ const offPeakId = (process.pid << 16) | threadId
150
+ const offPeakBC = new BroadcastChannel('nxt:offPeak').unref()
151
+
152
+ let lastOffPeak = 0
153
+ offPeakBC.onmessage = ({ data }) => {
154
+ const now = Date.now()
155
+ logger.debug({ data, elapsedTime: now - lastOffPeak }, 'offpeak')
156
+ lastOffPeak = now
157
+ }
149
158
 
150
159
  const [start, end] = dailyOffpeakTime.split('-')
151
160
  let wasOffpeak = null
152
161
  setInterval(() => {
153
162
  if (isTimeBetween(new Date(), start, end)) {
154
- if (!wasOffpeak && global.gc) {
155
- global.gc()
163
+ if (!wasOffpeak) {
164
+ if (Date.now() - lastOffPeak > 60e3) {
165
+ offPeakBC.postMessage({ id: offPeakId, value: dailyOffpeakTime })
166
+ }
167
+
168
+ if (typeof global.gc === 'function') {
169
+ global.gc()
170
+ }
156
171
  }
157
172
  wasOffpeak = true
158
173
  } else {
@@ -295,8 +310,8 @@ export function makeApp(appConfig, onTerminate) {
295
310
  const histogram = monitorEventLoopDelay({ resolution })
296
311
  histogram.enable()
297
312
 
298
- const lagBC = new BroadcastChannel('nxt:lag')
299
- lagBC.unref()
313
+ const lagId = (process.pid << 16) | threadId
314
+ const lagBC = new BroadcastChannel('nxt:lag').unref()
300
315
 
301
316
  if (isMainThread && isPrimary) {
302
317
  const lagMap = new Map()
@@ -322,7 +337,6 @@ export function makeApp(appConfig, onTerminate) {
322
337
  }, 500).unref()
323
338
  }
324
339
 
325
- const lagId = (process.pid << 16) | threadId
326
340
  setInterval(() => {
327
341
  let currentLag = Math.max(0, histogram.mean / 1e6 - resolution)
328
342
  if (Number.isNaN(currentLag)) {
@@ -564,8 +578,7 @@ export function makeApp(appConfig, onTerminate) {
564
578
  stats$ = rxjs.timer(0, 10e3).pipe(rx.map(() => ({})))
565
579
  }
566
580
 
567
- const memoryUsageBC = new BroadcastChannel('nxt:memoryUsage')
568
- memoryUsageBC.unref()
581
+ const memoryUsageBC = new BroadcastChannel('nxt:memoryUsage').unref()
569
582
 
570
583
  let memoryUsageMap
571
584
  if (isMainThread) {
@@ -953,8 +966,7 @@ export function makeApp(appConfig, onTerminate) {
953
966
  if (appConfig.inspect !== false && !cluster.isWorker) {
954
967
  // TODO (fix): What about cluster?
955
968
 
956
- const inspectBC = new BroadcastChannel('nxt:inspect')
957
- inspectBC.unref()
969
+ const inspectBC = new BroadcastChannel('nxt:inspect').unref()
958
970
 
959
971
  let inspectOpen = false
960
972
 
@@ -1095,8 +1107,7 @@ export function makeApp(appConfig, onTerminate) {
1095
1107
  if (appConfig.utils !== false && !cluster.isWorker) {
1096
1108
  // TODO (fix): What about cluster?
1097
1109
 
1098
- const utilsBC = new BroadcastChannel('nxt:utils')
1099
- utilsBC.unref()
1110
+ const utilsBC = new BroadcastChannel('nxt:utils').unref()
1100
1111
 
1101
1112
  function writeHeapSnapshot() {
1102
1113
  const snapshotPath = `${os.tmpdir()}/heap-${Date.now()}-${serviceName}-${serviceModule}-${serviceInstanceId}-${serviceWorkerId}-${threadId}.heapsnapshot`
package/cache.js CHANGED
@@ -4,6 +4,21 @@ import { fastNow } from './time.js'
4
4
 
5
5
  function noop() {}
6
6
 
7
+ const dbs = new Set()
8
+
9
+ {
10
+ const offPeakBC = new BroadcastChannel('nxt:offPeak').unref()
11
+ offPeakBC.onmessage = () => {
12
+ for (const db of dbs) {
13
+ try {
14
+ db.purgeStale()
15
+ } catch (err) {
16
+ process.emitWarning(err)
17
+ }
18
+ }
19
+ }
20
+ }
21
+
7
22
  /**
8
23
  * @template [V=unknown]
9
24
  * @typedef {object} AsyncCacheOptions
@@ -33,6 +48,7 @@ export class AsyncCache {
33
48
  #getQuery
34
49
  #setQuery
35
50
  #delQuery
51
+ #purgeStaleQuery
36
52
 
37
53
  /**
38
54
  * @param {string} location
@@ -98,11 +114,14 @@ export class AsyncCache {
98
114
  );
99
115
  `)
100
116
 
101
- this.#getQuery = this.#db.prepare(`SELECT val, ttl, stale FROM cache WHERE key = ?`)
117
+ this.#getQuery = this.#db.prepare(
118
+ `SELECT val, ttl, stale FROM cache WHERE key = ? AND stale > ?`,
119
+ )
102
120
  this.#setQuery = this.#db.prepare(
103
121
  `INSERT OR REPLACE INTO cache (key, val, ttl, stale) VALUES (?, ?, ?, ?)`,
104
122
  )
105
123
  this.#delQuery = this.#db.prepare(`DELETE FROM cache WHERE key = ?`)
124
+ this.#purgeStaleQuery = this.#db.prepare(`DELETE FROM cache WHERE stale <= ?`)
106
125
  break
107
126
  } catch (err) {
108
127
  if (n >= 8) {
@@ -118,15 +137,18 @@ export class AsyncCache {
118
137
  }
119
138
  }
120
139
  }
140
+
141
+ dbs.add(this)
121
142
  }
122
143
 
123
144
  close() {
145
+ dbs.delete(this)
124
146
  this.#db?.close()
125
147
  }
126
148
 
127
149
  /**
128
150
  * @param {...any} args
129
- * @returns {{ value: V|Promise<V>, async: boolean }}
151
+ * @returns {{ value: V, async: false } | { value: Promise<V>, async: true }}
130
152
  */
131
153
  get(...args) {
132
154
  return this.#load(args, true)
@@ -134,7 +156,7 @@ export class AsyncCache {
134
156
 
135
157
  /**
136
158
  * @param {...any} args
137
- * @returns {{ value: V|Promise<V>, async: boolean }}
159
+ * @returns {{ value: V, async: false } | { value: Promise<V>, async: true }}
138
160
  */
139
161
  peek(...args) {
140
162
  return this.#load(args, false)
@@ -148,10 +170,17 @@ export class AsyncCache {
148
170
  return this.#refresh(args)
149
171
  }
150
172
 
173
+ purgeStale() {
174
+ try {
175
+ this.#lru?.purgeStale()
176
+ this.#purgeStaleQuery?.run(fastNow())
177
+ } catch {}
178
+ }
179
+
151
180
  /**
152
181
  * @param {any[]} args
153
182
  * @param {boolean} refresh
154
- * @returns {{ value: V|Promise<V>, async: boolean }}
183
+ * @returns {{ value: V, async: false } | { value: Promise<V>, async: true }}
155
184
  */
156
185
  #load(args, refresh) {
157
186
  const key = this.#keySelector(...args)
@@ -166,7 +195,7 @@ export class AsyncCache {
166
195
 
167
196
  if (cached === undefined) {
168
197
  try {
169
- const ret = this.#getQuery?.get(key)
198
+ const ret = this.#getQuery?.get(key, now)
170
199
  if (ret !== undefined) {
171
200
  cached = {
172
201
  ttl: ret.ttl,
@@ -214,11 +243,13 @@ export class AsyncCache {
214
243
  if (promise === undefined && this.#valueSelector) {
215
244
  promise = this.#valueSelector(...args).then(
216
245
  (value) => {
217
- this.set(key, value)
246
+ if (this.#dedupe.delete(key)) {
247
+ this.#set(key, value)
248
+ }
218
249
  return value
219
250
  },
220
251
  (err) => {
221
- this.delete(key)
252
+ this.#delete(key)
222
253
  throw err
223
254
  },
224
255
  )
@@ -233,23 +264,31 @@ export class AsyncCache {
233
264
  * @param {string} key
234
265
  * @param {V} value
235
266
  */
236
- set(key, value) {
267
+ #set(key, value) {
237
268
  if (typeof key !== 'string' || key.length === 0) {
238
269
  throw new TypeError('key must be a non-empty string')
239
270
  }
240
271
 
241
272
  const now = fastNow()
242
- const cached = {
243
- ttl: now + this.#ttl(value, key),
244
- stale: now + this.#stale(value, key),
245
- value,
273
+
274
+ const ttl = now + Math.max(0, this.#ttl(value, key) ?? 0)
275
+ if (!Number.isFinite(ttl)) {
276
+ throw new TypeError('ttl must be a finite number')
277
+ }
278
+
279
+ const stale = now + Math.max(ttl, this.#stale(value, key) ?? 0)
280
+ if (!Number.isFinite(stale)) {
281
+ throw new TypeError('stale must be a finite number')
282
+ }
283
+
284
+ if (stale <= now) {
285
+ return
246
286
  }
247
287
 
248
- this.#lru?.set(key, cached)
249
- this.#dedupe.delete(key)
288
+ this.#lru?.set(key, { ttl, stale, value })
250
289
 
251
290
  try {
252
- this.#setQuery?.run(key, JSON.stringify(value), cached.ttl, cached.stale)
291
+ this.#setQuery?.run(key, JSON.stringify(value), ttl, stale)
253
292
  } catch {
254
293
  // Do nothing...
255
294
  }
@@ -258,7 +297,7 @@ export class AsyncCache {
258
297
  /**
259
298
  * @param {string} key
260
299
  */
261
- delete(key) {
300
+ #delete(key) {
262
301
  if (typeof key !== 'string' || key.length === 0) {
263
302
  throw new TypeError('key must be a non-empty string')
264
303
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "@nxtedition/lib",
3
- "version": "26.0.22",
3
+ "version": "26.0.24",
4
4
  "license": "MIT",
5
5
  "author": "Robert Nagy <robert.nagy@boffins.se>",
6
6
  "type": "module",
@@ -77,7 +77,7 @@
77
77
  "http-errors": "^2.0.0",
78
78
  "json5": "^2.2.3",
79
79
  "lodash": "^4.17.21",
80
- "lru-cache": "^11.1.0",
80
+ "lru-cache": "^11.2.1",
81
81
  "mime": "^4.0.7",
82
82
  "mitata": "^1.0.34",
83
83
  "moment-timezone": "^0.5.48",
@@ -109,6 +109,7 @@
109
109
  "lint-staged": "^16.1.5",
110
110
  "prettier": "^3.6.2",
111
111
  "rxjs": "^7.8.2",
112
+ "typescript": "^5.9.2",
112
113
  "typescript-eslint": "^8.40.0"
113
114
  },
114
115
  "peerDependencies": {