@nxtedition/lib 28.0.20 → 28.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (4)
  1. package/app.js +267 -219
  2. package/cache.js +20 -4
  3. package/memory.js +10 -2
  4. package/package.json +3 -2
package/app.js CHANGED
@@ -623,24 +623,42 @@ export function makeApp(appConfig, onTerminateOrMeta, metaOrNull) {
  let stats$

  if (typeof appConfig.stats.subscribe === 'function') {
- stats$ = appConfig.stats
+ stats$ = appConfig.stats.pipe(rxjs.auditTime(1e3))
  } else if (typeof appConfig.stats === 'function') {
- stats$ = rxjs.timer(0, 10e3).pipe(
+ stats$ = rxjs.timer(0, 1e3).pipe(
  rxjs.exhaustMap(() => {
  const ret = appConfig.stats({ ds, couch, logger })
  return ret?.then || ret?.subscribe ? ret : rxjs.of(ret)
  }),
  )
  } else if (typeof appConfig.stats === 'object') {
- stats$ = rxjs.timer(0, 10e3).pipe(rxjs.map(() => appConfig.stats))
+ stats$ = rxjs.timer(0, 1e3).pipe(rxjs.map(() => appConfig.stats))
  } else {
- stats$ = rxjs.timer(0, 10e3).pipe(rxjs.map(() => ({})))
+ stats$ = rxjs.timer(0, 1e3).pipe(rxjs.map(() => ({})))
  }

  let statsMap

  const startTime = Date.now()

+ const makeStatsPipe = (key) =>
+ rxjs.pipe(
+ rxjs.catchError((err) => {
+ logger.error({ err, key }, 'monitor.stats')
+ return rxjs.of([
+ {
+ id: `app:user_monitor_stats_${key}`,
+ level: 50,
+ code: err.code,
+ msg: err.message,
+ },
+ ])
+ }),
+ rxjs.startWith({}),
+ rxjs.distinctUntilChanged(fp.isEqual),
+ rxjs.repeatWhen((complete$) => complete$.pipe(rxjs.delay(10e3))),
+ )
+
  appDestroyers.unshift(
  rxjs
  .timer(0, 1e3)
@@ -711,18 +729,14 @@ export function makeApp(appConfig, onTerminateOrMeta, metaOrNull) {
  undici,
  }
  }),
- rxjs.withLatestFrom(stats$.pipe(rxjs.startWith({}))),
+ makeStatsPipe('app'),
+ rxjs.withLatestFrom(stats$.pipe(makeStatsPipe('user'))),
  rxjs.map(([appStats, serviceStats]) => ({
  ...appStats,
  // TODO (fix): [serviceName] is not great and can collide.
  [serviceName]: serviceStats,
  })),
- rxjs.retry({
- delay(err, retryCount) {
- logger.error({ err, retryCount }, 'monitor.stats$ failed')
- return rxjs.timer(10e3)
- },
- }),
+ makeStatsPipe('stats'),
  )
  .subscribe(monitorProviders.stats$),
  )
@@ -761,231 +775,250 @@ export function makeApp(appConfig, onTerminateOrMeta, metaOrNull) {

  if (appConfig.status) {
  let status$
- if (appConfig.status.subscribe) {
- status$ = appConfig.status
+ if (typeof appConfig.status.subscribe === 'function') {
+ status$ = appConfig.status.pipe(rxjs.auditTime(1e3))
  } else if (typeof appConfig.status === 'function') {
- status$ = rxjs
- .defer(() => {
- const ret = appConfig.status({ ds, couch, logger })
- return ret?.then || ret?.subscribe ? ret : rxjs.of(ret)
- })
- .pipe(
- rxjs.catchError((err) => rxjs.of({ warnings: [err.message] })),
- rxjs.repeatWhen(() => rxjs.timer(10e3)),
- )
+ status$ = rxjs.defer(() => {
+ const ret = appConfig.status({ ds, couch, logger })
+ return ret?.then || ret?.subscribe ? ret : rxjs.of(ret)
+ })
  } else if (appConfig.status && typeof appConfig.status === 'object') {
- status$ = rxjs.timer(0, 10e3).pipe(rxjs.exhaustMap(() => appConfig.status))
+ status$ = rxjs.timer(0, 1e3).pipe(rxjs.exhaustMap(() => appConfig.status))
  } else {
  status$ = rxjs.of({})
  }

+ let statusMap
+
+ const makeStatusPipe = (key) =>
+ rxjs.pipe(
+ rxjs.catchError((err) => {
+ logger.error({ err, key }, 'monitor.status')
+ return rxjs.of([
+ {
+ id: `app:user_monitor_status_${key}`,
+ level: 50,
+ code: err.code,
+ msg: err.message,
+ },
+ ])
+ }),
+ rxjs.startWith([]),
+ rxjs.distinctUntilChanged(fp.isEqual),
+ rxjs.repeatWhen((complete$) => complete$.pipe(rxjs.delay(10e3))),
+ )
+
  appDestroyers.unshift(
  rxjs
- .combineLatest(
- [
- status$.pipe(
- rxjs.filter(Boolean),
- rxjs.map((xs) => (Array.isArray(xs) ? { messages: xs } : xs)),
- rxjs.catchError((err) => {
- logger.error({ err }, 'monitor.status')
- return rxjs.of([
- {
- id: 'app:user_monitor_status',
- level: 50,
- code: err.code,
- msg: err.message,
- },
- ])
- }),
- rxjs.startWith([]),
- rxjs.distinctUntilChanged(fp.isEqual),
- rxjs.repeatWhen((complete$) => complete$.pipe(rxjs.delay(10e3))),
- ),
- monitorProviders.stats$?.pipe(
- rxjs.map(({ memory, heap, utilization, undici, http }) => {
- const messages = []
-
- if (memory?.containerLimit) {
- const usagePercent = (memory.containerUsage / memory.containerLimit) * 100
- messages.push({
- id: 'app:container_memory_usage',
- level: usagePercent > 90 ? 50 : usagePercent > 70 ? 40 : 30,
- msg: `Memory Usage: ${usagePercent.toFixed(2)}%`,
- })
+ .combineLatest([
+ rxjs.timer(0, 1e3).pipe(
+ rxjs.map(() => {
+ const messages = []
+ if (statusMap) {
+ for (const status of statusMap.values()) {
+ if (Array.isArray(status?.messages)) {
+ messages.push(...status.messages)
+ } else if (Array.isArray(status)) {
+ messages.push(...status)
+ }
  }
+ }
+ return { messages }
+ }),
+ ),
+ status$.pipe(
+ rxjs.filter(Boolean),
+ rxjs.map((xs) => (Array.isArray(xs) ? { messages: xs } : xs)),
+ makeStatusPipe('stats'),
+ ),
+ monitorProviders.stats$?.pipe(
+ rxjs.map(({ memory, heap, utilization, undici, http }) => {
+ const messages = []
+
+ if (memory?.containerLimit) {
+ const usagePercent = (memory.containerUsage / memory.containerLimit) * 100
+ messages.push({
+ id: 'app:container_memory_usage',
+ level: usagePercent > 90 ? 50 : usagePercent > 70 ? 40 : 30,
+ msg: `Memory Usage: ${usagePercent.toFixed(2)}% (${(memory.containerUsage / 1e9).toFixed(2)} GiB / ${(memory.containerLimit / 1e9).toFixed(2)} GiB)`,
+ })
+ }

- if (heap) {
- const usagePercent = (heap.used_heap_size / heap.heap_size_limit) * 100
- messages.push({
- id: 'app:heap_memory_usage',
- level: usagePercent > 90 ? 50 : usagePercent > 70 ? 40 : 30,
- msg: `Heap Usage: ${usagePercent.toFixed(2)}%`,
- })
- }
+ if (heap) {
+ const usagePercent = (heap.used_heap_size / heap.heap_size_limit) * 100
+ messages.push({
+ id: 'app:heap_memory_usage',
+ level: usagePercent > 90 ? 50 : usagePercent > 70 ? 40 : 30,
+ msg: `Heap Usage: ${usagePercent.toFixed(2)}%`,
+ })
+ }

- if (utilization) {
- const elp = utilization.utilization * 100
- messages.push({
- id: 'app:event_loop_utilization',
- level: elp > 95 ? 50 : elp > 80 ? 40 : 30,
- msg: `Event Loop Utilization: ${elp.toFixed(2)}%`,
- })
- }
+ if (utilization) {
+ const elp = utilization.utilization * 100
+ messages.push({
+ id: 'app:event_loop_utilization',
+ level: elp > 95 ? 50 : elp > 80 ? 40 : 30,
+ msg: `Event Loop Utilization: ${elp.toFixed(2)}%`,
+ })
+ }

- if (undici) {
- messages.push({
- id: 'app:undici_upstream_sockets',
- level: undici.totalSockets > 8192 ? 50 : undici.totalSockets > 4096 ? 40 : 30,
- msg: `Undici: ${undici.totalSockets} upstream connected`,
- })
- }
+ if (undici?.totalSockets) {
+ messages.push({
+ id: 'app:undici_upstream_sockets',
+ level: undici.totalSockets > 8192 ? 50 : undici.totalSockets > 4096 ? 40 : 30,
+ msg: `Undici: ${undici.totalSockets} upstream connected`,
+ })
+ }

- if (http) {
- messages.push({
- id: 'app:http_pending_requests',
- level: http.totalPending > 8192 ? 50 : http.totalPending > 4096 ? 40 : 30,
- msg: `HTTP: ${http.totalPending} pending requests`,
- })
- }
+ if (http?.totalPending) {
+ messages.push({
+ id: 'app:http_pending_requests',
+ level: http.totalPending > 8192 ? 50 : http.totalPending > 4096 ? 40 : 30,
+ msg: `http: ${http.totalPending} pending requests`,
+ })
+ }

- return messages
- }),
+ return messages
+ }),
+ makeStatusPipe('stats'),
+ ),
+ toobusy?.appLag$.pipe(
+ rxjs.map((lag) =>
+ lag == null
+ ? []
+ : [
+ {
+ id: 'app:toobusy_lag',
+ level: lag > 1e3 ? 50 : lag > toobusy.maxLag ? 40 : 30,
+ code: 'NXT_LAG',
+ msg: `Lag: ${lag.toFixed(2)} ms`,
+ },
+ ],
+ ),
+ makeStatusPipe('toobusy'),
+ ) ?? rxjs.of([]),
+ underPressure?.pressure$?.pipe(
+ rxjs.map((pressure) =>
+ pressure == null
+ ? []
+ : [
+ {
+ id: 'app:under_pressure',
+ level: 40,
+ code: 'NXT_PRESSURE',
+ msg: `Under Pressure`,
+ },
+ ],
  ),
- toobusy?.appLag$.pipe(
- rxjs.map((lag) =>
- lag == null
- ? []
- : [
+ makeStatusPipe('underpressure'),
+ ) ?? rxjs.of([]),
+ couch
+ ? rxjs.timer(0, 10e3).pipe(
+ rxjs.exhaustMap(async () => {
+ try {
+ await couch.up()
+ return [
  {
- id: 'app:toobusy_lag',
- level: lag > 1e3 ? 50 : lag > toobusy.maxLag ? 40 : 30,
- code: 'NXT_LAG',
- msg: `Lag: ${lag.toFixed(2)} ms`,
+ id: 'app:couch',
+ level: 30,
+ msg: 'Couch: connected',
  },
- ],
- ),
- ) ?? rxjs.of([]),
- underPressure?.pressure$?.pipe(
- rxjs.map((pressure) =>
- pressure == null
- ? []
- : [
+ ]
+ } catch (err) {
+ return [
  {
- id: 'app:under_pressure',
+ id: 'app:couch',
  level: 40,
- code: 'NXT_PRESSURE',
- msg: `Under Pressure`,
+ code: err.code,
+ msg: 'Couch: ' + err.message,
  },
- ],
- ),
- ) ?? rxjs.of([]),
- couch
- ? rxjs.timer(0, 10e3).pipe(
- rxjs.exhaustMap(async () => {
- try {
- await couch.up()
- return [
+ ]
+ }
+ }),
+ makeStatusPipe('couch'),
+ )
+ : rxjs.of([]),
+ ds
+ ? rxjs.fromEvent(ds, 'connectionStateChanged').pipe(
+ rxjs.map((connectionState) =>
+ connectionState === 'OPEN'
+ ? [
  {
- id: 'app:couch',
+ id: 'app:ds_connection_state',
  level: 30,
- msg: 'Couch: connected',
+ msg: 'Deepstream: connected',
+ data: { connectionState },
  },
  ]
- } catch (err) {
- return [
+ : [
  {
- id: 'app:couch',
+ id: 'app:ds_connection_state',
  level: 40,
- code: err.code,
- msg: 'Couch: ' + err.message,
+ msg: 'Deepstream: connecting',
+ data: { connectionState },
  },
- ]
- }
- }),
- rxjs.startWith([]),
- rxjs.distinctUntilChanged(fp.isEqual),
- )
- : rxjs.of({}),
- ds
- ? rxjs.fromEvent(ds, 'connectionStateChanged').pipe(
- rxjs.map((connectionState) =>
- connectionState === 'OPEN'
- ? [
- {
- id: 'app:ds_connection_state',
- level: 30,
- msg: 'Deepstream: connected',
- data: { connectionState },
- },
- ]
- : [
- {
- id: 'app:ds_connection_state',
- level: 40,
- msg: 'Deepstream: connecting',
- data: { connectionState },
- },
- ],
- ),
- )
- : rxjs.of([]),
- ds
- ? rxjs.timer(0, 10e3).pipe(
- rxjs.exhaustMap(async () => {
- const messages = []
-
- if (ds.stats.record.records > 100e3) {
- messages.push({
- id: 'app:ds_record_records',
- level: 40,
- code: 'NXT_DEEPSTREAM_RECORDS_RECORDS',
- msg: 'Deepstream: ' + ds.stats.record.records + ' records',
- })
- }
-
- if (ds.stats.record.pruning > 100e3) {
- messages.push({
- id: 'app:ds_record_pruning',
- level: 40,
- code: 'NXT_DEEPSTREAM_RECORDS_PRUNING',
- msg: 'Deepstream: ' + ds.stats.record.pruning + ' pruning',
- })
- }
-
- if (ds.stats.record.pending > 10e3) {
- messages.push({
- id: 'app:ds_record_pending',
- level: 40,
- code: 'NXT_DEEPSTREAM_RECORDS_PENDING',
- msg: 'Deepstream: ' + ds.stats.record.pending + ' pending',
- })
- }
-
- if (ds.stats.record.updating > 10e3) {
- messages.push({
- id: 'app:ds_record_updating',
- level: 40,
- code: 'NXT_DEEPSTREAM_RECORDS_UPDATING',
- msg: 'Deepstream: ' + ds.stats.record.updating + ' updating',
- })
- }
-
- if (ds.stats.record.patching > 10e3) {
- messages.push({
- id: 'app:ds_record_patching',
- level: 40,
- code: 'NXT_DEEPSTREAM_RECORDS_PATCHING',
- msg: 'Deepstream: ' + ds.stats.record.patching + ' patching',
- })
- }
-
- return messages
- }),
- )
- : rxjs.of([]),
- rxjs.timer(0, 10e3),
- ].filter(Boolean),
- )
+ ],
+ ),
+ makeStatusPipe('ds_connection'),
+ )
+ : rxjs.of([]),
+ ds
+ ? rxjs.timer(0, 10e3).pipe(
+ rxjs.map(() => {
+ const messages = []
+
+ if (ds.stats.record.records) {
+ messages.push({
+ id: 'app:ds_record_records',
+ level: ds.stats.record.records > 100e3 ? 40 : 30,
+ code: 'NXT_DEEPSTREAM_RECORDS_RECORDS',
+ msg: 'Deepstream: ' + ds.stats.record.records + ' records',
+ })
+ }
+
+ if (ds.stats.record.pruning) {
+ messages.push({
+ id: 'app:ds_record_pruning',
+ level: ds.stats.record.pruning > 100e3 ? 40 : 30,
+ code: 'NXT_DEEPSTREAM_RECORDS_PRUNING',
+ msg: 'Deepstream: ' + ds.stats.record.pruning + ' pruning',
+ })
+ }
+
+ if (ds.stats.record.pending) {
+ messages.push({
+ id: 'app:ds_record_pending',
+ level: ds.stats.record.pending > 10e3 ? 40 : 30,
+ code: 'NXT_DEEPSTREAM_RECORDS_PENDING',
+ msg: 'Deepstream: ' + ds.stats.record.pending + ' pending',
+ })
+ }
+
+ if (ds.stats.record.updating) {
+ messages.push({
+ id: 'app:ds_record_updating',
+ level: ds.stats.record.updating > 10e3 ? 40 : 30,
+ code: 'NXT_DEEPSTREAM_RECORDS_UPDATING',
+ msg: 'Deepstream: ' + ds.stats.record.updating + ' updating',
+ })
+ }
+
+ if (ds.stats.record.patching) {
+ messages.push({
+ id: 'app:ds_record_patching',
+ level: ds.stats.record.patching ? 40 : 30,
+ code: 'NXT_DEEPSTREAM_RECORDS_PATCHING',
+ msg: 'Deepstream: ' + ds.stats.record.patching + ' patching',
+ })
+ }
+
+ return messages
+ }),
+ makeStatusPipe('ds'),
+ )
+ : rxjs.of([]),
+ rxjs.timer(0, 10e3),
+ ])
  .pipe(
  rxjs.auditTime(1e3),
  rxjs.map(([status, lag, couch, ds]) => {
@@ -1023,22 +1056,37 @@ export function makeApp(appConfig, onTerminateOrMeta, metaOrNull) {

  return { ...status, messages, timestamp: Date.now() }
  }),
- rxjs.catchError((err) => {
- logger.error({ err }, 'monitor.status')
- return rxjs.of({
- messages: [{ id: 'app:monitor_status', level: 50, code: err.code, msg: err.message }],
- })
- }),
- rxjs.repeatWhen((complete$) => complete$.pipe(rxjs.delay(10e3))),
- rxjs.startWith({}),
- rxjs.distinctUntilChanged(fp.isEqual),
+ makeStatusPipe('status'),
  )
  .subscribe(monitorProviders.status$),
  )

+ const statussBC = new BroadcastChannel('nxt:app:stats').unref()
+
+ if (isMainThread) {
+ statusMap = new Map()
+ statussBC.onmessage = ({ data: { data, id } }) => {
+ if (data != null) {
+ statusMap.set(id, data)
+ } else {
+ statusMap.delete(id)
+ }
+ }
+ }
+
+ appDestroyers.unshift(
+ monitorProviders.status$
+ .subscribe((stats) => {
+ statussBC.postMessage({ id: threadId, data: stats })
+ })
+ .add(() => {
+ statussBC.postMessage({ id: threadId, data: undefined })
+ }),
+ )
+
  appDestroyers.unshift(
  monitorProviders.status$
- .pipe(rxjs.auditTime(1e3), rxjs.pluck('messages'), rxjs.startWith([]), rxjs.pairwise())
+ .pipe(rxjs.pluck('messages'), rxjs.startWith([]), rxjs.pairwise())
  .subscribe(([prev, next]) => {
  for (const { level, msg: status, ...message } of fp.differenceBy('id', next, prev)) {
  if (level >= 50) {
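
Note on the app.js changes: the per-source catchError/startWith/distinctUntilChanged/repeatWhen plumbing is consolidated into reusable makeStatsPipe(key) and makeStatusPipe(key) operators, and each worker thread now publishes its latest status over a BroadcastChannel ('nxt:app:stats') so the main thread can merge per-thread entries through statusMap. A minimal standalone sketch of that operator shape, assuming a namespace rxjs import and lodash/fp as fp (the actual import statements are outside this diff, and makeMonitorPipe is an illustrative name, not a package export):

import * as rxjs from 'rxjs'
import fp from 'lodash/fp.js'

// Illustrative sketch of the makeStatsPipe/makeStatusPipe shape used in app.js:
// errors become a level-50 message, duplicate emissions are suppressed, and the
// source is resubscribed 10 s after it completes.
const makeMonitorPipe = (key, initial) =>
  rxjs.pipe(
    rxjs.catchError((err) => rxjs.of([{ id: `app:monitor_${key}`, level: 50, msg: err.message }])),
    rxjs.startWith(initial),
    rxjs.distinctUntilChanged(fp.isEqual),
    rxjs.repeatWhen((complete$) => complete$.pipe(rxjs.delay(10e3))),
  )

// Usage: poll a hypothetical check every second and keep only distinct results.
const check$ = rxjs.timer(0, 1e3).pipe(
  rxjs.exhaustMap(async () => [{ id: 'app:example', level: 30, msg: 'ok' }]),
  makeMonitorPipe('example', []),
)
check$.subscribe((messages) => console.log(messages))

The polling intervals for stats and object/function status sources also drop from 10 s to 1 s, with rxjs.auditTime(1e3) keeping the downstream emission rate bounded.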
package/cache.js CHANGED
@@ -1,6 +1,7 @@
  import { DatabaseSync } from 'node:sqlite'
  import { LRUCache } from 'lru-cache'
  import { fastNow } from './time.js'
+ import { doYield } from '@nxtedition/yield'

  function noop() {}

@@ -50,6 +51,8 @@ export class AsyncCache {
  #delQuery
  #purgeStaleQuery

+ #setQueue = []
+
  /**
  * @param {string} location
  * @param {((...args: any[]) => Promise<V>)|undefined} [valueSelector]
@@ -304,10 +307,23 @@ export class AsyncCache {

  this.#lru?.set(key, { ttl, stale, value })

- try {
- this.#setQuery?.run(key, JSON.stringify(value), ttl, stale)
- } catch {
- // Do nothing...
+ this.#setQueue.push({ key, value, ttl, stale })
+ if (this.#setQueue.length === 1) {
+ doYield(this.#flushSetQueue)
+ }
+ }
+
+ #flushSetQueue = () => {
+ for (const { key, value, ttl, stale } of this.#setQueue.splice(0, 64)) {
+ try {
+ this.#setQuery?.run(key, JSON.stringify(value), ttl, stale)
+ } catch {
+ // Do nothing...
+ }
+ }
+
+ if (this.#setQueue.length > 0) {
+ doYield(this.#flushSetQueue)
  }
  }

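
Note on the cache.js changes: set() no longer writes to SQLite synchronously; rows are queued and flushed in batches of up to 64 per pass. The exact scheduling behaviour of doYield from @nxtedition/yield is not shown in this diff, so the sketch below uses setImmediate as a stand-in to illustrate the batching pattern:

// Sketch of the write-batching pattern added to AsyncCache.set(); setImmediate
// stands in for doYield, whose scheduling semantics are assumed here.
class BatchedWriter {
  #queue = []
  #write

  constructor(write) {
    this.#write = write
  }

  push(row) {
    this.#queue.push(row)
    if (this.#queue.length === 1) {
      // The first queued row schedules a flush; subsequent rows piggyback on it.
      setImmediate(this.#flush)
    }
  }

  #flush = () => {
    // Drain at most 64 rows per pass so a burst of writes cannot hog the event loop.
    for (const row of this.#queue.splice(0, 64)) {
      try {
        this.#write(row)
      } catch {
        // Ignore individual write failures, as cache.js does.
      }
    }
    if (this.#queue.length > 0) {
      setImmediate(this.#flush)
    }
  }
}

The trade-off is that a queued row exists only in the LRU until the flush runs, so a crash between set() and the flush can lose the SQLite copy of the most recent entries.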
package/memory.js CHANGED
@@ -1,5 +1,7 @@
  import { readFileSync } from 'node:fs'

+ const INACTIVE_FILE_REGEX = /^inactive_file\s+(\d+)/m
+
  function readFile(path) {
  try {
  return readFileSync(path, 'utf8').trim()
@@ -30,13 +32,19 @@ export function getContainerMemoryUsage() {
  // cgroups v2
  const v2Usage = readFile('/sys/fs/cgroup/memory.current')
  if (v2Usage) {
- return Number(v2Usage)
+ const usage = Number(v2Usage)
+ const stat = readFile('/sys/fs/cgroup/memory.stat')
+ const inactiveFile = Number(stat?.match(INACTIVE_FILE_REGEX)?.[1] ?? 0)
+ return Math.max(0, usage - inactiveFile)
  }

  // cgroups v1
  const v1Usage = readFile('/sys/fs/cgroup/memory/memory.usage_in_bytes')
  if (v1Usage) {
- return Number(v1Usage)
+ const usage = Number(v1Usage)
+ const stat = readFile('/sys/fs/cgroup/memory/memory.stat')
+ const inactiveFile = Number(stat?.match(INACTIVE_FILE_REGEX)?.[1] ?? 0)
+ return Math.max(0, usage - inactiveFile)
  }

  return undefined
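
Note on the memory.js changes: instead of reporting raw cgroup usage, getContainerMemoryUsage() now subtracts the inactive_file page-cache bytes listed in memory.stat. This matches the working-set definition the kubelet uses, so easily reclaimable file cache no longer inflates the app:container_memory_usage message. A self-contained sketch of the cgroups v2 arithmetic (getWorkingSetBytes is an illustrative name, not a package export):

import { readFileSync } from 'node:fs'

// Working set = memory.current - inactive_file (cgroups v2); a sketch of the
// calculation memory.js now performs, returning undefined outside a container.
function getWorkingSetBytes() {
  try {
    const usage = Number(readFileSync('/sys/fs/cgroup/memory.current', 'utf8').trim())
    const stat = readFileSync('/sys/fs/cgroup/memory.stat', 'utf8')
    const inactiveFile = Number(stat.match(/^inactive_file\s+(\d+)/m)?.[1] ?? 0)
    // e.g. 1.5e9 bytes of usage with 0.4e9 bytes of inactive file cache reports 1.1e9.
    return Math.max(0, usage - inactiveFile)
  } catch {
    return undefined
  }
}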
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@nxtedition/lib",
- "version": "28.0.20",
+ "version": "28.0.22",
  "license": "UNLICENSED",
  "author": "Robert Nagy <robert.nagy@boffins.se>",
  "type": "module",
@@ -54,6 +54,7 @@
  "@nxtedition/sched": "^1.0.2",
  "@nxtedition/template": "^1.0.10",
  "@nxtedition/weak-cache": "^1.0.2",
+ "@nxtedition/yield": "^1.0.2",
  "diff": "5.2.0",
  "fast-querystring": "^1.1.2",
  "flamegraph-middleware": "^1.0.0",
@@ -92,5 +93,5 @@
  "pino": ">=7.0.0",
  "rxjs": "^7.0.0"
  },
- "gitHead": "b33dfcfc5f68626f846f364805f51a0e6de74cce"
+ "gitHead": "f323a39ce1fe315c68d4d9a277fd63b3416b4d7b"
  }