@abtnode/core 1.16.44 → 1.16.45-beta-20250609-082716-b6b0592b

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,5 +1,6 @@
  const shelljs = require('shelljs');
  const semver = require('semver');
+ const { ABT_NODE_KERNEL_OR_BLOCKLET_MODE } = require('@blocklet/constant');
  const logger = require('@abtnode/logger')('@abtnode/core:blocklet:engine');

  // eslint-disable-next-line no-underscore-dangle
@@ -48,7 +49,10 @@ const engineMap = new Map([
    displayName: 'Node.js',
    interpreter: 'node',
    description: 'Powered by Node.js',
-   args: '--max-http-header-size=16384',
+   args:
+     process.env.ABT_NODE_KERNEL_MODE === ABT_NODE_KERNEL_OR_BLOCKLET_MODE.PERFORMANT
+       ? '--max-http-header-size=16384'
+       : '--max-http-header-size=16384 --optimize_for_size',
    visible: true,
    version() {
      return _getVersion(() => process.version, this.name);
@@ -38888,7 +38888,7 @@ module.exports = require("zlib");
  /***/ ((module) => {

  "use strict";
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.16.43","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib","test":"node tools/jest.js","coverage":"npm run test -- --coverage"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.16.43","@abtnode/auth":"1.16.43","@abtnode/certificate-manager":"1.16.43","@abtnode/client":"1.16.43","@abtnode/constant":"1.16.43","@abtnode/cron":"1.16.43","@abtnode/docker-utils":"1.16.43","@abtnode/logger":"1.16.43","@abtnode/models":"1.16.43","@abtnode/queue":"1.16.43","@abtnode/rbac":"1.16.43","@abtnode/router-provider":"1.16.43","@abtnode/static-server":"1.16.43","@abtnode/timemachine":"1.16.43","@abtnode/util":"1.16.43","@arcblock/did":"1.20.13","@arcblock/did-auth":"1.20.13","@arcblock/did-ext":"1.20.13","@arcblock/did-motif":"^1.1.13","@arcblock/did-util":"1.20.13","@arcblock/event-hub":"1.20.13","@arcblock/jwt":"1.20.13","@arcblock/pm2-events":"^0.0.5","@arcblock/validator":"1.20.13","@arcblock/vc":"1.20.13","@blocklet/constant":"1.16.43","@blocklet/did-space-js":"^1.0.57","@blocklet/env":"1.16.43","@blocklet/error":"^0.2.5","@blocklet/meta":"1.16.43","@blocklet/resolver":"1.16.43","@blocklet/sdk":"1.16.43","@blocklet/store":"1.16.43","@blocklet/theme":"^2.13.61","@fidm/x509":"^1.2.1","@ocap/mcrypto":"1.20.13","@ocap/util":"1.20.13","@ocap/wallet":"1.20.13","@slack/webhook":"^5.0.4","archiver":"^7.0.1","axios":"^1.7.9","axon":"^2.0.3","chalk":"^4.1.2","cross-spawn":"^7.0.3","dayjs":"^1.11.13","deep-diff":"^1.0.2","detect-port":"^1.5.1","envfile":"^7.1.0","escape-string-regexp":"^4.0.0","fast-glob":"^3.3.2","filesize":"^10.1.1","flat":"^5.0.2","fs-extra":"^11.2.0","get-port":"^5.1.1","hasha":"^5.2.2","is-base64":"^1.1.0","is-cidr":"4","is-ip":"3","is-url":"^1.2.4","joi":"17.12.2","joi-extension-semver":"^5.0.0","js-yaml":"^4.1.0","kill-port":"^2.0.1","lodash":"^4.17.21","lru-cache":"^11.0.2","node-stream-zip":"^1.15.0","p-all":"^3.0.0","p-limit":"^3.1.0","p-map":"^4.0.0","p-retry":"^4.6.2","p-wait-for":"^3.2.0","rate-limiter-flexible":"^5.0.5","read-last-lines":"^1.8.0","semver":"^7.6.3","sequelize":"^6.35.0","shelljs":"^0.8.5","slugify":"^1.6.6","ssri":"^8.0.1","stream-throttle":"^0.1.3","stream-to-promise":"^3.0.0","systeminformation":"^5.23.3","tail":"^2.2.4","tar":"^6.1.11","transliteration":"^2.3.5","ua-parser-js":"^1.0.2","ufo":"^1.5.3","uuid":"^9.0.1","valid-url":"^1.0.9","which":"^2.0.2","xbytes":"^1.8.0"},"devDependencies":{"expand-tilde":"^2.0.2","express":"^4.18.2","jest":"^29.7.0","unzipper":"^0.10.11"},"gitHead":"e5764f753181ed6a7c615cd4fc6682aacf0cb7cd"}');
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.16.44","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib","test":"node tools/jest.js","coverage":"npm run test -- --coverage"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.16.44","@abtnode/auth":"1.16.44","@abtnode/db-cache":"1.16.44","@abtnode/certificate-manager":"1.16.44","@abtnode/client":"1.16.44","@abtnode/constant":"1.16.44","@abtnode/cron":"1.16.44","@abtnode/docker-utils":"1.16.44","@abtnode/logger":"1.16.44","@abtnode/models":"1.16.44","@abtnode/queue":"1.16.44","@abtnode/rbac":"1.16.44","@abtnode/router-provider":"1.16.44","@abtnode/static-server":"1.16.44","@abtnode/timemachine":"1.16.44","@abtnode/util":"1.16.44","@arcblock/did":"1.20.14","@arcblock/did-auth":"1.20.14","@arcblock/did-ext":"1.20.14","@arcblock/did-motif":"^1.1.13","@arcblock/did-util":"1.20.14","@arcblock/event-hub":"1.20.14","@arcblock/jwt":"1.20.14","@arcblock/pm2-events":"^0.0.5","@arcblock/validator":"1.20.14","@arcblock/vc":"1.20.14","@blocklet/constant":"1.16.44","@blocklet/did-space-js":"^1.0.58","@blocklet/env":"1.16.44","@blocklet/error":"^0.2.5","@blocklet/meta":"1.16.44","@blocklet/resolver":"1.16.44","@blocklet/sdk":"1.16.44","@blocklet/store":"1.16.44","@blocklet/theme":"^2.13.62","@fidm/x509":"^1.2.1","@ocap/mcrypto":"1.20.14","@ocap/util":"1.20.14","@ocap/wallet":"1.20.14","@slack/webhook":"^5.0.4","archiver":"^7.0.1","axios":"^1.7.9","axon":"^2.0.3","chalk":"^4.1.2","cross-spawn":"^7.0.3","dayjs":"^1.11.13","deep-diff":"^1.0.2","detect-port":"^1.5.1","envfile":"^7.1.0","escape-string-regexp":"^4.0.0","fast-glob":"^3.3.2","filesize":"^10.1.1","flat":"^5.0.2","fs-extra":"^11.2.0","get-port":"^5.1.1","hasha":"^5.2.2","is-base64":"^1.1.0","is-cidr":"4","is-ip":"3","is-url":"^1.2.4","joi":"17.12.2","joi-extension-semver":"^5.0.0","js-yaml":"^4.1.0","kill-port":"^2.0.1","lodash":"^4.17.21","node-stream-zip":"^1.15.0","p-all":"^3.0.0","p-limit":"^3.1.0","p-map":"^4.0.0","p-retry":"^4.6.2","p-wait-for":"^3.2.0","rate-limiter-flexible":"^5.0.5","read-last-lines":"^1.8.0","semver":"^7.6.3","sequelize":"^6.35.0","shelljs":"^0.8.5","slugify":"^1.6.6","ssri":"^8.0.1","stream-throttle":"^0.1.3","stream-to-promise":"^3.0.0","systeminformation":"^5.23.3","tail":"^2.2.4","tar":"^6.1.11","transliteration":"^2.3.5","ua-parser-js":"^1.0.2","ufo":"^1.5.3","uuid":"^9.0.1","valid-url":"^1.0.9","which":"^2.0.2","xbytes":"^1.8.0"},"devDependencies":{"expand-tilde":"^2.0.2","express":"^4.18.2","jest":"^29.7.0","unzipper":"^0.10.11"},"gitHead":"e5764f753181ed6a7c615cd4fc6682aacf0cb7cd"}');

  /***/ }),

@@ -342,49 +342,14 @@ class RouterManager extends EventEmitter {
    }

    if (issueCert) {
-     const didDomain = doc.domainAliases.find((x) => isDidDomain(x.value));
-     const [dnsValue, cert] = await Promise.all([
-       this.checkDomainDNS(domain, didDomain?.value),
-       this.getHttpsCert({ domain }),
-     ]);
-
-     // For custom domains, do not issue a cert automatically if DNS is not resolved, the CNAME does not match, or a cert is already configured
-     const shouldSkipCertIssue =
-       isCustomDomain(domain) && (!dnsValue.isDnsResolved || !dnsValue.isCnameMatch || !!cert);
-
-     if (shouldSkipCertIssue) {
-       const reasonFn = () => {
-         if (cert) {
-           return 'cert already exists';
-         }
-
-         if (!dnsValue.isDnsResolved) {
-           return 'DNS not resolved';
-         }
-
-         if (!dnsValue.isCnameMatch) {
-           return 'CNAME not match';
-         }
-
-         return 'unknown reason';
-       };
-
-       logger.info('skip cert issue for domain alias', {
-         cert,
-         domain,
-         dnsValue,
-         reason: reasonFn(),
+     logger.info('try to issue cert', { domain, id });
+     this.issueCert({ did, siteId: id, site: doc, domain, inBlockletSetup })
+       .then(() => {
+         logger.info('try to issue cert done', { domain, id });
+       })
+       .catch((error) => {
+         logger.error('try to issue cert failed', { error, domain, id });
        });
-     } else {
-       this.certManager
-         .issue({ domain, did, siteId: id, inBlockletSetup }, { delay: 3000 })
-         .then(() => {
-           logger.info('issue cert for domain alias', { domain, did });
-         })
-         .catch((error) => {
-           logger.error('issue cert for domain alias failed', { error, domain, did });
-         }); // Delay 5s; needed to wait for the Nginx reload and DNS to take effect
-     }
    }

    const newSite = await states.site.findOne({ id });
@@ -688,6 +653,48 @@ class RouterManager extends EventEmitter {
      }
    }

+   async issueCert({ did, siteId, site, domain, inBlockletSetup = false }) {
+     const didDomain = site.domainAliases.find((x) => isDidDomain(x.value));
+     const [dnsValue, cert] = await Promise.all([
+       this.checkDomainDNS(domain, didDomain?.value),
+       this.getHttpsCert({ domain }),
+     ]);
+
+     // For custom domains, do not issue a cert automatically if DNS is not resolved, the CNAME does not match, or a cert is already configured
+     const shouldSkipCertIssue = isCustomDomain(domain) && (!dnsValue.isDnsResolved || !dnsValue.isCnameMatch || !!cert);
+
+     if (shouldSkipCertIssue) {
+       const reasonFn = () => {
+         if (cert) {
+           return 'cert already exists';
+         }
+
+         if (!dnsValue.isDnsResolved) {
+           return 'DNS not resolved';
+         }
+
+         if (!dnsValue.isCnameMatch) {
+           return 'CNAME not match';
+         }
+
+         return 'unknown reason';
+       };
+
+       logger.warn('skip cert issue for domain alias', {
+         cert,
+         domain,
+         dnsValue,
+         reason: reasonFn(),
+       });
+
+       return;
+     }
+
+     // Delay 3s; needed to wait for the Nginx reload and DNS to take effect
+     await this.certManager.issue({ domain, did, siteId, inBlockletSetup }, { delay: 3000 });
+     logger.info('issue cert for domain alias', { domain, did });
+   }
+
    async getHttpsCert({ domain }) {
      const matchedCert = await this.getMatchedCert(domain);

@@ -35,9 +35,10 @@ function parseLogEntry(line, check = true) {
    logEntry.status >= 500 &&
    logEntry.status !== 503 &&
    logEntry.status <= 599 &&
-   logEntry.request.includes('/.well-known/did.json') === false &&
-   logEntry.request.includes('/websocket') === false &&
-   logEntry.request.includes(`${WELLKNOWN_SERVICE_PATH_PREFIX}/health`) === false
+   logEntry.userAgent?.includes('Prometheus') === false &&
+   logEntry.request?.includes('/.well-known/did.json') === false &&
+   logEntry.request?.includes('/websocket') === false &&
+   logEntry.request?.includes(`${WELLKNOWN_SERVICE_PATH_PREFIX}/health`) === false
  ) {
    console.warn(`5xx request detected: ${logEntry.host}`, line);
    return logEntry;
@@ -719,6 +719,11 @@ const fixActor = (actor) => {
   * @extends BaseState<import('@abtnode/models').AuditLogState>
   */
  class AuditLogState extends BaseState {
+   constructor(...args) {
+     super(...args);
+     this.enableCountCache = `audit-log-count-${Math.random().toString(36).substring(2, 15)}`;
+   }
+
    /**
     * Create new audit log
     *
@@ -831,7 +836,6 @@ class AuditLogState extends BaseState {
  }

  findPaginated({ scope, category, actionOrContent, paging } = {}) {
-   this.enableCountCache = true;
    const conditions = {
      where: {},
    };
@@ -6,7 +6,7 @@ const security = require('@abtnode/util/lib/security');
  const { CustomError } = require('@blocklet/error');
  const { generateRandomString } = require('@abtnode/models/lib/util');
  const { isFromPublicKey } = require('@arcblock/did');
- const SingleFlightLRUCache = require('@abtnode/util/lib/single-flight-lru-cache');
+ const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');
  const {
    NODE_MODES,
    DISK_ALERT_THRESHOLD_PERCENT,
@@ -23,8 +23,6 @@ const { validateOwner } = require('../util');
  const { get: getDefaultConfigs } = require('../util/default-node-config');
  const { checkDockerInstalled } = require('../util/docker/check-docker-installed');

- const CACHE_KEY = 'info';
-
  /**
   * @extends BaseState<import('@abtnode/models').ServerState>
   */
@@ -54,12 +52,14 @@ class NodeState extends BaseState {

      this.dataDirs = dataDirs;
      this.notification = notification;
+     this.cacheGroup = `node-${config.nodeDid}`;
    }

-   cache = new SingleFlightLRUCache({
-     max: 1,
+   cache = new DBCache(() => ({
+     ...getAbtNodeRedisAndSQLiteUrl(),
+     prefix: 'node-state',
      ttl: SERVER_CACHE_TTL,
-   });
+   }));

    isInitialized(doc) {
      const isOwnerConnected = !!doc.nodeOwner;
@@ -68,8 +68,8 @@ class NodeState extends BaseState {
      return isOwnerConnected || isControlledBy3rdParty;
    }

-   deleteCache = () => {
-     this.cache.clear();
+   deleteCache = async () => {
+     await this.cache.del(this.cacheGroup);
    };

    /**
@@ -167,7 +167,7 @@ class NodeState extends BaseState {
    }

    read() {
-     return this.cache.autoCache(CACHE_KEY, () => this._read());
+     return this.cache.autoCache(this.cacheGroup, () => this._read());
    }

    // FIXME: this API is risky; it may modify fields that should not be modified and needs to be improved later
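Note: the NodeState hunks above replace the in-process SingleFlightLRUCache with @abtnode/db-cache. A minimal sketch of how the DBCache instance appears to be used, based only on the calls visible in this diff (the autoCache/del signatures are inferred from these hunks, not taken from the package's documentation):

```js
const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');

// Config is resolved lazily: Redis/SQLite connection info plus a key prefix and TTL.
const cache = new DBCache(() => ({
  ...getAbtNodeRedisAndSQLiteUrl(),
  prefix: 'node-state',
  ttl: 5 * 1000, // stand-in for SERVER_CACHE_TTL
}));

async function readNodeInfo(nodeDid, loadFromDb) {
  const cacheGroup = `node-${nodeDid}`;
  // autoCache(key, fn): return the cached value, or run fn and cache its result.
  return cache.autoCache(cacheGroup, () => loadFromDb());
}

async function invalidateNodeInfo(nodeDid) {
  // del(key) replaces the old synchronous cache.clear(), so invalidation is now async.
  await cache.del(`node-${nodeDid}`);
}
```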
@@ -4,9 +4,10 @@ const BaseState = require('./base');
   * @extends BaseState<import('@abtnode/models').NotificationReceiverState>
   */
  class NotificationReceiverState extends BaseState {
+   // eslint-disable-next-line no-useless-constructor
    constructor(...args) {
      super(...args);
-     this.enableCountCache = true;
+     this.enableCountCache = `notification-receiver-count-${Math.random().toString(36).substring(2, 15)}`;
    }
  }

@@ -14,7 +14,7 @@ const { createRBAC, MemoryStorage, SequelizeStorage } = require('@abtnode/rbac')
  const logger = require('@abtnode/logger')('@abtnode/core:team:manager');
  const { ROLES, RBAC_CONFIG, EVENTS, NOTIFICATION_SEND_CHANNEL } = require('@abtnode/constant');
  const { BlockletEvents } = require('@blocklet/constant');
- const FileLock = require('@abtnode/util/lib/lock-with-file');
+ const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');
  const {
    BaseState,
    doSchemaMigration,
@@ -104,7 +104,11 @@ class TeamManager extends EventEmitter {

    this.init();

-   this.lock = new FileLock(path.join(dataDirs.tmp, 'team-manager-locks'), 1000 * 30);
+   this.lock = new DBCache(() => ({
+     ...getAbtNodeRedisAndSQLiteUrl(),
+     prefix: 'team-manager-locks',
+     ttl: 1000 * 30,
+   }));
  }

  async init() {
@@ -645,7 +649,7 @@ class TeamManager extends EventEmitter {
    logger.info('create rbac', { did });
    const models = await this.getModels(did);
    const db = new BaseState(models.Rbac);
-   const rbac = await createRBAC({ storage: new SequelizeStorage(db) });
+   const rbac = await createRBAC({ storage: new SequelizeStorage(db, did) });
    this.cache[pid].rbac = rbac;

    const roles = await rbac.getRoles();
@@ -659,7 +663,7 @@ class TeamManager extends EventEmitter {

      return rbac;
    } finally {
-     this.lock.release(`init-rbac-${did}`);
+     this.lock.releaseLock(`init-rbac-${did}`);
    }
  }

@@ -910,7 +914,7 @@ class TeamManager extends EventEmitter {
      // This error is not fatal, just log it, will happen when there are multiple service processes
      logger.error('initDatabase failed', { did, dbPath, error });
    } finally {
-     this.lock.release(`init-database-${did}`);
+     this.lock.releaseLock(`init-database-${did}`);
    }
  }

@@ -36,7 +36,7 @@ const getFolderSize = require('@abtnode/util/lib/get-folder-size');
  const normalizePathPrefix = require('@abtnode/util/lib/normalize-path-prefix');
  const hashFiles = require('@abtnode/util/lib/hash-files');
  const didDocument = require('@abtnode/util/lib/did-document');
- const SingleFlightLRUCache = require('@abtnode/util/lib/single-flight-lru-cache');
+ const { DBCache } = require('@abtnode/db-cache');
  const {
    BLOCKLET_MAX_MEM_LIMIT_IN_MB,
    BLOCKLET_INSTALL_TYPE,
@@ -113,7 +113,7 @@ const { getDidDomainForBlocklet } = require('@abtnode/util/lib/get-domain-for-bl
  const md5 = require('@abtnode/util/lib/md5');

  const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
- const FileLock = require('@abtnode/util/lib/lock-with-file');
+ const { getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');
  const { validate: validateEngine, get: getEngine } = require('../blocklet/manager/engine');

  const isRequirementsSatisfied = require('./requirement');
@@ -140,15 +140,17 @@ const { createDockerNetwork } = require('./docker/docker-network');
   */
  const getBlockletEngineNameByPlatform = (meta) => getBlockletEngine(meta).interpreter;

- const startLock = new FileLock(
-   process.env.ABT_NODE_DATA_DIR ? path.join(process.env.ABT_NODE_DATA_DIR, 'tmp', 'blocklet-start-locks') : '',
-   1000 * 60 * 2
- );
+ const startLock = new DBCache(() => ({
+   ...getAbtNodeRedisAndSQLiteUrl(),
+   prefix: 'blocklet-start-locks2',
+   ttl: 1000 * 60 * 1,
+ }));

- const blockletCache = new SingleFlightLRUCache({
-   max: 300,
+ const blockletCache = new DBCache(() => ({
+   ...getAbtNodeRedisAndSQLiteUrl(),
+   prefix: 'blocklet-state',
    ttl: BLOCKLET_CACHE_TTL,
- });
+ }));

  const getVersionScope = (meta) => {
    if (meta.dist?.integrity) {
@@ -162,14 +164,8 @@ const getVersionScope = (meta) => {
    return meta.version;
  };

- const deleteBlockletCache = (did) => {
-   const flags = [false, true];
-   flags.forEach((ensureIntegrity) => {
-     flags.forEach((getOptionalComponents) => {
-       const key = JSON.stringify({ did, ensureIntegrity, getOptionalComponents });
-       blockletCache.delete(key);
-     });
-   });
+ const deleteBlockletCache = async (did) => {
+   await blockletCache.del(did);
  };

  const noop = () => {
@@ -765,7 +761,7 @@ const startBlockletProcess = async (
    rootBlocklet: blocklet,
  });

- await pm2.startAsync(nextOptions);
+ await pm2.startAsync({ ...nextOptions, pmx: false });

  // eslint-disable-next-line no-use-before-define
  const status = await getProcessState(processId);
@@ -808,10 +804,7 @@ const startBlockletProcess = async (
    b.meta.group,
  ].join('__');

- if (startLock.lockDir && !startLock.tryLock(lockName)) {
-   await startLock.waitUnLock(lockName);
-   return;
- }
+ await startLock.acquire(lockName);
  try {
    await startBlockletTask(b, { ancestors });
  } finally {
@@ -1665,7 +1658,7 @@ const getBlocklet = ({
    });
  }

- return blockletCache.autoCache(cacheKey, () => {
+ return blockletCache.autoCacheGroup(did, cacheKey, () => {
    return _getBlocklet({ did, ensureIntegrity, getOptionalComponents, ...rest });
  });
};
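Note: the blocklet-manager hunks switch to group-scoped cache calls, which is what lets deleteBlockletCache drop the loop over JSON-stringified key combinations. A hedged sketch of the inferred grouping behavior (the autoCacheGroup/del semantics are read off this diff, not a documented API):

```js
// Inferred from the hunks above: entries are keyed by (group, key), where the group is the blocklet DID.
function getBlockletCached(blockletCache, did, cacheKey, load) {
  return blockletCache.autoCacheGroup(did, cacheKey, () => load());
}

// Deleting by group is assumed to drop every cached variant for that DID in one call,
// replacing the old enumeration of { did, ensureIntegrity, getOptionalComponents } keys.
async function deleteBlockletCache(blockletCache, did) {
  await blockletCache.del(did);
}
```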
@@ -1,15 +1,16 @@
  const dns = require('dns');
  const { promisify } = require('util');
- const { LRUCache } = require('lru-cache');
+ const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');
  const logger = require('@abtnode/logger')('checkDNS');

  const resolveCname = promisify(dns.resolveCname);
  const resolve = promisify(dns.resolve);

- const cache = new LRUCache({
-   max: 100,
+ const cache = new DBCache(() => ({
+   ...getAbtNodeRedisAndSQLiteUrl(),
+   prefix: 'check-dns',
    ttl: 60 * 60 * 1000,
- });
+ }));

  async function checkDnsAndCname(domain, expectedCname = '') {
    try {
@@ -49,16 +50,8 @@ async function checkDnsAndCname(domain, expectedCname = '') {
    }
  }

- async function getDNSInfo(domain, expectedCname) {
-   const cachedAddress = cache.get(domain);
-
-   if (cachedAddress) {
-     return cachedAddress;
-   }
-
-   const result = await checkDnsAndCname(domain, expectedCname);
-   cache.set(domain, result);
-   return result;
+ function getDNSInfo(domain, expectedCname) {
+   return cache.autoCache(domain, () => checkDnsAndCname(domain, expectedCname));
  }

  module.exports = getDNSInfo;
@@ -12,6 +12,7 @@ function checkDockerInstalled() {
    lastTime = now;
    lastResultPromise = (async () => {
      if (process.env.ABT_NODE_NOT_ALLOW_DOCKER) return false;
+     if (process.env.NODE_ENV === 'test') return false;
      try {
        await promiseSpawn('docker ps', { mute: true });
        logger.info('Docker is installed');
@@ -1,6 +1,6 @@
  const path = require('path');
  const fs = require('fs');
- const LockFile = require('@abtnode/util/lib/lock-with-file');
+ const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');
  const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
  const logger = require('@abtnode/logger')('@abtnode/docker-exec-chown');
  const debianChmodDockerfile = require('./debian-chmod-dockerfile');
@@ -10,10 +10,11 @@ const { checkDockerInstalled } = require('./check-docker-installed');
  const filterParentDirs = require('./filter-parent-dirs');
  const { createDockerImage } = require('./create-docker-image');

- const lockFile = new LockFile(
-   process.env.ABT_NODE_DATA_DIR ? path.join(process.env.ABT_NODE_DATA_DIR, 'tmp', 'docker-exec-locks') : '',
-   1000 * 60 * 2
- );
+ const lockFile = new DBCache(() => ({
+   ...getAbtNodeRedisAndSQLiteUrl(),
+   prefix: 'docker-exec-chown-locks',
+   ttl: 1000 * 60 * 2,
+ }));

  async function dockerExecChown({ name, dirs, code = 777, force = false }) {
    if (process.env.ABT_NODE_SKIP_DOCKER_CHOWN === '1' && !force) {
@@ -55,10 +56,7 @@ async function dockerExecChown({ name, dirs, code = 777, force = false }) {
    const realName = parseDockerName(name, 'docker-exec-chown');
    const startTime = Date.now();

-   if (!lockFile.tryLock(realName)) {
-     await lockFile.waitUnLock(realName);
-     return;
-   }
+   await lockFile.acquire(realName);
    try {
      await promiseSpawn(
        `docker rm -fv ${realName} > /dev/null 2>&1 || true && docker run --rm --name ${realName} ${volumes} ${image} sh -c '${command}'`,
@@ -66,8 +64,8 @@ async function dockerExecChown({ name, dirs, code = 777, force = false }) {
      { timeout: 1000 * 120, retry: 3 }
    );
  } finally {
-   lockFile.releaseLock(realName);
    await promiseSpawn(`docker rm -fv ${realName} > /dev/null 2>&1 || true`, {}, { timeout: 1000 * 10, retry: 3 });
+   await lockFile.releaseLock(realName);
  }

  logger.info(`dockerExecChown ${name} cost time: ${Date.now() - startTime}ms`);
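Note: across the TeamManager, blocklet-start, and docker hunks, the FileLock tryLock/waitUnLock dance becomes an acquire/releaseLock pair on a DBCache instance. A minimal sketch of that pattern as it is used in these hunks (the method names come from the diff; the blocking behavior of acquire is an assumption):

```js
const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');

// The TTL acts as a safety expiry so a crashed process cannot hold a lock forever.
const lock = new DBCache(() => ({
  ...getAbtNodeRedisAndSQLiteUrl(),
  prefix: 'docker-exec-chown-locks',
  ttl: 1000 * 60 * 2,
}));

// Hypothetical helper showing the shape used throughout these hunks.
async function withLock(name, task) {
  await lock.acquire(name); // replaces tryLock + waitUnLock
  try {
    return await task();
  } finally {
    await lock.releaseLock(name); // awaited, unlike the old synchronous FileLock.release()
  }
}
```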
@@ -1,16 +1,14 @@
- const path = require('path');
  const logger = require('@abtnode/logger')('@abtnode/docker-network');
  const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
- const LockFile = require('@abtnode/util/lib/lock-with-file');
+ const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');

  const { checkDockerInstalled } = require('./check-docker-installed');

- const lockFile = new LockFile(
-   process.env.ABT_NODE_DATA_DIR ? path.join(process.env.ABT_NODE_DATA_DIR, 'tmp', 'docker-network-locks') : '',
-   1000 * 20
- );
-
- const networkPruneLockName = 'docker-network-prune';
+ const lockNetwork = new DBCache(() => ({
+   ...getAbtNodeRedisAndSQLiteUrl(),
+   prefix: 'docker-network-locks',
+   ttl: 1000 * 10,
+ }));

  async function getDockerNetworks() {
    try {
@@ -22,7 +20,7 @@ async function getDockerNetworks() {
    }
  }

- async function _createDockerNetwork(dockerNetworkName) {
+ async function _createDockerNetwork(dockerNetworkName, retry = 0) {
    if (!process.env.ABT_NODE_DATA_DIR) {
      return;
    }
@@ -30,30 +28,33 @@ async function _createDockerNetwork(dockerNetworkName) {
      return;
    }
    const networks = await getDockerNetworks();
-   if (!networks.includes(`${dockerNetworkName}-internal`)) {
-     await promiseSpawn(`docker network create --internal ${dockerNetworkName}-internal`);
-   } else {
-     logger.info(`Network ${dockerNetworkName}-internal already exists. Skipping creation.`);
-   }
+   try {
+     if (!networks.includes(`${dockerNetworkName}-internal`)) {
+       await promiseSpawn(`docker network create --internal ${dockerNetworkName}-internal`);
+     } else {
+       logger.info(`Network ${dockerNetworkName}-internal already exists. Skipping creation.`);
+     }

-   if (!networks.includes(dockerNetworkName)) {
-     await promiseSpawn(`docker network create ${dockerNetworkName}`);
-   } else {
-     logger.info(`Network ${dockerNetworkName} already exists. Skipping creation.`);
+     if (!networks.includes(dockerNetworkName)) {
+       await promiseSpawn(`docker network create ${dockerNetworkName}`);
+     } else {
+       logger.info(`Network ${dockerNetworkName} already exists. Skipping creation.`);
+     }
+   } catch (error) {
+     if (retry < 3 && error.message.includes('all predefined address pools')) {
+       await promiseSpawn('docker network prune -f');
+       await _createDockerNetwork(dockerNetworkName, retry + 1);
+       return;
+     }
+     throw error;
    }
  }

  async function createDockerNetwork(dockerNetworkName) {
    // Take a lock before creating the network to avoid concurrent creation of the same subnet
-   if (!lockFile.tryLock(dockerNetworkName)) {
-     // Wait for the lock to be released
-     await lockFile.waitUnLock(dockerNetworkName);
-     return;
-   }
-   // Network pruning is not allowed while a network is being created
-   lockFile.tryLock(networkPruneLockName);
+   await lockNetwork.acquire(dockerNetworkName);
    try {
-     await _createDockerNetwork(dockerNetworkName);
+     await _createDockerNetwork(dockerNetworkName, 0);
    } catch (error) {
      if (/already exists in network/i.test(error?.message || '')) {
        logger.error('Error docker network already exists, but it can be ignored', error);
@@ -61,16 +62,12 @@ async function createDockerNetwork(dockerNetworkName) {
        throw error;
      }
    } finally {
-     lockFile.releaseLock(networkPruneLockName);
-     lockFile.releaseLock(dockerNetworkName);
+     lockNetwork.releaseLock(dockerNetworkName);
    }
  }

  async function removeDockerNetwork(dockerNetworkName) {
    try {
-     if (!lockFile.tryLock(networkPruneLockName)) {
-       return;
-     }
      const isDockerInstalled = await checkDockerInstalled();
      if (isDockerInstalled) {
        await promiseSpawn(`docker network rm ${dockerNetworkName}`);
@@ -79,8 +76,6 @@ async function removeDockerNetwork(dockerNetworkName) {
      logger.info(`docker remove network ${dockerNetworkName} done`);
    } catch (error) {
      logger.error(`Error remove network ${dockerNetworkName}:`, error);
-   } finally {
-     lockFile.releaseLock(networkPruneLockName);
    }
  }

@@ -0,0 +1,77 @@
+ const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
+ const logger = require('@abtnode/logger')('@abtnode/ensure-docker-redis');
+ const { checkDockerInstalled } = require('./check-docker-installed');
+
+ const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+ async function ensureDockerRedis(name = 'db-cache-redis', port = 40409) {
+   if (!(await checkDockerInstalled())) {
+     return '';
+   }
+   // 1. Check whether a container with this name is already running
+   const checkRunningCmd = `docker ps --filter "name=${name}" --format "{{.Names}}"`;
+   // 2. If it is not running, check whether a stopped container with this name exists
+   const checkAllCmd = `docker ps -a --filter "name=${name}" --format "{{.Names}}"`;
+   // 3. If it is neither running nor present, create a new container: disable persistence, cap memory, and set the eviction policy
+   const runCmd = [
+     'docker run -d',
+     `--name ${name}`,
+     `-p 127.0.0.1:${port}:6379`,
+     'redis:8.0.2',
+     'redis-server',
+     '--save ""',
+     '--appendonly no',
+     '--maxmemory 512mb',
+     '--maxmemory-policy allkeys-lru',
+   ].join(' ');
+
+   const url = `redis://localhost:${port}`;
+
+   const running = (await promiseSpawn(checkRunningCmd)).trim();
+   if (running === name) {
+     // Already running, return directly
+     logger.info('redis is already running', name);
+     return url;
+   }
+
+   // Check whether a stopped container with the same name exists
+   const all = (await promiseSpawn(checkAllCmd)).trim();
+   if (all === name) {
+     // The container exists but may be stopped, so just start it
+     await promiseSpawn(`docker start ${name}`);
+     await sleep(3000);
+     logger.info('redis is started', name);
+     return url;
+   }
+
+   await promiseSpawn(runCmd);
+   await sleep(3000);
+   logger.info('redis is started', name);
+   return url;
+ }
+
+ // Stop the container, and return whether a redis container exists in Docker
+ async function stopDockerRedis(name = 'db-cache-redis') {
+   if (!(await checkDockerInstalled())) {
+     return false;
+   }
+   const checkRunningCmd = `docker ps --filter "name=${name}" --format "{{.Names}}"`;
+   const running = (await promiseSpawn(checkRunningCmd)).trim();
+   if (running === name) {
+     // It is running, so remove it
+     logger.info('redis is already running', name);
+     try {
+       await promiseSpawn(`docker rm -f ${name}`, { mute: true });
+     } catch (_) {
+       // No need to log here, since redis may not have been running in the first place
+     }
+     return true;
+   }
+
+   return false;
+ }
+
+ module.exports = {
+   ensureDockerRedis,
+   stopDockerRedis,
+ };
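Note: the new file above provisions a disposable Redis container for @abtnode/db-cache. A hedged usage sketch (only ensureDockerRedis/stopDockerRedis come from the file above; the caller shown here is hypothetical):

```js
const { ensureDockerRedis, stopDockerRedis } = require('./ensure-docker-redis');

async function main() {
  // Returns a redis:// URL when Docker is available, or '' so callers can fall back to a non-Redis backend.
  const redisUrl = await ensureDockerRedis('db-cache-redis', 40409);
  if (!redisUrl) {
    console.log('Docker unavailable; db-cache falls back to its non-Redis backend');
    return;
  }
  console.log('db-cache redis ready at', redisUrl);

  // Resolves to true if a matching container was found and removed.
  const removed = await stopDockerRedis('db-cache-redis');
  console.log('redis container removed:', removed);
}

main().catch((error) => {
  console.error(error);
  process.exit(1);
});
```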
@@ -1,6 +1,6 @@
  require('./docker-container-prune');

- const { COMPONENT_DOCKER_ENV_FILE_NAME } = require('@blocklet/constant');
+ const { COMPONENT_DOCKER_ENV_FILE_NAME, ABT_NODE_KERNEL_OR_BLOCKLET_MODE } = require('@blocklet/constant');
  const { stringify: stringifyEnvFile } = require('envfile');
  const path = require('path');
  const fsp = require('fs/promises');
@@ -95,8 +95,9 @@ async function parseDockerOptionsFromPm2({
    throw new Error(`Docker CMD is invalid: ${dockerInfo.command}`);
  }

+ options.interpreter = 'none';
  const defaultCpus = '2';
- const defaultMemory = '2g';
+ const defaultMemory = '1.5g';
  const defaultDiskSize = '0g';

  const cpus = isServerless
@@ -218,10 +219,10 @@

  let runScript = dockerInfo.script || '';
  if (!runScript && dockerInfo.installNodeModules) {
-   const maxOldSpaceSize = Math.floor(Number(dockerEnv.BLOCKLET_DOCKER_MEMORY.replace('g', '')) * 0.8 * 1024);
+   const maxOldSpaceSize = Math.floor(Number(dockerEnv.BLOCKLET_DOCKER_MEMORY.replace('g', '')) * 0.85 * 1024);
    runScript = /(npm|yarn|pnpm|bun)/g.test(nextOptions.script)
      ? nextOptions.script
-     : `node --max-old-space-size=${maxOldSpaceSize} ${nextOptions.script}`;
+     : `node ${process.env.ABT_NODE_BLOCKLET_MODE === ABT_NODE_KERNEL_OR_BLOCKLET_MODE.PERFORMANT ? '' : '--optimize_for_size'} --max-old-space-size=${maxOldSpaceSize} --max-http-header-size=16384 ${nextOptions.script} -- BLOCKLET_NAME=${options.name}`;
  }

  const runScripts = ['preInstall', '--installScript--', 'postInstall', 'preFlight', 'preStart', '--runScript--']
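Note: with the new 1.5g default memory and the 0.85 ratio, the derived Node heap cap in the hunk above works out as follows (a worked example of the formula shown, not additional package code):

```js
// maxOldSpaceSize = floor(memoryInGb * 0.85 * 1024), per the hunk above.
const memoryInGb = Number('1.5g'.replace('g', '')); // 1.5
const maxOldSpaceSize = Math.floor(memoryInGb * 0.85 * 1024); // 1305 (MB)
// Previous release: floor(2 * 0.8 * 1024) = 1638 MB with the old 2g default.
console.log(maxOldSpaceSize);
```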
@@ -157,7 +157,7 @@ module.exports = ({ events, dataDirs, instance, teamManager }) => {
    });

    [EVENTS.NOTIFICATION_CREATE, EVENTS.NOTIFICATION_BLOCKLET_CREATE].forEach((event) => {
-     events.on(event, (data) => {
+     events.on(event, async (data) => {
        const {
          id,
          title,
@@ -173,7 +173,12 @@
        ignorePush,
        message,
      } = data;
-     if (!reduceQueue({ id, title, description, entityType, entityId, severity, action, extra }) && !ignorePush) {
+     if (
+       !(await reduceQueue.acquire(
+         JSON.stringify({ id, title, description, entityType, entityId, severity, action, extra })
+       )) &&
+       !ignorePush
+     ) {
        queue.push({
          title,
          description,
@@ -1,12 +1,10 @@
- const { LRUCache } = require('lru-cache');
+ const { DBCache, getAbtNodeRedisAndSQLiteUrl } = require('@abtnode/db-cache');

- const reduceCache = new LRUCache({ max: 50, ttl: 5 * 1000 });
-
- const reduceQueue = (obj) => {
-   const key = JSON.stringify(obj);
-   const has = reduceCache.has(key);
-   reduceCache.set(key, true);
-   return has;
- };
+ // Ensure only one task runs across multiple processes
+ const reduceQueue = new DBCache(() => ({
+   ...getAbtNodeRedisAndSQLiteUrl(),
+   prefix: 'reduce-queue',
+   ttl: 5 * 1000,
+ }));

  module.exports = reduceQueue;
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "publishConfig": {
    "access": "public"
  },
- "version": "1.16.44",
+ "version": "1.16.45-beta-20250609-082716-b6b0592b",
  "description": "",
  "main": "lib/index.js",
  "files": [
@@ -19,44 +19,45 @@
    "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
    "license": "Apache-2.0",
    "dependencies": {
-     "@abtnode/analytics": "1.16.44",
-     "@abtnode/auth": "1.16.44",
-     "@abtnode/certificate-manager": "1.16.44",
-     "@abtnode/client": "1.16.44",
-     "@abtnode/constant": "1.16.44",
-     "@abtnode/cron": "1.16.44",
-     "@abtnode/docker-utils": "1.16.44",
-     "@abtnode/logger": "1.16.44",
-     "@abtnode/models": "1.16.44",
-     "@abtnode/queue": "1.16.44",
-     "@abtnode/rbac": "1.16.44",
-     "@abtnode/router-provider": "1.16.44",
-     "@abtnode/static-server": "1.16.44",
-     "@abtnode/timemachine": "1.16.44",
-     "@abtnode/util": "1.16.44",
-     "@arcblock/did": "1.20.13",
-     "@arcblock/did-auth": "1.20.13",
-     "@arcblock/did-ext": "1.20.13",
+     "@abtnode/analytics": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/auth": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/certificate-manager": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/client": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/constant": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/cron": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/db-cache": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/docker-utils": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/logger": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/models": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/queue": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/rbac": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/router-provider": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/static-server": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/timemachine": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@abtnode/util": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@arcblock/did": "1.20.14",
+     "@arcblock/did-auth": "1.20.14",
+     "@arcblock/did-ext": "1.20.14",
      "@arcblock/did-motif": "^1.1.13",
-     "@arcblock/did-util": "1.20.13",
-     "@arcblock/event-hub": "1.20.13",
-     "@arcblock/jwt": "1.20.13",
+     "@arcblock/did-util": "1.20.14",
+     "@arcblock/event-hub": "1.20.14",
+     "@arcblock/jwt": "1.20.14",
      "@arcblock/pm2-events": "^0.0.5",
-     "@arcblock/validator": "1.20.13",
-     "@arcblock/vc": "1.20.13",
-     "@blocklet/constant": "1.16.44",
-     "@blocklet/did-space-js": "^1.0.57",
-     "@blocklet/env": "1.16.44",
+     "@arcblock/validator": "1.20.14",
+     "@arcblock/vc": "1.20.14",
+     "@blocklet/constant": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@blocklet/did-space-js": "^1.0.58",
+     "@blocklet/env": "1.16.45-beta-20250609-082716-b6b0592b",
      "@blocklet/error": "^0.2.5",
-     "@blocklet/meta": "1.16.44",
-     "@blocklet/resolver": "1.16.44",
-     "@blocklet/sdk": "1.16.44",
-     "@blocklet/store": "1.16.44",
-     "@blocklet/theme": "^2.13.61",
+     "@blocklet/meta": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@blocklet/resolver": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@blocklet/sdk": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@blocklet/store": "1.16.45-beta-20250609-082716-b6b0592b",
+     "@blocklet/theme": "^2.13.62",
      "@fidm/x509": "^1.2.1",
-     "@ocap/mcrypto": "1.20.13",
-     "@ocap/util": "1.20.13",
-     "@ocap/wallet": "1.20.13",
+     "@ocap/mcrypto": "1.20.14",
+     "@ocap/util": "1.20.14",
+     "@ocap/wallet": "1.20.14",
      "@slack/webhook": "^5.0.4",
      "archiver": "^7.0.1",
      "axios": "^1.7.9",
@@ -83,7 +84,6 @@
    "js-yaml": "^4.1.0",
    "kill-port": "^2.0.1",
    "lodash": "^4.17.21",
-   "lru-cache": "^11.0.2",
    "node-stream-zip": "^1.15.0",
    "p-all": "^3.0.0",
    "p-limit": "^3.1.0",
@@ -116,5 +116,5 @@
    "jest": "^29.7.0",
    "unzipper": "^0.10.11"
  },
- "gitHead": "a177f040e3e8da94311cec1395c8e8defae2da9e"
+ "gitHead": "adeceb28a9b5c91d2eb35d33880782048ae629cc"
  }