@abtnode/core 1.17.6 → 1.17.7-beta-20251224-045844-3c7f459a
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/team.js +36 -1
- package/lib/blocklet/manager/disk.js +64 -36
- package/lib/blocklet/manager/ensure-blocklet-running.js +1 -0
- package/lib/blocklet/manager/helper/install-application-from-backup.js +1 -0
- package/lib/blocklet/manager/helper/install-application-from-general.js +1 -0
- package/lib/blocklet/manager/helper/install-component-from-url.js +1 -0
- package/lib/blocklet/manager/helper/upgrade-components.js +1 -0
- package/lib/blocklet/migration-dist/migration.cjs +1 -1
- package/lib/blocklet/storage/backup/spaces.js +11 -0
- package/lib/blocklet/storage/utils/disk.js +3 -0
- package/lib/event/auto-backup-handler.js +15 -11
- package/lib/event/index.js +4 -2
- package/lib/states/blocklet-extras.js +46 -14
- package/lib/states/blocklet.js +25 -2
- package/lib/states/job.js +55 -1
- package/lib/states/notification.js +25 -25
- package/lib/util/blocklet.js +34 -8
- package/lib/util/docker/create-docker-image.js +10 -2
- package/lib/util/docker/parse-docker-options-from-pm2.js +14 -0
- package/lib/util/docker/parse-tmpfs.js +92 -0
- package/lib/webhook/index.js +1 -0
- package/package.json +24 -24
package/lib/api/team.js
CHANGED
@@ -58,6 +58,7 @@ const { getEmailServiceProvider } = require('@abtnode/auth/lib/email');
 const md5 = require('@abtnode/util/lib/md5');
 const { sanitizeTag } = require('@abtnode/util/lib/sanitize');
 const { Joi } = require('@arcblock/validator');
+const { getBlockletAppIdList } = require('@blocklet/meta/lib/util');

 const { validateTrustedPassportIssuers } = require('../validators/trusted-passport');
 const { validateTrustedFactories } = require('../validators/trusted-factory');
@@ -3119,6 +3120,7 @@ class TeamAPI extends EventEmitter {
       {
         action: 'issueOrgOwnerPassport',
         entity: 'blocklet',
+        entityId: teamDid,
         params: {
           teamDid,
           org: result,
@@ -3623,10 +3625,43 @@ class TeamAPI extends EventEmitter {

       const pushState = getNotificationPushState(results, channelsAvailable, isServer);

+      let teamDids = [teamDid];
+      if (isServer) {
+        const nodeInfo = await this.node.read();
+        teamDids = [nodeInfo.did];
+      } else {
+        teamDids = getBlockletAppIdList(blocklet);
+      }
+
+      const pendingResult = await this.states.job.getPendingNotifications({
+        teamDids,
+        isServer,
+        channels: Object.keys(channelsAvailable),
+        createdAt: startTime,
+      });
+
+      // mapping between pushState keys and pendingResult keys
+      const channelKeyMap = {
+        pushKit: NOTIFICATION_SEND_CHANNEL.PUSH,
+        wallet: NOTIFICATION_SEND_CHANNEL.WALLET,
+        email: NOTIFICATION_SEND_CHANNEL.EMAIL,
+        webhook: NOTIFICATION_SEND_CHANNEL.WEBHOOK,
+      };
+
+      // merge the pending count into the matching channel
+      const channels = Object.entries(pushState).reduce((acc, [key, value]) => {
+        const pendingKey = channelKeyMap[key] || key;
+        acc[key] = {
+          ...value,
+          pending: pendingResult[pendingKey] || 0,
+        };
+        return acc;
+      }, {});
+
       return {
         healthy: true,
         since: startTime,
-        channels
+        channels,
       };
     } catch (err) {
       logger.error('Get notification service health failed', err, { teamDid });

package/lib/blocklet/manager/disk.js
CHANGED
@@ -367,6 +367,7 @@ class DiskBlockletManager extends BaseBlockletManager {
     */
    const handleBackupComplete = async ({ id: jobId, job }) => {
      await this.backupQueue.delete(jobId);
+      SpacesBackup.abort(job.did);

      const autoBackup = await this.getAutoBackup({ did: job.did });
      if (autoBackup?.enabled) {
@@ -3009,17 +3010,19 @@ class DiskBlockletManager extends BaseBlockletManager {
       await spacesBackup.initialize();
       await spacesBackup.verifySpace();

-
-
-
-
-
-
-
-
-
-
-
+      if (!SpacesBackup.isRunning(did)) {
+        this.backupQueue.push(
+          {
+            entity: 'blocklet',
+            action: 'backupToSpaces',
+            did,
+            context,
+          },
+          jobId,
+          true,
+          BACKUPS.JOB.INTERVAL
+        );
+      }
     }

     await states.blockletExtras.setSettings(did, { autoBackup: value });
@@ -3748,17 +3751,37 @@ class DiskBlockletManager extends BaseBlockletManager {
       strategy: BACKUPS.STRATEGY.AUTO,
     },
   }) {
-    if (shouldJobBackoff()) {
-      logger.warn('Backup to spaces is not available when blocklet server is starting.');
-      return;
-    }
-
     const blocklet = await states.blocklet.getBlocklet(did);
     const {
       appDid,
       meta: { did: appPid },
     } = blocklet;

+    if (shouldJobBackoff()) {
+      const backup = await states.backup.findOne({ appPid }, {}, { createdAt: -1 });
+      const message = 'Backup to spaces is not available when blocklet server is starting.';
+
+      if (backup.status === BACKUPS.STATUS.PROGRESS) {
+        await states.backup.fail(backup.id, {
+          message,
+        });
+      }
+
+      this.emit(BlockletEvents.backupProgress, {
+        appDid,
+        meta: { did: appPid },
+        completed: true,
+        progress: -1,
+        message,
+        backup,
+        context,
+        blocklet,
+      });
+
+      logger.warn(message);
+      return;
+    }
+
     const spaceGateways = await this.getBlockletSpaceGateways({ did });
     const backupEndpoint = getBackupEndpoint(blocklet.environments);
     if (isEmpty(spaceGateways) || isEmpty(backupEndpoint)) {
@@ -5488,28 +5511,33 @@ class DiskBlockletManager extends BaseBlockletManager {
       const { did } = blocklet.meta;
       const jobId = getBackupJobId(did);
       const { job, willRunAt } = (await this.backupQueue.get(jobId, { full: true })) ?? {};
-      if (job) {
-        if (job.backupState?.strategy === BACKUPS.STRATEGY.MANUAL || willRunAt <= Date.now()) {
-          logger.warn(`This app(${did})'s manual backup is already running, skip it`, { job });
-          await this.backupQueue.restoreCancelled(jobId);
-          return blocklet;
-        }
-        await this.backupQueue.delete(jobId);
-        logger.warn(`This app(${did})'s auto backup is already removed from backup queue`, { job });
-      }

-
-
-
-
-
-
-
-
+      // the job counts as active if it is running, will run within 1s, or may already be overdue
+      const waitBackupDone = (job && willRunAt - Date.now() <= 1_000) || SpacesBackup.isRunning(did);
+
+      if (waitBackupDone) {
+        logger.warn(`This app(${did})'s manual or auto backup is already running, skip manual backup`, {
+          job,
+          willRunAt,
+          now: Date.now(),
+          isRunning: SpacesBackup.isRunning(did),
+        });
+        await this.backupQueue.restoreCancelled(jobId);
+      } else {
+        await this.backupQueue.delete(jobId);
+        this.backupQueue.push(
+          {
+            entity: 'blocklet',
+            action: 'backupToSpaces',
+            did,
+            context,
+            backupState: {
+              strategy: BACKUPS.STRATEGY.MANUAL,
+            },
           },
-
-
-
+          jobId
+        );
+      }

       return blocklet;
     } catch (error) {

package/lib/blocklet/migration-dist/migration.cjs
CHANGED
@@ -39044,7 +39044,7 @@ module.exports = require("zlib");
 /***/ ((module) => {

 "use strict";
-module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.17.
+module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.17.6","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.17.6","@abtnode/auth":"1.17.6","@abtnode/certificate-manager":"1.17.6","@abtnode/constant":"1.17.6","@abtnode/cron":"1.17.6","@abtnode/db-cache":"1.17.6","@abtnode/docker-utils":"1.17.6","@abtnode/logger":"1.17.6","@abtnode/models":"1.17.6","@abtnode/queue":"1.17.6","@abtnode/rbac":"1.17.6","@abtnode/router-provider":"1.17.6","@abtnode/static-server":"1.17.6","@abtnode/timemachine":"1.17.6","@abtnode/util":"1.17.6","@aigne/aigne-hub":"^0.10.15","@arcblock/did":"^1.27.15","@arcblock/did-connect-js":"^1.27.15","@arcblock/did-ext":"^1.27.15","@arcblock/did-motif":"^1.1.14","@arcblock/did-util":"^1.27.15","@arcblock/event-hub":"^1.27.15","@arcblock/jwt":"^1.27.15","@arcblock/pm2-events":"^0.0.5","@arcblock/validator":"^1.27.15","@arcblock/vc":"^1.27.15","@blocklet/constant":"1.17.6","@blocklet/did-space-js":"^1.2.11","@blocklet/env":"1.17.6","@blocklet/error":"^0.3.5","@blocklet/meta":"1.17.6","@blocklet/resolver":"1.17.6","@blocklet/sdk":"1.17.6","@blocklet/server-js":"1.17.6","@blocklet/store":"1.17.6","@blocklet/theme":"^3.2.19","@fidm/x509":"^1.2.1","@ocap/mcrypto":"^1.27.15","@ocap/util":"^1.27.15","@ocap/wallet":"^1.27.15","@slack/webhook":"^7.0.6","archiver":"^7.0.1","axios":"^1.7.9","axon":"^2.0.3","chalk":"^4.1.2","cross-spawn":"^7.0.3","dayjs":"^1.11.13","deep-diff":"^1.0.2","detect-port":"^1.5.1","envfile":"^7.1.0","escape-string-regexp":"^4.0.0","fast-glob":"^3.3.2","filesize":"^10.1.1","flat":"^5.0.2","fs-extra":"^11.2.0","get-port":"^5.1.1","hasha":"^5.2.2","is-base64":"^1.1.0","is-cidr":"4","is-ip":"3","is-url":"^1.2.4","joi":"17.12.2","joi-extension-semver":"^5.0.0","js-yaml":"^4.1.0","kill-port":"^2.0.1","lodash":"^4.17.21","node-stream-zip":"^1.15.0","p-all":"^3.0.0","p-limit":"^3.1.0","p-map":"^4.0.0","p-retry":"^4.6.2","p-wait-for":"^3.2.0","private-ip":"^2.3.4","rate-limiter-flexible":"^5.0.5","read-last-lines":"^1.8.0","semver":"^7.6.3","sequelize":"^6.35.0","shelljs":"^0.8.5","slugify":"^1.6.6","ssri":"^8.0.1","stream-throttle":"^0.1.3","stream-to-promise":"^3.0.0","systeminformation":"^5.23.3","tail":"^2.2.4","tar":"^6.1.11","transliteration":"2.3.5","ua-parser-js":"^1.0.2","ufo":"^1.5.3","uuid":"^11.1.0","valid-url":"^1.0.9","which":"^2.0.2","xbytes":"^1.8.0"},"devDependencies":{"axios-mock-adapter":"^2.1.0","expand-tilde":"^2.0.2","express":"^4.18.2","unzipper":"^0.10.11"},"gitHead":"e5764f753181ed6a7c615cd4fc6682aacf0cb7cd"}');

 /***/ }),

package/lib/blocklet/storage/backup/spaces.js
CHANGED
@@ -406,6 +406,17 @@ class SpacesBackup extends BaseBackup {
     }
   }

+  /**
+   *
+   *
+   * @static
+   * @param {string} appPid
+   * @memberof SpacesBackup
+   */
+  static isRunning(appPid) {
+    return SpacesBackup.instanceMap.has(appPid);
+  }
+
   /**
    *
    *

package/lib/blocklet/storage/utils/disk.js
CHANGED
@@ -104,6 +104,8 @@ function getFolderSize(folderPath) {
  */
 // eslint-disable-next-line require-await
 async function getFolderObjects(path, prefix = '') {
+  // @note: I tried parsing the .gitignore file by hand, but fast-glob cannot apply its rules; see the related issue: https://github.com/mrmlnc/fast-glob/issues/265#issue-579211456
+  const ignore = ['**/node_modules/**', '**/.next/**', '**/.DS_Store'];
   const stream = FastGlob.stream('**', {
     cwd: path,
     objectMode: true,
@@ -112,6 +114,7 @@ async function getFolderObjects(path, prefix = '') {
     absolute: true,
     dot: true,
     concurrency: 2,
+    ignore,
   });

   const objects = [];

package/lib/event/auto-backup-handler.js
CHANGED
@@ -2,6 +2,7 @@ const debounce = require('lodash/debounce');
 const logger = require('@abtnode/logger')('@abtnode/core:event:auto-backup-handler');
 const { BACKUPS } = require('@abtnode/constant');
 const { getBackupJobId } = require('../util/spaces');
+const { SpacesBackup } = require('../blocklet/storage/backup/spaces');

 /**
  * @description
@@ -16,19 +17,22 @@ async function autoBackupHandler(_eventName, payload, blockletManager) {
   if (autoBackup.enabled && payload.context) {
     const jobId = getBackupJobId(did);
     const { job, willRunAt } = (await blockletManager.backupQueue.get(jobId, { full: true })) ?? {};
-
-
-
-
-
-
-
-
-
-
-
+
+    // the job counts as active if it is running, will run within 3s, or may already be overdue
+    const waitBackupDone = (job && willRunAt - Date.now() <= 3_000) || SpacesBackup.isRunning(did);
+
+    if (waitBackupDone) {
+      logger.warn(`This app(${did})'s manual or auto backup is already running, skip auto backup`, {
+        job,
+        willRunAt,
+        now: Date.now(),
+        isRunning: SpacesBackup.isRunning(did),
+      });
+      await blockletManager.backupQueue.restoreCancelled(jobId);
+      return;
     }

+    await blockletManager.backupQueue.delete(jobId);
     blockletManager.backupQueue.push(
       {
         entity: 'blocklet',
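For readers of the team.js hunk above: each entry of the notification-health payload's channels object now carries a pending count on top of the existing push state. A rough sketch of the merged shape, with illustrative numbers only (not values taken from the package):

  // illustrative shape only; pending comes from states.job.getPendingNotifications()
  channels = {
    wallet:  { ...pushState.wallet,  pending: 2 },  // keyed via NOTIFICATION_SEND_CHANNEL.WALLET
    email:   { ...pushState.email,   pending: 0 },
    pushKit: { ...pushState.pushKit, pending: 1 },  // mapped to NOTIFICATION_SEND_CHANNEL.PUSH
    webhook: { ...pushState.webhook, pending: 0 },
  };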
package/lib/event/index.js
CHANGED
@@ -695,8 +695,10 @@ module.exports = ({
    * @returns
    */
   (_eventName, payload, ...args) => {
-
-
+      if (payload.context) {
+        const id = payload.meta.did;
+        autoBackupHandlerFactory(id, autoBackupHandler)(eventName, payload, blockletManager, ...args);
+      }
     }
   );
 });

package/lib/states/blocklet-extras.js
CHANGED
@@ -150,21 +150,37 @@ class BlockletExtrasState extends BaseState {
     }
   }

-
-      return
-
-      dids = [].concat(dids);
-      const [rootDid, ...childDids] = dids;
-      const { dek } = this.config;
-      const { name, afterGet = noop('data') } = extra;
+  getExtraByDid(did, { selection = {} } = {}) {
+    return this.findOne({ did }, selection);
+  }

-
-
-
-
-
+  /**
+   * Extract the specified field's data from a document
+   * @private
+   */
+  _extractFromDoc(doc, dids, extra) {
+    // eslint-disable-next-line no-param-reassign
+    dids = [].concat(dids);
+    const [rootDid, ...childDids] = dids;
+    const { dek } = this.config;
+    const { name, afterGet = noop('data') } = extra;
+
+    // walk children to find the target component
+    let item = doc;
+    const didsToTraverse = [...childDids];
+    while (item && didsToTraverse.length) {
+      const did = didsToTraverse.shift();
+      item = (item.children || []).find((x) => x.did === did);
+    }
+
+    return afterGet({ data: item ? item[name] : null, did: rootDid, dek });
+  }

-
+  generateGetFn(extra) {
+    return async (dids, path, defaultValue) => {
+      const [rootDid] = [].concat(dids);
+      const doc = await this.findOne({ did: rootDid });
+      const data = this._extractFromDoc(doc, dids, extra);
       if (!path) {
         return data;
       }
@@ -172,6 +188,22 @@ class BlockletExtrasState extends BaseState {
     };
   }

+  /**
+   * Get data of a given type from an already-fetched document, avoiding a repeated database query
+   * @param {object} doc - the blockletExtras document that has already been fetched
+   * @param {string|string[]} dids - array of dids; the first is the rootDid, the rest are childDids
+   * @param {string} type - the data type, e.g. 'configs' or 'settings'
+   * @returns {any} the data of the given type
+   */
+  getFromDoc({ doc = null, dids = [], name = '' }) {
+    if (!doc || !name || !dids.length) {
+      return null;
+    }
+
+    const extra = this.extras.find((x) => x.name === name);
+    return this._extractFromDoc(doc, dids, extra);
+  }
+
   // CAUTION: in setConfig(), do NOT pass [{ name: xxx }] unless necessary; pass [{ key: xxx }] instead, since the former can cause some configs to be deleted automatically
   generateSetFn(extra) {
     return async (dids, data) => {
@@ -271,7 +303,7 @@ class BlockletExtrasState extends BaseState {
   }

   getMeta(did) {
-    return
+    return this.getExtraByDid(did, { selection: { did: 1, controller: 1, meta: 1 } });
   }

   async isLauncherSessionConsumed(sessionId) {
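The intent of the new getExtraByDid/getFromDoc pair is that a caller can fetch the extras document once and then extract several sections from it without further queries. A minimal sketch, assuming states.blockletExtras is an instance of this class and rootDid/childDid are placeholder identifiers (the 'settings'/'configs' names come from the util/blocklet.js hunk further below):

  // sketch only; rootDid and childDid are hypothetical DIDs
  const doc = await states.blockletExtras.getExtraByDid(rootDid);
  const settings = states.blockletExtras.getFromDoc({ doc, dids: [rootDid], name: 'settings' });
  const configs = states.blockletExtras.getFromDoc({ doc, dids: [rootDid, childDid], name: 'configs' });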
package/lib/states/blocklet.js
CHANGED
@@ -254,6 +254,8 @@ class BlockletState extends BaseState {
     this.defaultPort = config.blockletPort || 5555;
     // @didMap: { [did: string]: metaDid: string }
     this.didMap = new Map();
+    // @didToIdMap: { [did: string]: id: string } - caches the did -> doc.id mapping to avoid repeated $or queries
+    this.didToIdMap = new Map();
     this.statusLocks = new Map();

     // BlockletChildState instance passed from outside
@@ -481,11 +483,23 @@ class BlockletState extends BaseState {
       return null;
     }

-
+    // prefer a direct lookup by the cached id, avoiding the $or query
+    const cachedId = process.env.NODE_ENV === 'test' ? null : this.didToIdMap.get(did);
+    const doc = await this.findOne(cachedId ? { id: cachedId } : { $or: getConditions(did) });
     if (!doc) {
+      // if the cached id yields nothing, the cache may be stale; clear it and retry
+      if (cachedId) {
+        this.didToIdMap.delete(did);
+        return this.getBlocklet(did, { decryptSk });
+      }
       return null;
     }

+    // cache the did -> id mapping
+    if (!cachedId && process.env.NODE_ENV !== 'test') {
+      this.didToIdMap.set(did, doc.id);
+    }
+
     // Load children from BlockletChild table
     const children = await this.loadChildren(doc.id);
     doc.children = children;
@@ -560,6 +574,10 @@ class BlockletState extends BaseState {

     this.didMap.delete(doc.meta?.did);
     this.didMap.delete(doc.appDid);
+    this.didToIdMap.delete(did);
+    this.didToIdMap.delete(doc.meta?.did);
+    this.didToIdMap.delete(doc.appDid);
+    this.didToIdMap.delete(doc.appPid);
     this.statusLocks.delete(doc.meta?.did);
     this.statusLocks.delete(doc.appDid);

@@ -1016,7 +1034,9 @@ class BlockletState extends BaseState {
     await lock.acquire(lockName);
     try {
       const doc = await this.getBlocklet(did);
-
+      if (!doc) {
+        throw new Error(`Blocklet not found: ${did}`);
+      }
       if (doc.meta?.group === BlockletGroup.gateway && !doc.children?.length) {
         const updateData = { status, operator };
         const res = await this.updateBlocklet(did, updateData);
@@ -1090,6 +1110,9 @@ class BlockletState extends BaseState {
       // Recalculate status after children are loaded with updated status
       res.status = getBlockletStatus(res);
       return res;
+    } catch (error) {
+      logger.error('setBlockletStatus failed', { did, status, componentDids, operator, error });
+      throw error;
     } finally {
       await lock.releaseLock(lockName);
     }
package/lib/states/job.js
CHANGED
@@ -1,8 +1,62 @@
+const { NOTIFICATION_SEND_CHANNEL } = require('@abtnode/constant');
+const dayjs = require('@abtnode/util/lib/dayjs');
+const { Op, Sequelize } = require('sequelize');
+const { CustomError } = require('@blocklet/error');
+
 const BaseState = require('./base');

+// map each channel to the queue category used in the query
+const CHANNEL_MAP = {
+  [NOTIFICATION_SEND_CHANNEL.WALLET]: 'send-notification-wallet',
+  [NOTIFICATION_SEND_CHANNEL.PUSH]: 'send-notification-push',
+  [NOTIFICATION_SEND_CHANNEL.EMAIL]: 'send-notification-email',
+  [NOTIFICATION_SEND_CHANNEL.WEBHOOK]: 'send-notification-webhook',
+};
+
 /**
  * @extends BaseState<import('@abtnode/models').JobState>
  */
-class Job extends BaseState {
+class Job extends BaseState {
+  async getPendingNotifications({ teamDids = [], channels = [], createdAt = '', isServer = false }) {
+    if (!teamDids.length || !channels.length) {
+      throw new CustomError(400, 'teamDids and channels are required');
+    }
+    let startTime = createdAt;
+    if (!startTime) {
+      startTime = dayjs().subtract(1, 'hours').toDate();
+    }
+
+    // filter the valid channels and get the corresponding queue names
+    const validChannels = channels.filter((channel) => CHANNEL_MAP[channel]);
+    const queueNames = validChannels.map((channel) => CHANNEL_MAP[channel]);
+
+    // build the entityId query condition
+    // when isServer is true, use COALESCE to match records whose entityId is in teamDids or empty (null/'')
+    // when isServer is false, match records whose entityId is in teamDids directly, which makes the best use of the index
+    const entityIdCondition = isServer
+      ? Sequelize.where(Sequelize.fn('COALESCE', Sequelize.col('entityId'), ''), { [Op.in]: [...teamDids, ''] })
+      : { entityId: { [Op.in]: teamDids } };
+
+    // a single query, using GROUP BY to get counts for every channel
+    const results = await this.model.findAll({
+      attributes: ['queue', [Sequelize.fn('COUNT', Sequelize.col('id')), 'count']],
+      where: {
+        queue: { [Op.in]: queueNames },
+        ...(isServer ? { [Op.and]: entityIdCondition } : entityIdCondition),
+        createdAt: { [Op.gte]: startTime },
+      },
+      group: ['queue'],
+      raw: true,
+    });
+
+    // map the results back to channel names, so every requested channel has a value
+    return validChannels.reduce((acc, channel) => {
+      const queueName = CHANNEL_MAP[channel];
+      const row = results.find((r) => r.queue === queueName);
+      acc[channel] = row ? Number(row.count) : 0;
+      return acc;
+    }, {});
+  }
+}

 module.exports = Job;

package/lib/states/notification.js
CHANGED
@@ -1035,32 +1035,32 @@
     // compute the time range
     const startTime = dayjs().subtract(hours, 'hours').toDate();

-    //
-
-
-
-
-
-
-
-
-
-
-
-
-
-    // extract the list of notification IDs
-    const notificationIds = notifications.map((n) => n.id);
+    // select only the fields needed for the stats, excluding the large JSON fields (walletSendRecord, pushKitSendRecord, emailSendRecord)
+    // this greatly reduces the amount of data transferred
+    const results = await this.model.sequelize.query(
+      `SELECT
+        "walletSendAt", "walletSendStatus", "walletSendFailedReason",
+        "pushKitSendAt", "pushKitSendStatus", "pushKitSendFailedReason",
+        "emailSendAt", "emailSendStatus", "emailSendFailedReason",
+        "webhook", "createdAt"
+      FROM notification_receivers
+      WHERE "createdAt" >= :startTime`,
+      {
+        replacements: { startTime },
+        type: Sequelize.QueryTypes.SELECT,
+      }
+    );

-    //
-    return
-
-
-
-    }
-
-
+    // only the webhook JSON field needs parsing
+    return results.map((item) => {
+      if (typeof item.webhook === 'string') {
+        try {
+          item.webhook = JSON.parse(item.webhook);
+        } catch {
+          item.webhook = {};
+        }
+      }
+      return item;
     });
   }
 }
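A rough usage sketch of the getPendingNotifications helper added to job.js above, assuming NOTIFICATION_SEND_CHANNEL is imported from @abtnode/constant as in the hunk (the DID is a placeholder; the result is a count of still-queued send jobs per requested channel):

  // sketch only; 'z8iZ...' is a hypothetical team DID
  const pending = await states.job.getPendingNotifications({
    teamDids: ['z8iZ...'],
    channels: [NOTIFICATION_SEND_CHANNEL.WALLET, NOTIFICATION_SEND_CHANNEL.EMAIL],
    isServer: false,
  });
  // => e.g. { [NOTIFICATION_SEND_CHANNEL.WALLET]: 3, [NOTIFICATION_SEND_CHANNEL.EMAIL]: 0 }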
package/lib/util/blocklet.js
CHANGED
@@ -14,6 +14,7 @@ const mergeWith = require('lodash/mergeWith');
 const toLower = require('lodash/toLower');
 const isEmpty = require('lodash/isEmpty');
 const omit = require('lodash/omit');
+const pick = require('lodash/pick');
 const streamToPromise = require('stream-to-promise');
 const { Throttle } = require('stream-throttle');
 const { slugify } = require('transliteration');
@@ -1972,9 +1973,19 @@ const _getBlocklet = async ({
     return null;
   }

+  // optimization: fetch independent data in parallel (query extraDoc once, then extract from it in memory)
+  const [extraDoc, nodeInfo, site] = await Promise.all([
+    states.blockletExtras.getExtraByDid(blocklet.meta.did),
+    states.node.read(),
+    states.site.findOneByBlocklet(blocklet.meta.did),
+  ]);
+
+  // extract settings synchronously from extraDoc (no additional database query needed)
+  const extrasMeta = extraDoc ? pick(extraDoc, ['did', 'meta', 'controller']) : null;
+  const settings = states.blockletExtras.getFromDoc({ doc: extraDoc, dids: [blocklet.meta.did], name: 'settings' });
+
   // app settings
   // FIXME: @zhanghan in server dev mode, blocklet.meta.did and blocklet.appPid are inconsistent for a blocklet run via `node /workspace/arcblock/blocklet-server/core/cli/tools/dev.js`
-  const settings = await states.blockletExtras.getSettings(blocklet.meta.did);
   blocklet.trustedPassports = get(settings, 'trustedPassports') || [];
   blocklet.trustedFactories = (get(settings, 'trustedFactories') || []).map((x) => {
     if (!x.passport.ttlPolicy) {
@@ -1989,7 +2000,6 @@ const _getBlocklet = async ({
   blocklet.enablePassportIssuance = get(settings, 'enablePassportIssuance', true);
   blocklet.settings = settings || {};

-  const extrasMeta = await states.blockletExtras.getMeta(blocklet.meta.did);
   if (extrasMeta) {
     blocklet.controller = extrasMeta.controller;
   }
@@ -2005,8 +2015,6 @@ const _getBlocklet = async ({
     );
   }

-  const nodeInfo = await states.node.read();
-
   (nodeInfo?.blockletRegistryList || []).forEach((store) => {
     if (!blocklet.settings.storeList.find((x) => x.url === store.url)) {
       blocklet.settings.storeList.push({
@@ -2016,11 +2024,29 @@ const _getBlocklet = async ({
     }
   });

-  blocklet.site =
+  blocklet.site = site;
   blocklet.enableDocker = nodeInfo.enableDocker;
   blocklet.enableDockerNetwork = nodeInfo.enableDockerNetwork;

-
+  // first forEachBlockletSync pass: collect the dids of every component
+  const componentConfigRequests = [];
+  forEachBlockletSync(blocklet, (component, { ancestors }) => {
+    const dids = [...ancestors.map((x) => x.meta.did), component.meta.did];
+    componentConfigRequests.push({
+      componentDid: component.meta.did,
+      dids,
+    });
+  });
+
+  // extract configs for each component from the cached document (synchronous, no additional database query)
+  const configsMap = new Map();
+  componentConfigRequests.forEach(({ componentDid, dids }) => {
+    const configs = states.blockletExtras.getFromDoc({ doc: extraDoc, dids, name: 'configs' });
+    configsMap.set(componentDid, configs);
+  });
+
+  // second forEachBlockletSync pass: fill in each component
+  forEachBlockletSync(blocklet, (component, { id, level, ancestors }) => {
     // component env
     try {
       // Validate component has required meta fields for getComponentDirs
@@ -2056,8 +2082,8 @@ const _getBlocklet = async ({
       throw error;
     }

-    // component config
-    const configs =
+    // component config - taken from the prefetched configsMap
+    const configs = configsMap.get(component.meta.did) || [];
     const rootBlocklet = ancestors.length > 0 ? ancestors[0] : blocklet;
     fillBlockletConfigs(component, configs, { rootBlocklet, nodeInfo, dataDirs });
   });

package/lib/util/docker/create-docker-image.js
CHANGED
@@ -74,10 +74,17 @@ async function buildImage({ image, dockerfile }) {
 const building = {};

 async function createDockerImage(data) {
+  const metaDockerInfo = data?.meta?.docker || {};
+  const keys = Object.keys(metaDockerInfo);
+  for (const key of keys) {
+    if (metaDockerInfo[key] === '') {
+      delete metaDockerInfo[key];
+    }
+  }
   const customInfo = getBlockletCustomDockerfile(data);
   if (building[customInfo.image]) {
     await building[customInfo.image];
-    return customInfo;
+    return Object.assign(customInfo, metaDockerInfo);
   }

   if (
@@ -94,7 +101,8 @@ async function createDockerImage(data) {
     building[customInfo.image] = buildImage(customInfo);
     await building[customInfo.image];
   }
-
+
+  return Object.assign(customInfo, metaDockerInfo);
 }

 module.exports = {

package/lib/util/docker/parse-docker-options-from-pm2.js
CHANGED
@@ -22,6 +22,7 @@ const checkNeedRunDocker = require('./check-need-run-docker');
 const replaceEnvValue = require('./replace-env-value');
 const parseDockerCpVolume = require('./parse-docker-cp-volume');
 const generateClusterNodeScript = require('./generate-cluster-node-script');
+const { parseTmpfs } = require('./parse-tmpfs');

 const getSystemResources = (() => {
   let cachedResources = null;
@@ -86,6 +87,9 @@ async function parseDockerOptionsFromPm2({
     dockerInfo.command = nextOptions.env.DOCKER_CMD;
   }

+  const { tmpfs } = dockerInfo;
+  delete dockerInfo.tmpfs;
+
   try {
     dockerCmdValidator(dockerInfo.image);
   } catch (error) {
@@ -102,6 +106,7 @@ async function parseDockerOptionsFromPm2({
   const defaultCpus = '2';
   const defaultMemory = '1.5g';
   const defaultDiskSize = '0g';
+  const defaultTmpfsSize = '4g';

   const cpus = isServerless
     ? process.env.ABT_NODE_DOCKER_CPUS
@@ -115,6 +120,12 @@ async function parseDockerOptionsFromPm2({
     ? process.env.ABT_NODE_DOCKER_DISK_SIZE
     : options.env.DOCKER_DISK_SIZE || process.env.ABT_NODE_DOCKER_DISK_SIZE;

+  const tmpfsSize = isServerless
+    ? process.env.ABT_NODE_DOCKER_TMPFS_SIZE
+    : options.env.DOCKER_TMPFS_SIZE || process.env.ABT_NODE_DOCKER_TMPFS_SIZE;
+
+  const tmpfsOption = parseTmpfs(tmpfs, tmpfsSize || defaultTmpfsSize);
+
   // Ensure environment variables are properly set within the Docker container
   const envDefaults = {
     BLOCKLET_DOCKER_CPUS: cpus || defaultCpus,
@@ -138,6 +149,7 @@ async function parseDockerOptionsFromPm2({
   dockerEnv.BLOCKLET_DOCKER_CPUS = `${Math.min(Number(dockerEnv.BLOCKLET_DOCKER_CPUS), maxCPUs)}`;
   dockerEnv.BLOCKLET_DOCKER_MEMORY = `${Math.min(Number(dockerEnv.BLOCKLET_DOCKER_MEMORY.replace('g', '')), maxMemory)}g`;
   dockerEnv.BLOCKLET_DOCKER_DISK_SIZE = `${Math.min(Number(dockerEnv.BLOCKLET_DOCKER_DISK_SIZE.replace('g', '')), 20)}g`;
+  dockerEnv.BLOCKLET_DOCKER_TMPFS_SIZE = tmpfsOption.size;

   const { baseDir } = dockerInfo;
   const serverDir = process.env.ABT_NODE_DATA_DIR;
@@ -152,6 +164,7 @@ async function parseDockerOptionsFromPm2({
   dockerEnv.BLOCKLET_DATA_DIR = replaceDir(nextOptions.env.BLOCKLET_DATA_DIR);
   dockerEnv.BLOCKLET_LOG_DIR = path.join(baseDir, 'logs');
   dockerEnv.BLOCKLET_CACHE_DIR = path.join(baseDir, 'cache');
+  dockerEnv.BLOCKLET_TMPFS_DIR = tmpfsOption.fullDir;
   dockerEnv.BLOCKLET_APP_SHARE_DIR = replaceDir(nextOptions.env.BLOCKLET_APP_SHARE_DIR);
   dockerEnv.BLOCKLET_SHARE_DIR = replaceDir(nextOptions.env.BLOCKLET_SHARE_DIR);
   dockerEnv.BLOCKLET_HOST = getLocalIPAddress();
@@ -361,6 +374,7 @@ async function parseDockerOptionsFromPm2({
     --memory-swap="${dockerEnv.BLOCKLET_DOCKER_MEMORY}" \
     --oom-kill-disable=false \
     --env-file ${dockerEnvFile} \
+    ${tmpfsOption.tmpfs} \
     ${dockerInfo.network} \
     ${dockerInfo.runParamString || ''} \
     ${dockerInfo.image} ${dockerInfo.command || ''} \

package/lib/util/docker/parse-tmpfs.js
ADDED
@@ -0,0 +1,92 @@
+const { join } = require('path');
+
+const DEFAULT_MAX_TMPFS_SIZE = '4g';
+
+function parseSize(sizeStr) {
+  if (typeof sizeStr === 'string') {
+    if (sizeStr.endsWith('m')) {
+      return Number(sizeStr.replace('m', ''));
+    }
+    if (sizeStr.endsWith('g')) {
+      return Number(sizeStr.replace('g', '')) * 1024;
+    }
+    // Try to parse as number (assume GB if no unit)
+    return Number(sizeStr) * 1024;
+  }
+  return Number(sizeStr) * 1024;
+}
+
+function parseTmpfs(tmpfs, maxTmpfsSize = DEFAULT_MAX_TMPFS_SIZE, prefixDir = '/') {
+  if (!tmpfs) {
+    return {
+      tmpfs: '',
+      size: '0m',
+      fullDir: '',
+    };
+  }
+  const [fsPath, size] = tmpfs.split(':');
+  if (!fsPath || !size) {
+    return {
+      tmpfs: '',
+      size: '0m',
+      fullDir: '',
+    };
+  }
+
+  const fullDir = join(prefixDir, fsPath);
+  const defaultMaxSizeMb = parseSize(DEFAULT_MAX_TMPFS_SIZE);
+
+  // Parse maxTmpfsSize to a number in MB
+  let maxSizeMb = parseSize(maxTmpfsSize);
+  // Fallback to default if parsing failed (NaN), invalid value, or zero/empty
+  if (Number.isNaN(maxSizeMb) || maxSizeMb <= 0) {
+    maxSizeMb = defaultMaxSizeMb;
+  }
+
+  // Extract size value from various formats:
+  // - "1g" or "512m" (simple size)
+  // - "1g,rw" or "rw,1g" (size with options)
+  // - "size=512m" or "size=1g" (Docker style with size= prefix)
+  // - "size=512m,rw" (Docker style with options)
+  const sizeParts = size.split(',');
+  let sizeValue = '';
+  for (const part of sizeParts) {
+    let trimmed = part.trim();
+    // Handle "size=512m" format - extract the value after "size="
+    if (trimmed.startsWith('size=')) {
+      trimmed = trimmed.slice(5); // Remove "size=" prefix
+    }
+    if (trimmed.endsWith('g') || trimmed.endsWith('m')) {
+      sizeValue = trimmed;
+      break;
+    }
+  }
+
+  let sizeMb = 0;
+  if (sizeValue.endsWith('g')) {
+    const parsed = Number(sizeValue.replace('g', ''));
+    sizeMb = Number.isNaN(parsed) ? 0 : parsed * 1024;
+    sizeMb = Math.min(sizeMb, maxSizeMb);
+  }
+  if (sizeValue.endsWith('m')) {
+    const parsed = Number(sizeValue.replace('m', ''));
+    sizeMb = Number.isNaN(parsed) ? 0 : parsed;
+    sizeMb = Math.min(sizeMb, maxSizeMb);
+  }
+  // Ensure sizeMb is valid
+  if (Number.isNaN(sizeMb) || sizeMb < 0) {
+    sizeMb = 0;
+  }
+  sizeMb = Math.min(sizeMb, maxSizeMb);
+
+  return {
+    tmpfs: `--tmpfs ${fullDir}:size=${sizeMb}m`,
+    size: `${sizeMb}m`,
+    fullDir,
+  };
+}
+
+module.exports = {
+  parseTmpfs,
+  DEFAULT_MAX_TMPFS_SIZE,
+};
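To make the new tmpfs plumbing concrete, here is a hand-traced sketch of what parseTmpfs returns for a few inputs and where the pieces land in parse-docker-options-from-pm2.js (values derived by reading the code above, not captured from a real run):

  parseTmpfs('/tmp:1g', '4g');
  // => { tmpfs: '--tmpfs /tmp:size=1024m', size: '1024m', fullDir: '/tmp' }
  //    .tmpfs   is appended to the docker run flags
  //    .size    becomes dockerEnv.BLOCKLET_DOCKER_TMPFS_SIZE
  //    .fullDir becomes dockerEnv.BLOCKLET_TMPFS_DIR
  parseTmpfs('/cache:size=512m,rw', '4g');
  // => { tmpfs: '--tmpfs /cache:size=512m', size: '512m', fullDir: '/cache' }
  parseTmpfs('/tmp:8g', '4g');   // a request above the cap is clamped to the 4g max
  // => { tmpfs: '--tmpfs /tmp:size=4096m', size: '4096m', fullDir: '/tmp' }
  parseTmpfs('');                // no tmpfs declared
  // => { tmpfs: '', size: '0m', fullDir: '' }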
package/lib/webhook/index.js
CHANGED
package/package.json
CHANGED
@@ -3,7 +3,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.17.
+  "version": "1.17.7-beta-20251224-045844-3c7f459a",
   "description": "",
   "main": "lib/index.js",
   "files": [
@@ -17,21 +17,21 @@
   "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
   "license": "Apache-2.0",
   "dependencies": {
-    "@abtnode/analytics": "1.17.
-    "@abtnode/auth": "1.17.
-    "@abtnode/certificate-manager": "1.17.
-    "@abtnode/constant": "1.17.
-    "@abtnode/cron": "1.17.
-    "@abtnode/db-cache": "1.17.
-    "@abtnode/docker-utils": "1.17.
-    "@abtnode/logger": "1.17.
-    "@abtnode/models": "1.17.
-    "@abtnode/queue": "1.17.
-    "@abtnode/rbac": "1.17.
-    "@abtnode/router-provider": "1.17.
-    "@abtnode/static-server": "1.17.
-    "@abtnode/timemachine": "1.17.
-    "@abtnode/util": "1.17.
+    "@abtnode/analytics": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/auth": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/certificate-manager": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/constant": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/cron": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/db-cache": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/docker-utils": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/logger": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/models": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/queue": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/rbac": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/router-provider": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/static-server": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/timemachine": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@abtnode/util": "1.17.7-beta-20251224-045844-3c7f459a",
     "@aigne/aigne-hub": "^0.10.15",
     "@arcblock/did": "^1.27.15",
     "@arcblock/did-connect-js": "^1.27.15",
@@ -43,15 +43,15 @@
     "@arcblock/pm2-events": "^0.0.5",
     "@arcblock/validator": "^1.27.15",
     "@arcblock/vc": "^1.27.15",
-    "@blocklet/constant": "1.17.
+    "@blocklet/constant": "1.17.7-beta-20251224-045844-3c7f459a",
     "@blocklet/did-space-js": "^1.2.11",
-    "@blocklet/env": "1.17.
+    "@blocklet/env": "1.17.7-beta-20251224-045844-3c7f459a",
     "@blocklet/error": "^0.3.5",
-    "@blocklet/meta": "1.17.
-    "@blocklet/resolver": "1.17.
-    "@blocklet/sdk": "1.17.
-    "@blocklet/server-js": "1.17.
-    "@blocklet/store": "1.17.
+    "@blocklet/meta": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@blocklet/resolver": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@blocklet/sdk": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@blocklet/server-js": "1.17.7-beta-20251224-045844-3c7f459a",
+    "@blocklet/store": "1.17.7-beta-20251224-045844-3c7f459a",
     "@blocklet/theme": "^3.2.19",
     "@fidm/x509": "^1.2.1",
     "@ocap/mcrypto": "^1.27.15",
@@ -116,5 +116,5 @@
     "express": "^4.18.2",
     "unzipper": "^0.10.11"
   },
-  "gitHead": "
+  "gitHead": "e30dc1736187fc9d8136f146badfd5b521af16f2"
 }