@abtnode/core 1.16.47-beta-20250715-034905-11207b15 → 1.16.47-beta-20250718-075003-940d2e2a

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -12,6 +12,7 @@ const { INSTALL_ACTIONS } = require('@abtnode/constant');
 const logger = require('@abtnode/logger')('@abtnode/core:install-app-backup');
 
 const { validateBlocklet, checkDuplicateAppSk, getAppDirs } = require('../../../util/blocklet');
+const { dockerRestorePgBlockletDb } = require('../../../util/docker/docker-backup-pg-blocklet-db');
 
 /**
  * Backup directory layout
@@ -213,6 +214,9 @@ const installApplicationFromBackup = async ({
       }
       logger.info(`data is ${moveDir ? 'moved' : 'copied'} successfully`);
     }
+
+    const dbPath = path.join(dataDir, 'blocklet.db');
+    await dockerRestorePgBlockletDb(dbPath);
   } catch (error) {
     logger.error('installFromBackup failed', { did, error });
 
@@ -41,6 +41,7 @@ const { translate } = require('../../../locales');
 const { getFolderSize, formatMemoryUsage } = require('../utils/disk');
 const { dockerExecChown } = require('../../../util/docker/docker-exec-chown');
 const checkDockerRunHistory = require('../../../util/docker/check-docker-run-history');
+const { dockerBackupPgBlockletDb } = require('../../../util/docker/docker-backup-pg-blocklet-db');
 
 /**
  * @param {{ appDid: string, appPid: string }} params
@@ -169,6 +170,18 @@ class SpacesBackup extends BaseBackup {
     }
   }
 
+  async backupPostgres() {
+    if (!this.blocklet) {
+      return;
+    }
+    const dataDir =
+      this.blocklet.environments?.find((v) => v.key === 'BLOCKLET_APP_DATA_DIR')?.value ||
+      path.join(process.env.ABT_NODE_DATA_DIR, 'data', this.blocklet.appPid || this.blocklet.appDid);
+    const dbPath = path.join(dataDir, 'blocklet.db');
+
+    await dockerBackupPgBlockletDb(dbPath);
+  }
+
   /**
    *
    * @returns {Promise<void>}
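
For orientation, the sketch below (not part of the package) traces how backupPostgres() above resolves the blocklet database path when no BLOCKLET_APP_DATA_DIR environment entry is present; the data-dir root and app PID are invented example values.

```js
const path = require('path');

// Assumed example values only; not taken from the package.
process.env.ABT_NODE_DATA_DIR = '/var/abtnode';
const blocklet = {
  appPid: 'example-app-pid',
  environments: [], // no BLOCKLET_APP_DATA_DIR entry, so the fallback branch applies
};

// Mirrors the fallback branch of backupPostgres() shown above.
const dataDir =
  blocklet.environments?.find((v) => v.key === 'BLOCKLET_APP_DATA_DIR')?.value ||
  path.join(process.env.ABT_NODE_DATA_DIR, 'data', blocklet.appPid || blocklet.appDid);
const dbPath = path.join(dataDir, 'blocklet.db');

console.log(dbPath); // -> /var/abtnode/data/example-app-pid/blocklet.db
```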
@@ -188,6 +201,7 @@ class SpacesBackup extends BaseBackup {
     logger.info(this.input.appPid, 'initialize.after', formatMemoryUsage());
 
     await this.backupChown();
+    await this.backupPostgres();
 
     logger.info(this.input.appPid, 'export.before', formatMemoryUsage());
     await this.export();
@@ -32,7 +32,7 @@ async function checkNeedRunDocker(meta = {}, env = {}, nodeInfo = {}, isExternal
   // Ensure Docker is installed
   if (!nodeInfo.isDockerInstalled) {
     if (!(await checkDockerInstalled())) {
-      throw new Error('Docker is not installed');
+      throw new Error('Docker mode is enabled, but the Docker CLI was not found.');
     }
   }
   return true;
@@ -0,0 +1,157 @@
+const path = require('path');
+const fs = require('fs/promises');
+const { dbPathToDbName } = require('@abtnode/models');
+const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
+const logger = require('@abtnode/logger')('backup-pg-blocklet-db');
+
+const { POSTGRES_CONTAINER_NAME } = require('./ensure-docker-postgres');
+const { hasPostgres } = require('../migration-sqlite-to-postgres');
+
+// At most 1 hour, because pg_dump can take a very long time on especially large tables
+const baseSpawnOptions = { timeout: 3600_000, retry: 0 };
+
+function getBlockletDataRoot(dbPath) {
+  return path.dirname(path.dirname(path.dirname(dbPath)));
+}
+const getBackupPath = (dbPath) => {
+  return `${dbPath.replace('.db', '')}_pg_backup.gz`;
+};
+
+const checkDbExists = async (pgUrl, dbName) => {
+  const output = await promiseSpawn(
+    `docker exec ${POSTGRES_CONTAINER_NAME} psql "${pgUrl}" -tAc "SELECT 1 FROM pg_database WHERE datname = '${dbName}';"`,
+    undefined,
+    { timeout: 3_000, retry: 2 }
+  );
+  return output.trim() === '1';
+};
+
+const getPgTempDir = (dbName) => {
+  return `/tmp/${dbName}.sql.gz`;
+};
+
+function buildPgUrl(originalUrl, dbName) {
+  const url = new URL(originalUrl);
+  url.pathname = `/${dbName}`; // swap in the target database name
+  return url.toString();
+}
+
+function getPgUrl() {
+  const pgUrl = process.env.ABT_NODE_POSTGRES_URL;
+
+  // If the URL points at the current container, use the in-container port
+  if (pgUrl.includes('127.0.0.1') || pgUrl.includes('localhost')) {
+    return pgUrl.replace('40408', '5432');
+  }
+
+  return '';
+}
+
+const dockerBackupPgBlockletDb = async (dbPath) => {
+  const dataDir = getBlockletDataRoot(dbPath);
+  if (!hasPostgres(dataDir)) {
+    logger.info('no using postgres, skip backup the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const pgUrl = getPgUrl();
+  if (!pgUrl) {
+    logger.info('no postgres url, skip backup the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const dbName = dbPathToDbName(dbPath);
+  const backupPath = getBackupPath(dbPath);
+
+  const isDbExists = await checkDbExists(pgUrl, dbName);
+  if (!isDbExists) {
+    logger.info('no need to backup the pg blocklet db:', dbName);
+    return {};
+  }
+
+  // Point the pg url at dbName
+  const pgUrlForDump = buildPgUrl(pgUrl, dbName);
+
+  logger.info('backup the pg blocklet db start:', dbName);
+  await promiseSpawn(
+    `docker exec ${POSTGRES_CONTAINER_NAME} sh -c 'pg_dump ${pgUrlForDump} | gzip' > ${backupPath}`,
+    {},
+    baseSpawnOptions
+  );
+  logger.info('backup the pg blocklet db cp backup file done:', backupPath);
+  return {
+    backupPath,
+    dbName,
+  };
+};
+
+const dockerRestorePgBlockletDb = async (dbPath) => {
+  const dataDir = getBlockletDataRoot(dbPath);
+  if (!hasPostgres(dataDir)) {
+    logger.info('no using postgres, skip restore the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const pgUrl = getPgUrl();
+  if (!pgUrl) {
+    logger.info('no postgres url, skip restore the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const dbName = dbPathToDbName(dbPath);
+  const backupPath = getBackupPath(dbPath);
+
+  try {
+    await fs.access(backupPath);
+  } catch (_) {
+    logger.info('no need to restore the pg blocklet db path at:', backupPath);
+    return {};
+  }
+
+  const containerTmpPath = getPgTempDir(dbName);
+
+  // Point the pg url at dbName
+  const pgUrlForRestore = buildPgUrl(pgUrl, dbName);
+
+  logger.info('restore the pg blocklet db start:', dbName);
+
+  const isDbExists = await checkDbExists(pgUrl, dbName);
+  if (!isDbExists) {
+    // If the database does not exist, create it; otherwise connecting to it directly below would fail
+    logger.info('creating missing database before restore:', dbName);
+    await promiseSpawn(
+      `docker exec ${POSTGRES_CONTAINER_NAME} psql "${pgUrl}" -c "CREATE DATABASE \\"${dbName}\\";"`,
+      {},
+      baseSpawnOptions
+    );
+  } else {
+    // If the database already exists, wipe its schema
+    logger.info('clearing existing schema in db:', dbName);
+    await promiseSpawn(
+      `docker exec ${POSTGRES_CONTAINER_NAME} psql "${pgUrlForRestore}" -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;"`,
+      {},
+      baseSpawnOptions
+    );
+  }
+
+  logger.info('restore the pg blocklet db drop schema done:', dbName);
+
+  await promiseSpawn(`docker cp ${backupPath} ${POSTGRES_CONTAINER_NAME}:${containerTmpPath}`, {}, baseSpawnOptions);
+  logger.info('restore the pg blocklet db cp backup file done:', backupPath);
+  await promiseSpawn(
+    `docker exec ${POSTGRES_CONTAINER_NAME} sh -c 'gunzip -c ${containerTmpPath} | psql "${pgUrlForRestore}"'`,
+    {},
+    baseSpawnOptions
+  );
+  logger.info('restore the pg blocklet db done:', dbName);
+
+  return {
+    backupPath,
+    dbName,
+  };
+};
+
+module.exports = {
+  dockerBackupPgBlockletDb,
+  dockerRestorePgBlockletDb,
+};
@@ -100,7 +100,9 @@ async function _ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port =
 
 let lastUrl = '';
 
-async function ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
+const POSTGRES_CONTAINER_NAME = 'abtnode-postgres';
+
+async function ensureDockerPostgres(dataDir, name = POSTGRES_CONTAINER_NAME, port = 40408, force = false) {
   if (lastUrl) {
     return lastUrl;
   }
@@ -109,7 +111,7 @@ async function ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 4
 }
 
 // Stop the container, and return whether a postgres container exists in docker
-async function stopDockerPostgres(name = 'abtnode-postgres') {
+async function stopDockerPostgres(name = POSTGRES_CONTAINER_NAME) {
   if (!(await checkDockerInstalled())) {
     return false;
   }
@@ -132,4 +134,5 @@ async function stopDockerPostgres(name = 'abtnode-postgres') {
 module.exports = {
   ensureDockerPostgres,
   stopDockerPostgres,
+  POSTGRES_CONTAINER_NAME,
 };
@@ -1,30 +1,46 @@
+/* eslint-disable no-await-in-loop */
+
 const path = require('path');
 const { spawn } = require('child_process');
 const fsp = require('fs/promises');
 const os = require('os');
+const logger = require('@abtnode/logger')('@abtnode/core:util:ensure-bun');
+const shelljs = require('shelljs');
+const semver = require('semver');
 
-const BUN_VERSION = '1.2.18';
+const IS_WINDOWS = process.platform === 'win32';
+const BUN_VERSION = IS_WINDOWS ? '1.2.4' : '1.2.18';
 
 const getRootDir = () => {
   if (process.env.ABT_NODE_DATA_DIR) {
     return process.env.ABT_NODE_DATA_DIR;
   }
-  return path.join(os.tmpdir(), 'bun_install');
+  const homeDir = os.homedir();
+  if (homeDir) {
+    return path.join(homeDir, '.blocklet-bun');
+  }
+  return path.join(os.tmpdir(), '.blocklet-bun');
 };
 
 async function _ensureBun() {
-  const bunDir = path.join(getRootDir(), 'core', 'bun_install');
-  try {
-    await fsp.access(bunDir);
-  } catch {
-    await fsp.mkdir(bunDir, { recursive: true });
-  }
-
+  const bunDir = path.join(getRootDir(), 'core', 'bun-install');
+  await fsp.mkdir(bunDir, { recursive: true }).catch(() => {});
   const installDir = path.join(bunDir, BUN_VERSION);
   const binDir = path.join(installDir, 'bin');
-  const bunExec = path.join(binDir, process.platform === 'win32' ? 'bun.exe' : 'bun');
+  const bunExec = path.join(binDir, IS_WINDOWS ? 'bun.exe' : 'bun');
+
+  const whichBun = shelljs.which('bun');
+  // If bun is already on PATH and its version is >= BUN_VERSION, use the existing bun directly
+  if (whichBun) {
+    // Check the installed bun version
+    const bunVersion = shelljs.exec(`${whichBun} --version`).stdout.trim();
+    // Compare against BUN_VERSION using a semver library
+    if (semver.gte(bunVersion, BUN_VERSION)) {
+      return whichBun.toString();
+    }
+  }
 
-  // If bun is already installed in this project, return its path
+  // If already installed, return immediately
   try {
     await fsp.access(bunExec);
     return bunExec;
@@ -32,29 +48,20 @@ async function _ensureBun() {
     //
   }
 
-  // Create installation directory
+  logger.info(`Bun not found; installing to: ${installDir}`);
   await fsp.mkdir(installDir, { recursive: true });
 
-  // Run the official Bun installer script with BUN_INSTALL overridden
-  await new Promise((resolvePromise, reject) => {
-    const linuxInstall = ['-c', 'curl -fsSL https://bun.sh/install | bash'];
-    const windowsInstall = ['-c', 'powershell -c "irm bun.sh/install.ps1 | iex"'];
-    const installer = spawn('bash', process.platform === 'win32' ? windowsInstall : linuxInstall, {
+  // Run official Bun installer script
+  await new Promise((resolve, reject) => {
+    const cmd = IS_WINDOWS
+      ? ['-c', 'powershell -c "irm bun.sh/install.ps1 | iex"']
+      : ['-c', 'curl -fsSL https://bun.sh/install | bash'];
+    const installer = spawn('bash', cmd, {
       env: { ...process.env, BUN_INSTALL: installDir, BUN_VERSION, SHELL: '/dev/null', HOME: installDir },
       stdio: 'inherit',
     });
-
-    installer.on('close', (code) => {
-      if (code !== 0) {
-        reject(new Error(`Bun installation failed with exit code ${code}`));
-      } else {
-        resolvePromise();
-      }
-    });
-
-    installer.on('error', (err) => {
-      reject(err);
-    });
+    installer.on('close', (code) => (code === 0 ? resolve() : reject(new Error(`Installer exited with code ${code}`))));
+    installer.on('error', reject);
   });
 
   return bunExec;
@@ -67,7 +74,26 @@ const ensureBun = async () => {
   if (bunPathPromise) {
     return bunPathPromise;
   }
-  bunPathPromise = _ensureBun();
+  bunPathPromise = (async () => {
+    const maxAttempts = 5;
+    let lastError;
+    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
+      try {
+        const bunExecPath = await _ensureBun();
+        // Verify executable exists
+        await fsp.access(bunExecPath);
+        logger.info(`Bun installation succeeded: ${bunExecPath}`);
+        return bunExecPath;
+      } catch (err) {
+        lastError = err;
+        logger.error(`Installation attempt ${attempt} failed: ${err.message}`);
+        if (attempt < maxAttempts) {
+          logger.info(`Retrying installation (${attempt + 1}/${maxAttempts})...`);
+        }
+      }
+    }
+    throw new Error(`All ${maxAttempts} installation attempts failed: ${lastError.message}`);
+  })();
   return bunPathPromise;
 };
 
@@ -76,18 +102,13 @@ const bunOptions = {
 };
 
 const getBunCacheDir = async (isDocker = false) => {
-  const abtNodeDir = getRootDir();
-  // If not on linux, keep separate cache directories for docker and non-docker
+  const bunDir = getRootDir();
   let cacheDir = isDocker ? 'bun-cache-docker' : 'bun-cache';
   if (os.type() === bunOptions.baseDockerOs) {
     cacheDir = 'bun-cache';
   }
-  const bunCacheDir = path.join(abtNodeDir, 'tmp', cacheDir);
-  try {
-    await fsp.access(bunCacheDir);
-  } catch (_) {
-    await fsp.mkdir(bunCacheDir, { recursive: true });
-  }
+  const bunCacheDir = path.join(bunDir, 'tmp', cacheDir);
+  await fsp.mkdir(bunCacheDir, { recursive: true }).catch(() => {});
   return bunCacheDir;
 };
 
package/lib/util/env.js CHANGED
@@ -3,6 +3,9 @@ const serverJobBackoffSeconds = process.env.ABT_NODE_JOB_BACKOFF_SECONDS
   : 600;
 
 const shouldJobBackoff = () => {
+  if (process.env.ABT_NODE_JOB_BACKOFF_SECONDS === '0') {
+    return false;
+  }
   const uptime = process.uptime();
   return uptime <= serverJobBackoffSeconds;
 };
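
A standalone sketch of the behavior after this change, for illustration only; the true branch of the serverJobBackoffSeconds ternary is not visible in the hunk, so the Number(...) parsing below is an assumption. The new guard makes ABT_NODE_JOB_BACKOFF_SECONDS=0 an explicit opt-out of the startup backoff window, independent of how the configured value is parsed.

```js
// Illustrative reimplementation, not the package source.
const serverJobBackoffSeconds = process.env.ABT_NODE_JOB_BACKOFF_SECONDS
  ? Number(process.env.ABT_NODE_JOB_BACKOFF_SECONDS) // assumed parsing of the env var
  : 600;

const shouldJobBackoff = () => {
  if (process.env.ABT_NODE_JOB_BACKOFF_SECONDS === '0') {
    return false; // explicit opt-out added in this release
  }
  return process.uptime() <= serverJobBackoffSeconds;
};

// With ABT_NODE_JOB_BACKOFF_SECONDS=0 this always prints false;
// otherwise it prints true during the first serverJobBackoffSeconds of uptime.
console.log(shouldJobBackoff());
```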
@@ -55,7 +55,7 @@ async function installExternalDependencies({ appDir, forceInstall = false, nodeI
   await new Promise((resolve, reject) => {
     const child = spawn(bunPath, ['install'], {
       cwd: appDir,
-      stdio: 'pipe',
+      stdio: 'inherit',
       shell: true,
       env: {
         ...process.env,
@@ -64,14 +64,9 @@
       },
     });
 
-    let errorOutput = '';
-    child.stderr.on('data', (data) => {
-      errorOutput += data.toString();
-    });
-
     child.on('close', (code) => {
-      if (code !== 0 && errorOutput.trim()) {
-        reject(new Error(errorOutput));
+      if (code !== 0) {
+        reject(new Error(`exit code ${code}`));
       } else {
         resolve();
       }
package/package.json CHANGED
@@ -3,7 +3,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.16.47-beta-20250715-034905-11207b15",
+  "version": "1.16.47-beta-20250718-075003-940d2e2a",
   "description": "",
   "main": "lib/index.js",
   "files": [
@@ -19,22 +19,22 @@
   "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
   "license": "Apache-2.0",
   "dependencies": {
-    "@abtnode/analytics": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/auth": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/certificate-manager": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/client": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/constant": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/cron": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/db-cache": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/docker-utils": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/logger": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/models": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/queue": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/rbac": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/router-provider": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/static-server": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/timemachine": "1.16.47-beta-20250715-034905-11207b15",
-    "@abtnode/util": "1.16.47-beta-20250715-034905-11207b15",
+    "@abtnode/analytics": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/auth": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/certificate-manager": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/client": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/constant": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/cron": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/db-cache": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/docker-utils": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/logger": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/models": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/queue": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/rbac": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/router-provider": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/static-server": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/timemachine": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@abtnode/util": "1.16.47-beta-20250718-075003-940d2e2a",
     "@arcblock/did": "1.20.16",
     "@arcblock/did-auth": "1.20.16",
     "@arcblock/did-ext": "1.20.16",
@@ -45,14 +45,14 @@
     "@arcblock/pm2-events": "^0.0.5",
     "@arcblock/validator": "1.20.16",
     "@arcblock/vc": "1.20.16",
-    "@blocklet/constant": "1.16.47-beta-20250715-034905-11207b15",
+    "@blocklet/constant": "1.16.47-beta-20250718-075003-940d2e2a",
     "@blocklet/did-space-js": "^1.1.7",
-    "@blocklet/env": "1.16.47-beta-20250715-034905-11207b15",
+    "@blocklet/env": "1.16.47-beta-20250718-075003-940d2e2a",
     "@blocklet/error": "^0.2.5",
-    "@blocklet/meta": "1.16.47-beta-20250715-034905-11207b15",
-    "@blocklet/resolver": "1.16.47-beta-20250715-034905-11207b15",
-    "@blocklet/sdk": "1.16.47-beta-20250715-034905-11207b15",
-    "@blocklet/store": "1.16.47-beta-20250715-034905-11207b15",
+    "@blocklet/meta": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@blocklet/resolver": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@blocklet/sdk": "1.16.47-beta-20250718-075003-940d2e2a",
+    "@blocklet/store": "1.16.47-beta-20250718-075003-940d2e2a",
     "@blocklet/theme": "^3.0.26",
     "@fidm/x509": "^1.2.1",
     "@ocap/mcrypto": "1.20.16",
@@ -116,5 +116,5 @@
     "jest": "^29.7.0",
     "unzipper": "^0.10.11"
   },
-  "gitHead": "f08ee3ac63c808c93967db43445042b3050a015e"
+  "gitHead": "740ffa603c1de73ccf1bd4d0be497c9a9c7ce936"
 }