@abtnode/core 1.16.47-beta-20250715-030340-f15b794e → 1.16.47-beta-20250717-221700-2d886a18
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/blocklet/manager/helper/install-application-from-backup.js +4 -0
- package/lib/blocklet/storage/backup/spaces.js +14 -0
- package/lib/util/docker/docker-backup-pg-blocklet-db.js +157 -0
- package/lib/util/docker/ensure-docker-postgres.js +5 -2
- package/lib/util/env.js +3 -0
- package/package.json +24 -24
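The functional core of this release is a new Docker-based PostgreSQL dump/restore path for blocklet databases. As a quick orientation before the hunks below, here is a minimal sketch of how the new helpers appear to be wired; the call sites and paths come from the diff itself, while the wrapper function names and the example require path are hypothetical:

// Sketch only (not actual package code): how the new helpers are used per the hunks below.
const path = require('path');
const {
  dockerBackupPgBlockletDb,
  dockerRestorePgBlockletDb,
} = require('@abtnode/core/lib/util/docker/docker-backup-pg-blocklet-db');

// During a Spaces backup, dump the app's Postgres database next to its blocklet.db
async function backupAppDatabase(dataDir) {
  const dbPath = path.join(dataDir, 'blocklet.db');
  await dockerBackupPgBlockletDb(dbPath); // no-op unless the app actually uses Postgres
}

// When installing an application from a backup, restore the dump after the data files are in place
async function restoreAppDatabase(dataDir) {
  const dbPath = path.join(dataDir, 'blocklet.db');
  await dockerRestorePgBlockletDb(dbPath); // no-op if no dump file is found
}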

package/lib/blocklet/manager/helper/install-application-from-backup.js
CHANGED

@@ -12,6 +12,7 @@ const { INSTALL_ACTIONS } = require('@abtnode/constant');
 const logger = require('@abtnode/logger')('@abtnode/core:install-app-backup');

 const { validateBlocklet, checkDuplicateAppSk, getAppDirs } = require('../../../util/blocklet');
+const { dockerRestorePgBlockletDb } = require('../../../util/docker/docker-backup-pg-blocklet-db');

 /**
  * backup directory structure

@@ -213,6 +214,9 @@ const installApplicationFromBackup = async ({
      }
      logger.info(`data is ${moveDir ? 'moved' : 'copied'} successfully`);
    }
+
+    const dbPath = path.join(dataDir, 'blocklet.db');
+    await dockerRestorePgBlockletDb(dbPath);
  } catch (error) {
    logger.error('installFromBackup failed', { did, error });


package/lib/blocklet/storage/backup/spaces.js
CHANGED

@@ -41,6 +41,7 @@ const { translate } = require('../../../locales');
 const { getFolderSize, formatMemoryUsage } = require('../utils/disk');
 const { dockerExecChown } = require('../../../util/docker/docker-exec-chown');
 const checkDockerRunHistory = require('../../../util/docker/check-docker-run-history');
+const { dockerBackupPgBlockletDb } = require('../../../util/docker/docker-backup-pg-blocklet-db');

 /**
  * @param {{ appDid: string, appPid: string }} params

@@ -169,6 +170,18 @@ class SpacesBackup extends BaseBackup {
     }
   }

+  async backupPostgres() {
+    if (!this.blocklet) {
+      return;
+    }
+    const dataDir =
+      this.blocklet.environments?.find((v) => v.key === 'BLOCKLET_APP_DATA_DIR')?.value ||
+      path.join(process.env.ABT_NODE_DATA_DIR, 'data', this.blocklet.appPid || this.blocklet.appDid);
+    const dbPath = path.join(dataDir, 'blocklet.db');
+
+    await dockerBackupPgBlockletDb(dbPath);
+  }
+
   /**
    *
    * @returns {Promise<void>}

@@ -188,6 +201,7 @@ class SpacesBackup extends BaseBackup {
     logger.info(this.input.appPid, 'initialize.after', formatMemoryUsage());

     await this.backupChown();
+    await this.backupPostgres();

     logger.info(this.input.appPid, 'export.before', formatMemoryUsage());
     await this.export();
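
With the hunks above applied, the backup sequence in SpacesBackup gains one step. A minimal sketch of the resulting order, showing only the steps visible in this diff (the wrapper below is illustrative, not the actual class):

// Sketch of the call order after this change (not the actual implementation).
async function runSpacesBackup(backup) {
  await backup.backupChown();    // existing step
  await backup.backupPostgres(); // new step: writes a pg dump into the app data dir (if Postgres is used)
  await backup.export();         // runs afterwards, so the dump presumably travels with the exported data
}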

package/lib/util/docker/docker-backup-pg-blocklet-db.js
ADDED

@@ -0,0 +1,157 @@
+const path = require('path');
+const fs = require('fs/promises');
+const { dbPathToDbName } = require('@abtnode/models');
+const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
+const logger = require('@abtnode/logger')('backup-pg-blocklet-db');
+
+const { POSTGRES_CONTAINER_NAME } = require('./ensure-docker-postgres');
+const { hasPostgres } = require('../migration-sqlite-to-postgres');
+
+// Allow up to 1 hour, because pg_dump can take a long time on very large tables
+const baseSpawnOptions = { timeout: 3600_000, retry: 0 };
+
+function getBlockletDataRoot(dbPath) {
+  return path.dirname(path.dirname(path.dirname(dbPath)));
+}
+const getBackupPath = (dbPath) => {
+  return `${dbPath.replace('.db', '')}_pg_backup.gz`;
+};
+
+const checkDbExists = async (pgUrl, dbName) => {
+  const output = await promiseSpawn(
+    `docker exec ${POSTGRES_CONTAINER_NAME} psql "${pgUrl}" -tAc "SELECT 1 FROM pg_database WHERE datname = '${dbName}';"`,
+    undefined,
+    { timeout: 3_000, retry: 2 }
+  );
+  return output.trim() === '1';
+};
+
+const getPgTempDir = (dbName) => {
+  return `/tmp/${dbName}.sql.gz`;
+};
+
+function buildPgUrl(originalUrl, dbName) {
+  const url = new URL(originalUrl);
+  url.pathname = `/${dbName}`; // replace the path
+  return url.toString();
+}
+
+function getPgUrl() {
+  const pgUrl = process.env.ABT_NODE_POSTGRES_URL;
+
+  // If this is the current container, use the container-internal port
+  if (pgUrl.includes('127.0.0.1') || pgUrl.includes('localhost')) {
+    return pgUrl.replace('40408', '5432');
+  }
+
+  return '';
+}
+
+const dockerBackupPgBlockletDb = async (dbPath) => {
+  const dataDir = getBlockletDataRoot(dbPath);
+  if (!hasPostgres(dataDir)) {
+    logger.info('no using postgres, skip backup the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const pgUrl = getPgUrl();
+  if (!pgUrl) {
+    logger.info('no postgres url, skip backup the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const dbName = dbPathToDbName(dbPath);
+  const backupPath = getBackupPath(dbPath);
+
+  const isDbExists = await checkDbExists(pgUrl, dbName);
+  if (!isDbExists) {
+    logger.info('no need to backup the pg blocklet db:', dbName);
+    return {};
+  }
+
+  // Replace the db in pgUrl with dbName
+  const pgUrlForDump = buildPgUrl(pgUrl, dbName);
+
+  logger.info('backup the pg blocklet db start:', dbName);
+  await promiseSpawn(
+    `docker exec ${POSTGRES_CONTAINER_NAME} sh -c 'pg_dump ${pgUrlForDump} | gzip' > ${backupPath}`,
+    {},
+    baseSpawnOptions
+  );
+  logger.info('backup the pg blocklet db cp backup file done:', backupPath);
+  return {
+    backupPath,
+    dbName,
+  };
+};
+
+const dockerRestorePgBlockletDb = async (dbPath) => {
+  const dataDir = getBlockletDataRoot(dbPath);
+  if (!hasPostgres(dataDir)) {
+    logger.info('no using postgres, skip restore the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const pgUrl = getPgUrl();
+  if (!pgUrl) {
+    logger.info('no postgres url, skip restore the pg blocklet db:', dbPath);
+    return {};
+  }
+
+  const dbName = dbPathToDbName(dbPath);
+  const backupPath = getBackupPath(dbPath);
+
+  try {
+    await fs.access(backupPath);
+  } catch (_) {
+    logger.info('no need to restore the pg blocklet db path at:', backupPath);
+    return {};
+  }
+
+  const containerTmpPath = getPgTempDir(dbName);
+
+  // Replace the db in pgUrl with dbName
+  const pgUrlForRestore = buildPgUrl(pgUrl, dbName);
+
+  logger.info('restore the pg blocklet db start:', dbName);
+
+  const isDbExists = await checkDbExists(pgUrl, dbName);
+  if (!isDbExists) {
+    // If the database does not exist, create it; otherwise the direct connection later will fail
+    logger.info('creating missing database before restore:', dbName);
+    await promiseSpawn(
+      `docker exec ${POSTGRES_CONTAINER_NAME} psql "${pgUrl}" -c "CREATE DATABASE \\"${dbName}\\";"`,
+      {},
+      baseSpawnOptions
+    );
+  } else {
+    // If the database exists, clear it
+    logger.info('clearing existing schema in db:', dbName);
+    await promiseSpawn(
+      `docker exec ${POSTGRES_CONTAINER_NAME} psql "${pgUrlForRestore}" -c "DROP SCHEMA public CASCADE; CREATE SCHEMA public;"`,
+      {},
+      baseSpawnOptions
+    );
+  }
+
+  logger.info('restore the pg blocklet db drop schema done:', dbName);
+
+  await promiseSpawn(`docker cp ${backupPath} ${POSTGRES_CONTAINER_NAME}:${containerTmpPath}`, {}, baseSpawnOptions);
+  logger.info('restore the pg blocklet db cp backup file done:', backupPath);
+  await promiseSpawn(
+    `docker exec ${POSTGRES_CONTAINER_NAME} sh -c 'gunzip -c ${containerTmpPath} | psql "${pgUrlForRestore}"'`,
+    {},
+    baseSpawnOptions
+  );
+  logger.info('restore the pg blocklet db done:', dbName);
+
+  return {
+    backupPath,
+    dbName,
+  };
+};
+
+module.exports = {
+  dockerBackupPgBlockletDb,
+  dockerRestorePgBlockletDb,
+};
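
One detail worth calling out from the new module: the dump location is derived from the SQLite path, so for a database at <dataDir>/blocklet.db the gzipped dump lands at <dataDir>/blocklet_pg_backup.gz, and restore is skipped whenever that file is absent. A tiny illustration (standalone sketch, not the module itself; the example path is made up):

// Mirrors getBackupPath above; shown only to make the naming convention concrete.
const getBackupPath = (dbPath) => `${dbPath.replace('.db', '')}_pg_backup.gz`;
console.log(getBackupPath('/abtnode/data/app-did/blocklet.db'));
// -> /abtnode/data/app-did/blocklet_pg_backup.gz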

package/lib/util/docker/ensure-docker-postgres.js
CHANGED

@@ -100,7 +100,9 @@ async function _ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port =

 let lastUrl = '';

-async function ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
+const POSTGRES_CONTAINER_NAME = 'abtnode-postgres';
+
+async function ensureDockerPostgres(dataDir, name = POSTGRES_CONTAINER_NAME, port = 40408, force = false) {
   if (lastUrl) {
     return lastUrl;
   }

@@ -109,7 +111,7 @@ async function ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
 }

 // Stop, and report whether a postgres container exists in docker
-async function stopDockerPostgres(name = 'abtnode-postgres') {
+async function stopDockerPostgres(name = POSTGRES_CONTAINER_NAME) {
   if (!(await checkDockerInstalled())) {
     return false;
   }

@@ -132,4 +134,5 @@ async function stopDockerPostgres(name = 'abtnode-postgres') {
 module.exports = {
   ensureDockerPostgres,
   stopDockerPostgres,
+  POSTGRES_CONTAINER_NAME,
 };

package/lib/util/env.js
CHANGED

@@ -3,6 +3,9 @@ const serverJobBackoffSeconds = process.env.ABT_NODE_JOB_BACKOFF_SECONDS
   : 600;

 const shouldJobBackoff = () => {
+  if (process.env.ABT_NODE_JOB_BACKOFF_SECONDS === '0') {
+    return false;
+  }
   const uptime = process.uptime();
   return uptime <= serverJobBackoffSeconds;
 };
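
The env.js change appears to add an explicit off switch for the job backoff window. A minimal sketch of the observable behavior, under that reading:

// Sketch (not the actual module): ABT_NODE_JOB_BACKOFF_SECONDS === '0' short-circuits the check,
// so shouldJobBackoff() reports false even right after process start; any other value keeps
// the existing uptime-based window (default 600 seconds, per the ternary above).
process.env.ABT_NODE_JOB_BACKOFF_SECONDS = '0';
// shouldJobBackoff() -> false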

package/package.json
CHANGED

@@ -3,7 +3,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.16.47-beta-20250715-030340-f15b794e",
+  "version": "1.16.47-beta-20250717-221700-2d886a18",
   "description": "",
   "main": "lib/index.js",
   "files": [

@@ -19,22 +19,22 @@
   "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
   "license": "Apache-2.0",
   "dependencies": {
-    "@abtnode/analytics": "1.16.47-beta-
-    "@abtnode/auth": "1.16.47-beta-
-    "@abtnode/certificate-manager": "1.16.47-beta-
-    "@abtnode/client": "1.16.47-beta-
-    "@abtnode/constant": "1.16.47-beta-
-    "@abtnode/cron": "1.16.47-beta-
-    "@abtnode/db-cache": "1.16.47-beta-
-    "@abtnode/docker-utils": "1.16.47-beta-
-    "@abtnode/logger": "1.16.47-beta-
-    "@abtnode/models": "1.16.47-beta-
-    "@abtnode/queue": "1.16.47-beta-
-    "@abtnode/rbac": "1.16.47-beta-
-    "@abtnode/router-provider": "1.16.47-beta-
-    "@abtnode/static-server": "1.16.47-beta-
-    "@abtnode/timemachine": "1.16.47-beta-
-    "@abtnode/util": "1.16.47-beta-
+    "@abtnode/analytics": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/auth": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/certificate-manager": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/client": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/constant": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/cron": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/db-cache": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/docker-utils": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/logger": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/models": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/queue": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/rbac": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/router-provider": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/static-server": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/timemachine": "1.16.47-beta-20250717-221700-2d886a18",
+    "@abtnode/util": "1.16.47-beta-20250717-221700-2d886a18",
     "@arcblock/did": "1.20.16",
     "@arcblock/did-auth": "1.20.16",
     "@arcblock/did-ext": "1.20.16",

@@ -45,14 +45,14 @@
     "@arcblock/pm2-events": "^0.0.5",
     "@arcblock/validator": "1.20.16",
     "@arcblock/vc": "1.20.16",
-    "@blocklet/constant": "1.16.47-beta-
+    "@blocklet/constant": "1.16.47-beta-20250717-221700-2d886a18",
     "@blocklet/did-space-js": "^1.1.7",
-    "@blocklet/env": "1.16.47-beta-
+    "@blocklet/env": "1.16.47-beta-20250717-221700-2d886a18",
     "@blocklet/error": "^0.2.5",
-    "@blocklet/meta": "1.16.47-beta-
-    "@blocklet/resolver": "1.16.47-beta-
-    "@blocklet/sdk": "1.16.47-beta-
-    "@blocklet/store": "1.16.47-beta-
+    "@blocklet/meta": "1.16.47-beta-20250717-221700-2d886a18",
+    "@blocklet/resolver": "1.16.47-beta-20250717-221700-2d886a18",
+    "@blocklet/sdk": "1.16.47-beta-20250717-221700-2d886a18",
+    "@blocklet/store": "1.16.47-beta-20250717-221700-2d886a18",
     "@blocklet/theme": "^3.0.26",
     "@fidm/x509": "^1.2.1",
     "@ocap/mcrypto": "1.20.16",

@@ -116,5 +116,5 @@
     "jest": "^29.7.0",
     "unzipper": "^0.10.11"
   },
-  "gitHead": "
+  "gitHead": "e8c5b883906dd53ba9c288cd5609cf7d023d90b3"
 }