@abtnode/core 1.16.46-beta-20250703-024219-4029ee97 → 1.16.46-beta-20250704-234926-09d872ad
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/blocklet/migration-dist/migration.cjs +15 -3
- package/lib/blocklet/project/connect-to-store.js +1 -1
- package/lib/blocklet/project/index.js +11 -9
- package/lib/index.js +1 -1
- package/lib/migrations/index.js +43 -12
- package/lib/states/audit-log.js +2 -2
- package/lib/states/backup.js +26 -29
- package/lib/states/index.js +1 -14
- package/lib/states/notification.js +50 -42
- package/lib/states/user.js +10 -9
- package/lib/util/docker/docker-network.js +3 -2
- package/lib/util/docker/ensure-docker-postgres.js +135 -0
- package/lib/util/migration-sqlite-to-postgres.js +408 -0
- package/package.json +37 -37
package/lib/blocklet/migration-dist/migration.cjs
CHANGED

@@ -764,6 +764,7 @@ module.exports = Object.freeze({
     launchBlockletByLauncher: true,
     launchBlockletWithoutWallet: true,
     backupBlocklet: true,
+    destroySelf: true,
   },
   NOTIFICATION_SEND_STATUS: {
     PENDING: 0, // pending send
@@ -955,7 +956,7 @@ const initLogger =
     ),
   });

-  …
+  const logToFile = () => {
     logger.level = level || 'info';
     if (!fs.existsSync(logDir)) {
      fs.mkdirSync(logDir, { recursive: true });
@@ -1011,7 +1012,9 @@ const initLogger =

      addedRejectionExceptionTransport = true;
    }
-  }
+  };
+
+  const logToConsole = () => {
    logger.level = level || 'debug';
    const transport = new transports.Console({
      format: format.combine(format.colorize({ all: true })),
@@ -1024,6 +1027,15 @@ const initLogger =
      logger.exceptions.handle(transport);
      addedRejectionExceptionTransport = true;
    }
+  };
+
+  if (process.env.ABT_LOG_TO_FILE === 'true') {
+    logToFile();
+    logToConsole();
+  } else if (process.env.NODE_ENV === 'production') {
+    logToFile();
+  } else {
+    logToConsole();
  }

  return logger;
@@ -38890,7 +38902,7 @@ module.exports = require("zlib");
 /***/ ((module) => {

 "use strict";
-module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.16.45","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib","test":"node tools/jest.js","coverage":"npm run test -- --coverage"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.16.45","@abtnode/auth":"1.16.45","@abtnode/certificate-manager":"1.16.45","@abtnode/client":"1.16.45","@abtnode/constant":"1.16.45","@abtnode/cron":"1.16.45","@abtnode/db-cache":"1.16.45","@abtnode/docker-utils":"1.16.45","@abtnode/logger":"1.16.45","@abtnode/models":"1.16.45","@abtnode/queue":"1.16.45","@abtnode/rbac":"1.16.45","@abtnode/router-provider":"1.16.45","@abtnode/static-server":"1.16.45","@abtnode/timemachine":"1.16.45","@abtnode/util":"1.16.45","@arcblock/did":"1.20.…
+module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.16.45","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib","test":"node tools/jest.js","coverage":"npm run test -- --coverage"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.16.45","@abtnode/auth":"1.16.45","@abtnode/certificate-manager":"1.16.45","@abtnode/client":"1.16.45","@abtnode/constant":"1.16.45","@abtnode/cron":"1.16.45","@abtnode/db-cache":"1.16.45","@abtnode/docker-utils":"1.16.45","@abtnode/logger":"1.16.45","@abtnode/models":"1.16.45","@abtnode/queue":"1.16.45","@abtnode/rbac":"1.16.45","@abtnode/router-provider":"1.16.45","@abtnode/static-server":"1.16.45","@abtnode/timemachine":"1.16.45","@abtnode/util":"1.16.45","@arcblock/did":"1.20.15","@arcblock/did-auth":"1.20.15","@arcblock/did-ext":"1.20.15","@arcblock/did-motif":"^1.1.13","@arcblock/did-util":"1.20.15","@arcblock/event-hub":"1.20.15","@arcblock/jwt":"1.20.15","@arcblock/pm2-events":"^0.0.5","@arcblock/validator":"1.20.15","@arcblock/vc":"1.20.15","@blocklet/constant":"1.16.45","@blocklet/did-space-js":"^1.1.0","@blocklet/env":"1.16.45","@blocklet/error":"^0.2.5","@blocklet/meta":"1.16.45","@blocklet/resolver":"1.16.45","@blocklet/sdk":"1.16.45","@blocklet/store":"1.16.45","@blocklet/theme":"^3.0.14","@fidm/x509":"^1.2.1","@ocap/mcrypto":"1.20.15","@ocap/util":"1.20.15","@ocap/wallet":"1.20.15","@slack/webhook":"^5.0.4","archiver":"^7.0.1","axios":"^1.7.9","axon":"^2.0.3","chalk":"^4.1.2","cross-spawn":"^7.0.3","dayjs":"^1.11.13","deep-diff":"^1.0.2","detect-port":"^1.5.1","envfile":"^7.1.0","escape-string-regexp":"^4.0.0","fast-glob":"^3.3.2","filesize":"^10.1.1","flat":"^5.0.2","fs-extra":"^11.2.0","get-port":"^5.1.1","hasha":"^5.2.2","is-base64":"^1.1.0","is-cidr":"4","is-ip":"3","is-url":"^1.2.4","joi":"17.12.2","joi-extension-semver":"^5.0.0","js-yaml":"^4.1.0","kill-port":"^2.0.1","lodash":"^4.17.21","node-stream-zip":"^1.15.0","p-all":"^3.0.0","p-limit":"^3.1.0","p-map":"^4.0.0","p-retry":"^4.6.2","p-wait-for":"^3.2.0","rate-limiter-flexible":"^5.0.5","read-last-lines":"^1.8.0","semver":"^7.6.3","sequelize":"^6.35.0","shelljs":"^0.8.5","slugify":"^1.6.6","ssri":"^8.0.1","stream-throttle":"^0.1.3","stream-to-promise":"^3.0.0","systeminformation":"^5.23.3","tail":"^2.2.4","tar":"^6.1.11","transliteration":"^2.3.5","ua-parser-js":"^1.0.2","ufo":"^1.5.3","uuid":"^9.0.1","valid-url":"^1.0.9","which":"^2.0.2","xbytes":"^1.8.0"},"devDependencies":{"expand-tilde":"^2.0.2","express":"^4.18.2","jest":"^29.7.0","unzipper":"^0.10.11"},"gitHead":"e5764f753181ed6a7c615cd4fc6682aacf0cb7cd"}');

 /***/ }),

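The logger hunks above extract the file and console transports into `logToFile`/`logToConsole` closures and select them by environment. A minimal sketch of the resulting dispatch (the two functions below are stand-ins for the real closures inside `initLogger`; only the env-based selection is taken from the diff):

// Sketch only: the env-based transport selection added in this diff.
const logToFile = () => { /* attach winston file transports, level 'info' */ };
const logToConsole = () => { /* attach a colorized Console transport, level 'debug' */ };

if (process.env.ABT_LOG_TO_FILE === 'true') {
  // explicit opt-in: log to file and console at the same time
  logToFile();
  logToConsole();
} else if (process.env.NODE_ENV === 'production') {
  logToFile();
} else {
  logToConsole();
}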
package/lib/blocklet/project/connect-to-store.js
CHANGED

@@ -56,7 +56,7 @@ const connectToStore = ({ did, projectId, storeName, storeId, storeUrl, manager,

      await projectState.updateProject(projectId, project);
    } catch (error) {
-      reject(error);
+      reject(new Error(`Failed to connect to store: ${error.message}`));
    }
  });
 };

package/lib/blocklet/project/index.js
CHANGED

@@ -110,16 +110,18 @@ const getProjects = async ({ did, manager, componentDid, showAccessToken, tenant
 const getProject = async ({ did, projectId, messageId, showAccessToken, manager }) => {
   const { projectState } = await manager._getProjectState(did);
   const project = await projectState.findOne(messageId ? { messageId } : { id: projectId });
-  if (…
-  …
-  …
-  …
-  …
+  if (project) {
+    if (!showAccessToken) {
+      project.connectedStores?.forEach((store) => {
+        store.accessToken = store.accessToken ? '__encrypted__' : '';
+      });
+    }

-  …
-  …
-  …
-  …
+    if (!showAccessToken) {
+      project.connectedEndpoints?.forEach((endpoint) => {
+        endpoint.accessKeySecret = endpoint.accessKeySecret ? '__encrypted__' : '';
+      });
+    }
  }

  return project;
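With the `getProject` change above, store and endpoint secrets are masked unless the caller explicitly opts in. A hedged usage sketch (caller-side names are placeholders):

// Sketch only: what a caller sees after this change.
const project = await getProject({ did, projectId, showAccessToken: false, manager });
// When a token/secret exists it is replaced by a sentinel instead of being returned:
//   project.connectedStores?.[0].accessToken        === '__encrypted__'
//   project.connectedEndpoints?.[0].accessKeySecret === '__encrypted__'
// Pass showAccessToken: true to receive the stored values unmasked.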
package/lib/index.js
CHANGED

@@ -327,7 +327,7 @@ function ABTNode(options) {
  blockletManager.resetSiteByDid = resetSiteByDid;

  // Generate an on node ready callback
-  const onStatesReady = createStateReadyQueue({ states, options, dataDirs });
+  const onStatesReady = createStateReadyQueue({ states: states.allStates, options, dataDirs });
  onStatesReady(createStateReadyHandler(routingSnapshot));
  const domainStatus = new DomainStatus({ routerManager, states });

package/lib/migrations/index.js
CHANGED

@@ -7,6 +7,8 @@ const logger = require('@abtnode/logger')('@abtnode/core:migration');
 const { doSchemaMigration, createSequelize } = require('@abtnode/models');
 const { getDbFilePath } = require('../util');
 const getMigrationScripts = require('../util/get-migration-scripts');
+const { ensureDockerPostgres } = require('../util/docker/ensure-docker-postgres');
+const { migrationSqliteToPostgres } = require('../util/migration-sqlite-to-postgres');

 const BACKUP_FILE_DB = 'server.db';
 const BACKUP_FILE_CONFIG = 'config.yml';
@@ -146,27 +148,41 @@ const runSchemaMigrations = async ({
  blocklets = [],
  printInfo = console.info, // eslint-disable-line
  printSuccess = console.info, // eslint-disable-line
+  migrationPostgres = false,
 }) => {
  if (!process.env.ABT_NODE_CACHE_SQLITE_PATH) {
    process.env.ABT_NODE_CACHE_SQLITE_PATH = path.join(dataDir, 'core', 'db-cache.db');
  }
+  if (!process.env.ABT_NODE_POSTGRES_URL) {
+    const postgresUrl = await ensureDockerPostgres(dataDir, 'abtnode-postgres', 40408, migrationPostgres);
+    process.env.ABT_NODE_POSTGRES_URL = postgresUrl;
+  }
+  if (migrationPostgres && !process.env.ABT_NODE_POSTGRES_URL) {
+    throw new Error('Postgres URL is not set, please set env ABT_NODE_POSTGRES_URL or ensure docker is running');
+  }
+  const dbPaths = {
+    server: getDbFilePath(path.join(dataDir, 'core/server.db')),
+    service: getDbFilePath(path.join(dataDir, 'services/service.db')),
+    certificateManagers: [],
+    blocklets: [],
+  };

  // migrate server schema
-  …
-  await doSchemaMigration(…
-  printSuccess(`Server schema successfully migrated: ${…
-  …
+  dbPaths.server = getDbFilePath(path.join(dataDir, 'core/server.db'));
+  await doSchemaMigration(dbPaths.server, 'server');
+  printSuccess(`Server schema successfully migrated: ${dbPaths.server}`);
  // migrate service schema
-  …
-  await doSchemaMigration(…
-  printSuccess(`Service schema successfully migrated: ${…
+  dbPaths.service = getDbFilePath(path.join(dataDir, 'services/service.db'));
+  await doSchemaMigration(dbPaths.service, 'service');
+  printSuccess(`Service schema successfully migrated: ${dbPaths.service}`);

  // migrate blocklet schema
  for (let i = 0; i < blocklets.length; i++) {
    const blocklet = blocklets[i];
    const env = blocklet.environments.find((x) => x.key === 'BLOCKLET_DATA_DIR');
    if (env) {
-      filePath = getDbFilePath(path.join(env.value, 'blocklet.db'));
+      const filePath = getDbFilePath(path.join(env.value, 'blocklet.db'));
+      dbPaths.blocklets.push(filePath);
      await doSchemaMigration(filePath, 'blocklet');
      printSuccess(`Blocklet schema successfully migrated: ${blocklet.appPid}: ${filePath}`);
    } else {
@@ -176,17 +192,26 @@ const runSchemaMigrations = async ({

  // migrate certificate manager schema
  for (let i = 0; i < MODULES.length; i++) {
-    filePath = getDbFilePath(path.join(dataDir, `modules/${MODULES[i]}/module.db`));
+    const filePath = getDbFilePath(path.join(dataDir, `modules/${MODULES[i]}/module.db`));
    await doSchemaMigration(filePath, MODULES[i]);
+    dbPaths.certificateManagers.push(filePath);
    printSuccess(`${MODULES[i]} schema successfully migrated: ${filePath}`);
  }
+
+  if (migrationPostgres) {
+    await migrationSqliteToPostgres(dataDir, dbPaths);
+  }
 };

-const closeDatabaseConnections = ({
+const closeDatabaseConnections = async ({
  dataDir,
  blocklets = [],
  printInfo = console.info, // eslint-disable-line
 }) => {
+  if (!process.env.ABT_NODE_POSTGRES_URL) {
+    const postgresUrl = await ensureDockerPostgres(dataDir);
+    process.env.ABT_NODE_POSTGRES_URL = postgresUrl;
+  }
  const dataFiles = [
    getDbFilePath(path.join(dataDir, 'core/server.db')),
    getDbFilePath(path.join(dataDir, 'services/service.db')),
@@ -204,8 +229,14 @@ const closeDatabaseConnections = ({

  const connections = dataFiles.map((x) => createSequelize(x));
  connections.forEach((x) => {
-    …
-    …
+    try {
+      x.close();
+    } catch (err) {
+      if (err.message.includes('was closed!')) {
+        return;
+      }
+      throw new Error(`Failed to close database connection: ${x}, error: ${err.message}`);
+    }
  });
 };

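Taken together, the migration changes above add one flow: ensure a Postgres URL (starting the bundled Docker container when needed), run the schema migrations while collecting every SQLite db path into `dbPaths`, then copy the data across. A hedged sketch of driving it, assuming `runSchemaMigrations` is exported from this module (`dataDir` and `blocklets` are placeholders):

// Sketch only, under the assumption that lib/migrations exports runSchemaMigrations.
const { runSchemaMigrations } = require('@abtnode/core/lib/migrations');

async function migrateToPostgres(dataDir, blocklets) {
  // Optional: point at an existing server instead of the bundled Docker container.
  // process.env.ABT_NODE_POSTGRES_URL = 'postgresql://postgres:postgres@localhost:40408/postgres';
  await runSchemaMigrations({
    dataDir,
    blocklets,
    migrationPostgres: true, // new flag: also copy SQLite data into Postgres afterwards
  });
}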
package/lib/states/audit-log.js
CHANGED

@@ -407,7 +407,7 @@ const getLogContent = async (action, args, context, result, info, node) => {
      return `updated tag ${args.tag.id}`;
    case 'deleteTag':
      return `deleted tag ${args.tag.id}`;
-    case '…
+    case 'destroySelf':
      return `user ${result.did} initiated account deletion`;

    // accessKeys

@@ -647,7 +647,7 @@ const getLogCategory = (action) => {
    case 'loginByMaster':
    case 'loginFederatedMaster':
    case 'migrateFederatedAccount':
-    case '…
+    case 'destroySelf':
      return 'team';

    // accessKeys
package/lib/states/backup.js
CHANGED

@@ -184,20 +184,26 @@ class BackupState extends BaseState {
   * @param {{ did: string, startTime: string, endTime: string }} options
   * @return {Promise<Array<{date: string, successCount: number, errorCount: number}>>}
   */
-  …
-  const …
-  // …
-  …
-  …
-  …
-  …
-  '…
-  '…
-  …
-  …
-  …
-  …
-  …
+  getBlockletBackupSummary({ did, startTime, endTime, timezone }) {
+    const { sequelize } = this.model;
+    const dialect = sequelize.getDialect(); // 'postgres' or 'sqlite'
+    let dateExpr;
+    let groupExpr;
+
+    if (dialect === 'postgres') {
+      const tzTs = sequelize.fn('timezone', timezone, sequelize.col('createdAt'));
+      const dayTs = sequelize.fn('date_trunc', 'day', tzTs);
+      dateExpr = sequelize.fn('to_char', dayTs, 'YYYY-MM-DD');
+      groupExpr = dayTs;
+    } else {
+      // SQLite: datetime + strftime
+      // SQLite's datetime(col, '+X minutes') already returns text in UTC+offset
+      const offset = dayjs.tz(dayjs.utc(), timezone).utcOffset();
+      const dt = sequelize.fn('datetime', sequelize.col('createdAt'), `${offset} minutes`);
+      dateExpr = sequelize.fn('strftime', '%Y-%m-%d', dt);
+      groupExpr = dt;
+    }
+
    const options = {
      where: {
        appPid: did,

@@ -205,25 +211,16 @@ class BackupState extends BaseState {
        status: { [Op.in]: [BACKUPS.STATUS.SUCCEEDED, BACKUPS.STATUS.FAILED] },
      },
      attributes: [
-        […
-        […
-        …
-        'successCount',
-        ],
-        [
-        this.model.sequelize.fn('sum', this.model.sequelize.literal('CASE WHEN status != 0 THEN 1 ELSE 0 END')),
-        'errorCount',
-        ],
+        [dateExpr, 'date'],
+        [sequelize.fn('sum', sequelize.literal('CASE WHEN status = 0 THEN 1 ELSE 0 END')), 'successCount'],
+        [sequelize.fn('sum', sequelize.literal('CASE WHEN status != 0 THEN 1 ELSE 0 END')), 'errorCount'],
      ],
-      group: […
-      order: [[…
+      group: [groupExpr],
+      order: [[groupExpr, 'DESC']],
      raw: true,
    };

-    …
-    …
-    // convert the results into a friendlier format
-    return results;
+    return this.model.findAll(options);
  }
 }

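The SQLite branch above shifts `createdAt` by the timezone's minute offset before formatting. A small sketch of how that offset is computed with dayjs (requires the `utc` and `timezone` plugins, which the file presumably already loads):

// Sketch: the offset fed into SQLite's datetime(col, '<offset> minutes').
const dayjs = require('dayjs');
dayjs.extend(require('dayjs/plugin/utc'));
dayjs.extend(require('dayjs/plugin/timezone'));

const offset = dayjs.tz(dayjs.utc(), 'Asia/Shanghai').utcOffset();
console.log(offset); // 480, so the SQL becomes datetime("createdAt", '480 minutes')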
package/lib/states/index.js
CHANGED

@@ -66,18 +66,5 @@ const init = (dataDirs, config = {}) => {
    blacklist: blacklistState,
  };
 };
-…
- * @type {{
- *   backup: import('./backup'),
- *   blocklet: import('./blocklet'),
- *   blockletExtras: import('./blocklet-extras'),
- *   notification: import('./notification'),
- *   notificationReceiver: import('./notification-receiver'),
- *   job: import('./job'),
- *   node: import('./node'),
- *   blacklist: import('./blacklist'),
- *   runtimeInsight: import('./runtime-insight'),
- *   [key: string]: any
- * }}
- */
+
 module.exports = createStateFactory(init, models);
package/lib/states/notification.js
CHANGED

@@ -24,6 +24,13 @@ const readUnreadInputValidation = ({ notificationIds, receiver }, context) => {
  return receiverDidValidation(receiver, context);
 };

+const safeJsonParse = (json) => {
+  if (typeof json !== 'string') {
+    return json;
+  }
+  return JSON.parse(json);
+};
+
 /**
  * @extends BaseState<import('@abtnode/models').NotificationState>
  */

@@ -222,12 +229,12 @@ class NotificationState extends BaseState {

    // build the base query conditions
    // FIXME: historical data needs filtering here to avoid rows whose type is not notification
-    const conditions = […
+    const conditions = ["n.type = 'notification'"];
    const replacements = { receiver };

    if (typeof read === 'boolean') {
      conditions.push('nr.read = :read');
-      replacements.read = read…
+      replacements.read = read;
    }

    if (severity && severity.length) {

@@ -236,7 +243,7 @@ class NotificationState extends BaseState {
    }

    if (entityId && entityId.length) {
-      conditions.push('n.entityId IN (:entityId)');
+      conditions.push('n."entityId" IN (:entityId)');
      replacements.entityId = entityId;
    }

@@ -261,7 +268,7 @@ class NotificationState extends BaseState {
    const countQuery = `
      SELECT COUNT(DISTINCT n.id) as total
      FROM notifications n
-      INNER JOIN notification_receivers nr ON n.id = nr.notificationId
+      INNER JOIN notification_receivers nr ON n.id = nr."notificationId"
      ${whereClause}
      AND nr.receiver = :receiver
    `;

@@ -270,32 +277,32 @@ class NotificationState extends BaseState {
      SELECT DISTINCT
      n.*,
      nr.id as receiver_id,
-      nr.notificationId as receiver_notificationId,
-      nr.receiver as receiver_receiver,
-      nr.read as receiver_read,
-      nr.readAt as receiver_readAt,
-      nr.walletSendStatus as receiver_walletSendStatus,
-      nr.walletSendAt as receiver_walletSendAt,
-      nr.pushKitSendStatus as receiver_pushKitSendStatus,
-      nr.pushKitSendAt as receiver_pushKitSendAt,
-      nr.emailSendStatus as receiver_emailSendStatus,
-      nr.emailSendAt as receiver_emailSendAt,
-      nr.createdAt as receiver_createdAt,
-      nr.walletSendFailedReason as receiver_walletSendFailedReason,
-      nr.walletSendRecord as receiver_walletSendRecord,
-      nr.pushKitSendFailedReason as receiver_pushKitSendFailedReason,
-      nr.pushKitSendRecord as receiver_pushKitSendRecord,
-      nr.emailSendFailedReason as receiver_emailSendFailedReason,
-      nr.emailSendRecord as receiver_emailSendRecord,
-      nr.webhook as receiver_webhook,
-      nr.email as receiver_email,
-      nr.webhookUrls as receiver_webhookUrls,
-      nr.deviceId as receiver_deviceId
+      nr."notificationId" as receiver_notificationId,
+      nr."receiver" as receiver_receiver,
+      nr."read" as receiver_read,
+      nr."readAt" as receiver_readAt,
+      nr."walletSendStatus" as receiver_walletSendStatus,
+      nr."walletSendAt" as receiver_walletSendAt,
+      nr."pushKitSendStatus" as receiver_pushKitSendStatus,
+      nr."pushKitSendAt" as receiver_pushKitSendAt,
+      nr."emailSendStatus" as receiver_emailSendStatus,
+      nr."emailSendAt" as receiver_emailSendAt,
+      nr."createdAt" as receiver_createdAt,
+      nr."walletSendFailedReason" as receiver_walletSendFailedReason,
+      nr."walletSendRecord" as receiver_walletSendRecord,
+      nr."pushKitSendFailedReason" as receiver_pushKitSendFailedReason,
+      nr."pushKitSendRecord" as receiver_pushKitSendRecord,
+      nr."emailSendFailedReason" as receiver_emailSendFailedReason,
+      nr."emailSendRecord" as receiver_emailSendRecord,
+      nr."webhook" as receiver_webhook,
+      nr."email" as receiver_email,
+      nr."webhookUrls" as receiver_webhookUrls,
+      nr."deviceId" as receiver_deviceId
      FROM notifications n
-      INNER JOIN notification_receivers nr ON n.id = nr.notificationId
+      INNER JOIN notification_receivers nr ON n.id = nr."notificationId"
      ${whereClause}
      AND nr.receiver = :receiver
-      ORDER BY n.createdAt DESC
+      ORDER BY n."createdAt" DESC
      LIMIT :limit OFFSET :offset
    `;

@@ -327,25 +334,25 @@ class NotificationState extends BaseState {

    // handle the receiver's JSON fields
    if (receiverFields.walletSendRecord) {
-      receiverFields.walletSendRecord = …
+      receiverFields.walletSendRecord = safeJsonParse(receiverFields.walletSendRecord);
    }
    if (receiverFields.pushKitSendRecord) {
-      receiverFields.pushKitSendRecord = …
+      receiverFields.pushKitSendRecord = safeJsonParse(receiverFields.pushKitSendRecord);
    }
    if (receiverFields.emailSendRecord) {
-      receiverFields.emailSendRecord = …
+      receiverFields.emailSendRecord = safeJsonParse(receiverFields.emailSendRecord);
    }
    if (receiverFields.webhook) {
-      receiverFields.webhook = …
+      receiverFields.webhook = safeJsonParse(receiverFields.webhook);
    }

    return {
      ...row,
-      attachments: row.attachments ? …
-      actions: row.actions ? …
-      blocks: row.blocks ? …
-      data: row.data ? …
-      activity: row.activity ? …
+      attachments: row.attachments ? safeJsonParse(row.attachments) : [],
+      actions: row.actions ? safeJsonParse(row.actions) : [],
+      blocks: row.blocks ? safeJsonParse(row.blocks) : [],
+      data: row.data ? safeJsonParse(row.data) : {},
+      activity: row.activity ? safeJsonParse(row.activity) : {},
      read: Boolean(row.read),
      receivers: [receiverFields], // carry the receiver data as an array
    };

@@ -655,10 +662,11 @@ class NotificationState extends BaseState {
      notificationId,
    };
    if (userName) {
+      const likeOp = this.model.sequelize.getDialect() === 'postgres' ? Op.iLike : Op.like;
      const users = await this.user.find({
        where: {
          fullName: {
-            […
+            [likeOp]: `%${userName}%`,
          },
        },
        attributes: ['did'],

@@ -845,12 +853,12 @@ class NotificationState extends BaseState {

  async getUnreadNotificationCount({ receiver, severity, componentDid, entityId, source } = {}) {
    // build the base query conditions
-    const conditions = […
+    const conditions = ["n.type = 'notification'"];
    const replacements = { receiver };

    // force read to false so that only unread messages are counted
    conditions.push('nr.read = :read');
-    replacements.read = …
+    replacements.read = false;

    if (severity && severity.length) {
      conditions.push('n.severity IN (:severity)');

@@ -858,12 +866,12 @@ class NotificationState extends BaseState {
    }

    if (componentDid && componentDid.length) {
-      conditions.push('n.componentDid IN (:componentDid)');
+      conditions.push('n."componentDid" IN (:componentDid)');
      replacements.componentDid = componentDid;
    }

    if (entityId && entityId.length) {
-      conditions.push('n.entityId IN (:entityId)');
+      conditions.push('n."entityId" IN (:entityId)');
      replacements.entityId = entityId;
    }

@@ -887,7 +895,7 @@ class NotificationState extends BaseState {
    const countQuery = `
      SELECT COUNT(DISTINCT n.id) as total
      FROM notifications n
-      INNER JOIN notification_receivers nr ON n.id = nr.notificationId
+      INNER JOIN notification_receivers nr ON n.id = nr."notificationId"
      ${whereClause}
      AND nr.receiver = :receiver
    `;
package/lib/states/user.js
CHANGED

@@ -273,7 +273,8 @@ class User extends ExtendBase {
    if (isValid(search)) {
      where.did = toAddress(search);
    } else {
-      …
+      const likeOp = this.model.sequelize.getDialect() === 'postgres' ? Op.iLike : Op.like;
+      where[Op.or] = [{ fullName: { [likeOp]: `%${search}%` } }, { email: { [Op.like]: `%${search}%` } }];
    }
  }

@@ -300,9 +301,9 @@ class User extends ExtendBase {
    // LIMIT ${pageSize} OFFSET ${offset}
    const subQuery = `
 WITH RECURSIVE UserTree(did,inviter,generation,createdAt) AS (
-SELECT did,inviter,generation,createdAt FROM users WHERE inviter="${exist.did}"
+SELECT did,inviter,generation,"createdAt" FROM users WHERE inviter="${exist.did}"
 UNION ALL
-SELECT child.did,child.inviter,child.generation,child.createdAt FROM users AS child INNER JOIN UserTree AS parent ON (child.inviter=parent.did) ORDER BY child.createdAt DESC
+SELECT child.did,child.inviter,child.generation,child."createdAt" FROM users AS child INNER JOIN UserTree AS parent ON (child.inviter=parent.did) ORDER BY child."createdAt" DESC
 )
 SELECT did,inviter,generation FROM UserTree ${generation > 0 ? `WHERE generation=${(exist.generation > 0 ? exist.generation : 0) + generation}` : ''}`.trim();
    const children = await this.query(subQuery);

@@ -350,7 +351,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
    replacements.status = PASSPORT_STATUS.VALID;
    if (role === '$none') {
      where.did = {
-        [Op.notIn]: Sequelize.literal('(SELECT DISTINCT userDid FROM passports WHERE status = :status)'),
+        [Op.notIn]: Sequelize.literal('(SELECT DISTINCT "userDid" FROM passports WHERE status = :status)'),
      };
    } else if (role === '$blocked') {
      where.approved = false;

@@ -358,7 +359,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
      replacements.role = role;
      where.did = {
        [Op.in]: Sequelize.literal(
-          '(SELECT DISTINCT userDid FROM passports WHERE name = :role AND status = :status)'
+          '(SELECT DISTINCT "userDid" FROM passports WHERE name = :role AND status = :status)'
        ),
      };
    }

@@ -426,7 +427,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
    return this.count({
      where: {
        did: {
-          [Op.notIn]: Sequelize.literal('(SELECT DISTINCT userDid FROM passports WHERE status = :status)'),
+          [Op.notIn]: Sequelize.literal('(SELECT DISTINCT "userDid" FROM passports WHERE status = :status)'),
        },
      },
      replacements: { status: PASSPORT_STATUS.VALID },

@@ -447,7 +448,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
      where: {
        did: {
          [Op.in]: Sequelize.literal(
-            '(SELECT DISTINCT userDid FROM passports WHERE name = :name AND status = :status)'
+            '(SELECT DISTINCT "userDid" FROM passports WHERE name = :name AND status = :status)'
          ),
        },
      },

@@ -457,7 +458,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();

  async getOwnerDids() {
    const result = await this.passport.query(
-      `SELECT DISTINCT…
+      `SELECT DISTINCT"userDid" FROM passports WHERE name='${ROLES.OWNER}' AND status = '${PASSPORT_STATUS.VALID}' ORDER BY "issuanceDate" ASC LIMIT 1`
    );
    return result.map((x) => x.userDid);
  }

@@ -784,7 +785,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
      approved: true,
      did: {
        [Op.in]: Sequelize.literal(
-          '(SELECT DISTINCT userDid FROM passports WHERE name IN (:roles) AND status = :status)'
+          '(SELECT DISTINCT "userDid" FROM passports WHERE name IN (:roles) AND status = :status)'
        ),
      },
    };
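`Op.iLike` (case-insensitive LIKE) only exists on Postgres, while SQLite's plain LIKE is already case-insensitive for ASCII, which is what the dialect switch above relies on. The pattern in isolation (a sketch; `sequelize` is assumed to be an existing Sequelize instance):

const { Op } = require('sequelize');

// Sketch of the dialect-aware operator selection used in user.js and notification.js.
function nameFilter(sequelize, search) {
  const likeOp = sequelize.getDialect() === 'postgres' ? Op.iLike : Op.like;
  return { fullName: { [likeOp]: `%${search}%` } };
}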
package/lib/util/docker/docker-network.js
CHANGED

@@ -74,8 +74,9 @@ async function removeDockerNetwork(dockerNetworkName) {
      await promiseSpawn(`docker network rm ${dockerNetworkName}-internal`);
    }
    logger.info(`docker remove network ${dockerNetworkName} done`);
-  } catch (…
-  …
+  } catch (_) {
+    // no need to log: the removals above are best-effort, and when docker is not enabled no docker network exists
+    // logger.error(`Error remove network ${dockerNetworkName}:`, error);
  }
 }

package/lib/util/docker/ensure-docker-postgres.js
ADDED

@@ -0,0 +1,135 @@
+/* eslint-disable no-await-in-loop */
+const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
+const logger = require('@abtnode/logger')('@abtnode/ensure-docker-postgres');
+const path = require('path');
+const fs = require('fs');
+const { Sequelize } = require('sequelize');
+
+const { checkDockerInstalled } = require('./check-docker-installed');
+const { hasPostgres } = require('../migration-sqlite-to-postgres');
+
+const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+async function waitForPostgresReady(url, timeoutMs = 30_000) {
+  const start = Date.now();
+  let lastError = null;
+
+  while (Date.now() - start < timeoutMs) {
+    const sequelize = new Sequelize(url, { logging: false });
+    try {
+      await sequelize.authenticate();
+      await sequelize.close();
+      return true;
+    } catch (err) {
+      lastError = err;
+      await new Promise((r) => setTimeout(r, 500));
+    }
+  }
+
+  throw new Error(`Postgres did not become ready in time:\n${lastError?.message ?? 'Unknown error'}`);
+}
+async function _ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
+  if (!dataDir) {
+    return '';
+  }
+
+  if (process.env.ABT_NODE_IGNORE_USE_POSTGRES === 'true') {
+    return '';
+  }
+
+  if (!(await checkDockerInstalled())) {
+    return '';
+  }
+
+  if (!hasPostgres(dataDir) && !force) {
+    return '';
+  }
+
+  if (process.env.ABT_NODE_IGNORE_RESTART_POSTGRES !== 'true') {
+    await stopDockerPostgres(name);
+    await sleep(1000);
+  }
+
+  // 1. Check whether a container with this name is already running
+  const checkRunningCmd = `docker ps --filter "name=${name}" --format "{{.Names}}"`;
+  // 2. If not running, check whether a stopped container with this name exists
+  const checkAllCmd = `docker ps -a --filter "name=${name}" --format "{{.Names}}"`;
+  // 3. If it neither runs nor exists, create a new container with a memory limit and basic configuration
+  const dbPath = path.join(dataDir, 'core', 'postgres');
+  if (!fs.existsSync(path.join(dataDir, 'core'))) {
+    fs.mkdirSync(path.join(dataDir, 'core'), { recursive: true });
+  }
+
+  const runCmd = [
+    'docker run -d',
+    `--name ${name}`,
+    `-p 127.0.0.1:${port}:5432`,
+    `-v ${dbPath}:/var/lib/postgresql/data`,
+    '--memory 2g',
+    '--memory-swap 2g',
+    '-e POSTGRES_PASSWORD=postgres',
+    '-e POSTGRES_USER=postgres',
+    '-e POSTGRES_DB=postgres',
+    'postgres:17.5',
+  ].join(' ');
+
+  const url = `postgresql://postgres:postgres@localhost:${port}/postgres`;
+
+  const running = (await promiseSpawn(checkRunningCmd)).trim();
+  if (running === name) {
+    // already running, return directly
+    logger.info('postgres is already running', name);
+    return url;
+  }
+
+  // check whether a stopped container with the same name exists
+  const all = (await promiseSpawn(checkAllCmd)).trim();
+  if (all === name) {
+    // the container exists but may be stopped; just start it
+    await promiseSpawn(`docker start ${name}`);
+    await waitForPostgresReady(url);
+    logger.info('postgres is started', name);
+    return url;
+  }
+
+  await promiseSpawn(runCmd);
+  await waitForPostgresReady(url);
+  logger.info('postgres is started', name);
+  return url;
+}
+
+let lastUrl = '';
+
+async function ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
+  if (lastUrl) {
+    return lastUrl;
+  }
+  lastUrl = await _ensureDockerPostgres(dataDir, name, port, force);
+  return lastUrl;
+}
+
+// stop, and report whether a postgres container existed in docker
+async function stopDockerPostgres(name = 'abtnode-postgres') {
+  if (!(await checkDockerInstalled())) {
+    return false;
+  }
+  const checkRunningCmd = `docker ps --filter "name=${name}" --format "{{.Names}}"`;
+  const running = (await promiseSpawn(checkRunningCmd)).trim();
+  if (running === name) {
+    // already running
+    logger.info('postgres is already running', name);
+    try {
+      await promiseSpawn(`docker rm -f ${name}`, { mute: true });
+    } catch (_) {
+      // no need to log: postgres may not have been running in the first place
+    }
+    return true;
+  }
+
+  return false;
+}
+
+module.exports = {
+  ensureDockerPostgres,
+  stopDockerPostgres,
+};
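A hedged usage sketch of the new module (the require path mirrors where the file lands in the published package; the behavior notes are read from the code above):

// Sketch only.
const { ensureDockerPostgres, stopDockerPostgres } = require('@abtnode/core/lib/util/docker/ensure-docker-postgres');

async function main(dataDir) {
  // Returns '' when docker is unavailable, ABT_NODE_IGNORE_USE_POSTGRES=true,
  // or no sqlite-to-postgres.lock exists and force is false.
  const url = await ensureDockerPostgres(dataDir, 'abtnode-postgres', 40408, true /* force */);
  if (url) {
    process.env.ABT_NODE_POSTGRES_URL = url; // postgresql://postgres:postgres@localhost:40408/postgres
  }
  // Tear the container down later if needed:
  // await stopDockerPostgres('abtnode-postgres');
}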
package/lib/util/migration-sqlite-to-postgres.js
ADDED

@@ -0,0 +1,408 @@
+/* eslint-disable no-await-in-loop */
+/* eslint-disable no-console */
+/* eslint-disable prefer-destructuring */
+
+const { dbPathToPostgresUrl } = require('@abtnode/models');
+const { QueryTypes, Sequelize } = require('sequelize');
+const fs = require('fs');
+const fsp = require('fs/promises');
+const path = require('path');
+
+const ignoreErrorTableNames = new Set(['runtime_insights', 'notification_receivers', 'notifications']);
+const needCleanDataTableNames = new Set(['sessions']);
+
+function sortTableNames(tableNames, sort) {
+  return [...tableNames].sort((a, b) => {
+    const indexA = sort.indexOf(a);
+    const indexB = sort.indexOf(b);
+
+    const scoreA = indexA === -1 ? Infinity : indexA;
+    const scoreB = indexB === -1 ? Infinity : indexB;
+
+    return scoreA - scoreB;
+  });
+}
+
+async function migrateAllTablesNoModels(dbPath) {
+  // Initialize SQLite connection
+  const connectUrl = process.env.ABT_NODE_POSTGRES_URL;
+  const sqliteDb = new Sequelize({ dialect: 'sqlite', storage: dbPath, logging: false });
+  const postgresUrl = dbPathToPostgresUrl(dbPath, connectUrl);
+  const pgDb = new Sequelize(postgresUrl, {
+    dialect: 'postgres',
+    pool: { max: 10, min: 0, idle: 10000 },
+    logging: false,
+  });
+
+  if (pgDb.getDialect() !== 'postgres') {
+    throw new Error(`PG_CONNECTION_STRING is not a valid Postgres connection string: ${pgDb.getDialect()}`);
+  }
+
+  const sqliteQI = sqliteDb.getQueryInterface();
+  const pgQI = pgDb.getQueryInterface();
+
+  let tableNames = await sqliteQI.showAllTables();
+  tableNames = tableNames
+    .map((t) => (typeof t === 'string' ? t : t.tableName || t.name))
+    .filter((name) => !/^(sqlite|sequelize)/.test(name.toLowerCase()) && name !== 'runtime_insights');
+
+  // sort tableNames so that depended-upon tables come first
+  tableNames = sortTableNames(tableNames, ['users', 'notification_receivers']);
+
+  console.log('Start migration database: ', dbPath);
+
+  for (const tableName of tableNames) {
+    console.log(`\n➡️ Starting migration for table: ${tableName}`);
+
+    const colInfos = await sqliteDb.query(`PRAGMA TABLE_INFO("${tableName}")`, { type: QueryTypes.SELECT });
+    const sqliteSchema = {};
+    for (const col of colInfos) {
+      sqliteSchema[col.name] = {
+        type: col.type,
+        allowNull: col.notnull === 0,
+        defaultValue: col.dflt_value,
+        primaryKey: col.pk === 1,
+      };
+    }
+
+    let allCols = Object.keys(sqliteSchema);
+    // drop the controller column from server.db's blocklets table; it is legacy leftover data
+    if (dbPath.includes('server.db') && tableName === 'blocklets') {
+      allCols = allCols.filter((c) => c !== 'controller');
+    }
+    let pkCols = allCols.filter((c) => sqliteSchema[c].primaryKey);
+    if (!pkCols.length) {
+      pkCols = [allCols[0]];
+      console.warn(` ⚠️ Table ${tableName} has no primary key; using "${pkCols[0]}"`);
+    }
+    const nonPkCols = allCols.filter((c) => !pkCols.includes(c));
+
+    // Describe PG table to detect JSON/auto-inc
+    const pgSchema = await pgQI.describeTable(tableName);
+
+    // find JSON/JSONB
+    const jsonCols = Object.entries(pgSchema)
+      .filter(([, def]) => def.type && ['JSON', 'JSONB'].includes(def.type.toUpperCase()))
+      .map(([col, def]) => ({ name: col, type: def.type.toUpperCase() }));
+
+    // find auto-increment columns (nextval default)
+    const autoIncCols = Object.entries(pgSchema)
+      .filter(([, def]) => typeof def.defaultValue === 'string' && def.defaultValue.startsWith('nextval('))
+      .map(([col]) => col);
+
+    // Build the column list we actually INSERT
+    const insertCols = allCols.filter((c) => !autoIncCols.includes(c));
+
+    const insertColsList = insertCols.map((c) => `"${c}"`).join(', ');
+
+    const placeholders = insertCols
+      .map((c, i) => {
+        const jc = jsonCols.find((j) => j.name === c);
+        return jc ? `$${i + 1}::${jc.type.toLowerCase()}` : `$${i + 1}`;
+      })
+      .join(', ');
+
+    // if all PKs are auto-inc, we do a plain insert
+    const userPkCols = pkCols.filter((c) => !autoIncCols.includes(c));
+    const useUpsert = userPkCols.length > 0;
+
+    let upsertSQL = '';
+    if (useUpsert) {
+      const conflictKeys = userPkCols.map((c) => `"${c}"`).join(',');
+      const updateSet = nonPkCols
+        .map((c) => {
+          const jc = jsonCols.find((j) => j.name === c);
+          return jc ? `"${c}" = EXCLUDED."${c}"::${jc.type.toLowerCase()}` : `"${c}" = EXCLUDED."${c}"`;
+        })
+        .join(',');
+      upsertSQL = `
+        INSERT INTO "${tableName}" (${insertColsList})
+        VALUES (${placeholders})
+        ON CONFLICT (${conflictKeys})
+        DO UPDATE SET ${updateSet};
+      `;
+    } else {
+      upsertSQL = `
+        INSERT INTO "${tableName}" (${insertColsList})
+        VALUES (${placeholders});
+      `;
+    }
+
+    // Batch-migrate rows
+    const batchSize = 1000;
+    let offset = 0;
+    // eslint-disable-next-line no-constant-condition
+    while (true) {
+      const rows = await sqliteDb.query(`SELECT * FROM "${tableName}" LIMIT ${batchSize} OFFSET ${offset}`, {
+        type: QueryTypes.SELECT,
+      });
+      if (!rows.length) break;
+
+      console.log(`  Migrating rows ${offset + 1}-${offset + rows.length}`);
+
+      for (const row of rows) {
+        // if a createdAt or updatedAt column holds an invalid Date, replace it with the current time
+        if (row.createdAt && Number.isNaN(new Date(row.createdAt).getTime())) {
+          row.createdAt = new Date();
+        }
+        if (row.updatedAt && Number.isNaN(new Date(row.updatedAt).getTime())) {
+          row.updatedAt = new Date();
+        }
+
+        // fix non-conforming legacy data
+        if (tableName === 'notifications' && row.feedType === 'gallery') {
+          row.feedType = '';
+        }
+
+        for (const jc of jsonCols) {
+          const raw = row[jc.name];
+          let parsed = null;
+          if (raw == null) {
+            parsed = null;
+          } else if (typeof raw === 'string') {
+            try {
+              parsed = JSON.parse(raw);
+            } catch {
+              //
+            }
+          } else if (Buffer.isBuffer(raw)) {
+            try {
+              parsed = JSON.parse(raw.toString('utf8'));
+            } catch {
+              //
+            }
+          } else if (typeof raw === 'object') {
+            parsed = raw;
+          }
+          row[jc.name] = parsed != null ? JSON.stringify(parsed) : null;
+        }
+
+        // build bind values for ONLY the non-autoInc cols
+        const bindVals = insertCols.map((c) => row[c]);
+
+        if (needCleanDataTableNames.has(tableName)) {
+          for (let i = 0; i < bindVals.length; i++) {
+            const colName = insertCols[i];
+            const isJsonCol = jsonCols.some((j) => j.name === colName);
+            const val = bindVals[i];
+
+            if (isJsonCol && typeof val === 'string' && val.includes('\\u0000')) {
+              try {
+                const parsed = JSON.parse(val);
+                const cleaned = JSON.stringify(parsed, (key, value) => {
+                  if (typeof value === 'string') {
+                    // eslint-disable-next-line no-control-regex
+                    return value.replace(/\u0000/g, '');
+                  }
+                  return value;
+                });
+                bindVals[i] = cleaned;
+              } catch (e) {
+                console.warn(`⚠️ JSON parse error during clean on column "${colName}" [index ${i}]:`, e);
+                // do not throw; keep migrating
+              }
+            }
+          }
+        }
+
+        try {
+          await pgDb.query(upsertSQL, { bind: bindVals });
+        } catch (err) {
+          if (err.name === 'SequelizeUniqueConstraintError') {
+            const uniqField = err.errors[0].path;
+            console.warn(` ⚠️ ${tableName}: unique conflict on ${uniqField}, fallback to UPDATE`);
+            const updateCols = nonPkCols.map((c, i) => `"${c}" = $${i + 1}`).join(', ');
+            const updateBind = nonPkCols.map((c) => row[c]).concat([row[uniqField]]);
+            const updateSQL = `
+              UPDATE "${tableName}"
+              SET ${updateCols}
+              WHERE "${uniqField}" = $${updateBind.length};
+            `;
+            await pgDb.query(updateSQL, { bind: updateBind });
+            continue;
+          }
+          const varcharErr = err.message.match(/value too long for type character varying\((\d+)\)/i);
+          if (varcharErr) {
+            const badCols = [];
+            for (const col of allCols) {
+              const def = pgSchema[col];
+              const lenMatch = def.type.match(/varying\((\d+)\)/i);
+              const val = row[col];
+              if (lenMatch && typeof val === 'string') {
+                const limit = parseInt(lenMatch[1], 10);
+                if (val.length > limit) badCols.push({ column: col, length: val.length, limit });
+              }
+            }
+            console.error(` ❌ ${tableName}: string too long for VARCHAR columns:`, badCols);
+            continue;
+          }
+          console.error(` ❌ Upsert failed for ${tableName} : ${err.message}, SQL:${upsertSQL} value: ${bindVals}`);
+          if (ignoreErrorTableNames.has(tableName)) {
+            console.log(` ❌ Ignore error for ${tableName}`);
+            continue;
+          }
+          if (err.message.includes('enum_webhook_attempts_status')) {
+            console.log(' ❌ Ignore error for enum_webhook_attempts_status');
+            continue;
+          }
+          throw err;
+        }
+      }
+
+      offset += rows.length;
+    }
+
+    console.log(` ✅ Finished migrating table ${tableName}`);
+  }
+
+  await sqliteDb.close();
+  await pgDb.close();
+}
+
+async function validateTableRowCounts(dbPath) {
+  // Initialize SQLite connection
+  const sqliteDb = new Sequelize({
+    dialect: 'sqlite',
+    storage: dbPath,
+    logging: false,
+  });
+
+  // Build Postgres URL from env var and sqlite path
+  const postgresUrl = dbPathToPostgresUrl(dbPath, process.env.ABT_NODE_POSTGRES_URL);
+  const pgDb = new Sequelize(postgresUrl, {
+    dialect: 'postgres',
+    pool: { max: 10, min: 0, idle: 10000 },
+    logging: false,
+  });
+
+  if (pgDb.getDialect() !== 'postgres') {
+    throw new Error(`PG_CONNECTION_STRING is not a valid Postgres connection string: ${pgDb.getDialect()}`);
+  }
+
+  const sqliteQI = sqliteDb.getQueryInterface();
+
+  // 1. List all table names
+  let tableNames = await sqliteQI.showAllTables();
+  tableNames = tableNames
+    .map((t) => (typeof t === 'string' ? t : t.tableName || t.name))
+    .filter((name) => !/^(sqlite_|SequelizeMeta$)/i.test(name) && name !== 'runtime_insights');
+
+  const results = [];
+
+  // 2. For each table, compare counts
+  for (const tableName of tableNames) {
+    // count in SQLite
+    const [{ cnt: sqliteCount }] = await sqliteDb.query(`SELECT COUNT(*) AS cnt FROM "${tableName}"`, {
+      type: QueryTypes.SELECT,
+    });
+
+    // count in Postgres
+    const [{ count: pgCountStr }] = await pgDb.query(`SELECT COUNT(*) AS count FROM "${tableName}"`, {
+      type: QueryTypes.SELECT,
+    });
+    const pgCount = parseInt(pgCountStr, 10);
+
+    const match = sqliteCount === pgCount;
+    results.push({ table: tableName, sqliteCount, pgCount, match });
+
+    console.log(`${match ? '✅' : '❌'} Table "${tableName}": SQLite=${sqliteCount}, Postgres=${pgCount}`);
+  }
+
+  // Close connections
+  await sqliteDb.close();
+  await pgDb.close();
+
+  return results;
+}
+
+async function findBlockletDbFiles(dataDir) {
+  const results = [];
+  const coreDir = path.join(dataDir, 'data');
+
+  async function traverse(dir) {
+    let entries;
+    try {
+      entries = await fsp.readdir(dir, { withFileTypes: true });
+    } catch (err) {
+      console.error(`Failed to read directory ${dir}:`, err);
+      return;
+    }
+
+    for (const entry of entries) {
+      const fullPath = path.join(dir, entry.name);
+
+      // Skip any paths containing "_abtnode"
+      if (fullPath.includes('_abtnode')) continue;
+
+      if (entry.isDirectory()) {
+        await traverse(fullPath);
+      } else if (entry.isFile() && entry.name === 'blocklet.db') {
+        results.push(fullPath);
+      }
+    }
+  }
+
+  await traverse(coreDir);
+  return results;
+}
+
+function hasPostgres(dataDir) {
+  const lockPath = path.join(dataDir, 'core', 'sqlite-to-postgres.lock');
+  const hasLock = fs.existsSync(lockPath);
+  return hasLock;
+}
+
+function savePostgresLock(dataDir) {
+  const lockPath = path.join(dataDir, 'core', 'sqlite-to-postgres.lock');
+  fs.writeFileSync(lockPath, new Date().toISOString());
+}
+
+function removePostgresLock(dataDir) {
+  const lockPath = path.join(dataDir, 'core', 'sqlite-to-postgres.lock');
+  if (fs.existsSync(lockPath)) {
+    fs.unlinkSync(lockPath);
+  }
+}
+
+async function migrationSqliteToPostgres(dataDir, dbPaths) {
+  const postgresUrl = process.env.ABT_NODE_POSTGRES_URL;
+
+  if (!postgresUrl) {
+    return;
+  }
+
+  console.log('Start Migration Sqlite data to Postgres...');
+
+  if (dbPaths.blocklets.length === 0) {
+    const blockletDbFiles = await findBlockletDbFiles(dataDir);
+    dbPaths.blocklets.push(...blockletDbFiles);
+  }
+
+  const allPaths = [];
+
+  for (const dbPath of Object.values(dbPaths)) {
+    if (Array.isArray(dbPath)) {
+      allPaths.push(...dbPath);
+      continue;
+    }
+    allPaths.push(dbPath);
+  }
+
+  const filterPaths = Array.from(new Set(allPaths));
+
+  for (const dbPath of filterPaths) {
+    await migrateAllTablesNoModels(dbPath);
+  }
+
+  for (const dbPath of allPaths) {
+    await validateTableRowCounts(dbPath);
+  }
+
+  savePostgresLock(dataDir);
+}
+
+module.exports = {
+  migrationSqliteToPostgres,
+  hasPostgres,
+  removePostgresLock,
+};
package/package.json
CHANGED

@@ -3,7 +3,7 @@
  "publishConfig": {
    "access": "public"
  },
-  "version": "1.16.46-beta-…
+  "version": "1.16.46-beta-20250704-234926-09d872ad",
  "description": "",
  "main": "lib/index.js",
  "files": [

@@ -19,45 +19,45 @@
  "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
  "license": "Apache-2.0",
  "dependencies": {
-    "@abtnode/analytics": "1.16.46-beta-…
-    "@abtnode/auth": "1.16.46-beta-…
-    "@abtnode/certificate-manager": "1.16.46-beta-…
-    "@abtnode/client": "1.16.46-beta-…
-    "@abtnode/constant": "1.16.46-beta-…
-    "@abtnode/cron": "1.16.46-beta-…
-    "@abtnode/db-cache": "1.16.46-beta-…
-    "@abtnode/docker-utils": "1.16.46-beta-…
-    "@abtnode/logger": "1.16.46-beta-…
-    "@abtnode/models": "1.16.46-beta-…
-    "@abtnode/queue": "1.16.46-beta-…
-    "@abtnode/rbac": "1.16.46-beta-…
-    "@abtnode/router-provider": "1.16.46-beta-…
-    "@abtnode/static-server": "1.16.46-beta-…
-    "@abtnode/timemachine": "1.16.46-beta-…
-    "@abtnode/util": "1.16.46-beta-…
-    "@arcblock/did": "1.20.…
-    "@arcblock/did-auth": "1.20.…
-    "@arcblock/did-ext": "1.20.…
+    "@abtnode/analytics": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/auth": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/certificate-manager": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/client": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/constant": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/cron": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/db-cache": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/docker-utils": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/logger": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/models": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/queue": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/rbac": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/router-provider": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/static-server": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/timemachine": "1.16.46-beta-20250704-234926-09d872ad",
+    "@abtnode/util": "1.16.46-beta-20250704-234926-09d872ad",
+    "@arcblock/did": "1.20.15",
+    "@arcblock/did-auth": "1.20.15",
+    "@arcblock/did-ext": "1.20.15",
    "@arcblock/did-motif": "^1.1.13",
-    "@arcblock/did-util": "1.20.…
-    "@arcblock/event-hub": "1.20.…
-    "@arcblock/jwt": "1.20.…
+    "@arcblock/did-util": "1.20.15",
+    "@arcblock/event-hub": "1.20.15",
+    "@arcblock/jwt": "1.20.15",
    "@arcblock/pm2-events": "^0.0.5",
-    "@arcblock/validator": "1.20.…
-    "@arcblock/vc": "1.20.…
-    "@blocklet/constant": "1.16.46-beta-…
-    "@blocklet/did-space-js": "^1.0…
-    "@blocklet/env": "1.16.46-beta-…
+    "@arcblock/validator": "1.20.15",
+    "@arcblock/vc": "1.20.15",
+    "@blocklet/constant": "1.16.46-beta-20250704-234926-09d872ad",
+    "@blocklet/did-space-js": "^1.1.0",
+    "@blocklet/env": "1.16.46-beta-20250704-234926-09d872ad",
    "@blocklet/error": "^0.2.5",
-    "@blocklet/meta": "1.16.46-beta-…
-    "@blocklet/resolver": "1.16.46-beta-…
-    "@blocklet/sdk": "1.16.46-beta-…
-    "@blocklet/store": "1.16.46-beta-…
-    "@blocklet/theme": "^…
+    "@blocklet/meta": "1.16.46-beta-20250704-234926-09d872ad",
+    "@blocklet/resolver": "1.16.46-beta-20250704-234926-09d872ad",
+    "@blocklet/sdk": "1.16.46-beta-20250704-234926-09d872ad",
+    "@blocklet/store": "1.16.46-beta-20250704-234926-09d872ad",
+    "@blocklet/theme": "^3.0.14",
    "@fidm/x509": "^1.2.1",
-    "@ocap/mcrypto": "1.20.…
-    "@ocap/util": "1.20.…
-    "@ocap/wallet": "1.20.…
+    "@ocap/mcrypto": "1.20.15",
+    "@ocap/util": "1.20.15",
+    "@ocap/wallet": "1.20.15",
    "@slack/webhook": "^5.0.4",
    "archiver": "^7.0.1",
    "axios": "^1.7.9",

@@ -116,5 +116,5 @@
    "jest": "^29.7.0",
    "unzipper": "^0.10.11"
  },
-  "gitHead": "…
+  "gitHead": "1767a642c60fcfada6be69de0e68c5d269489e5d"
 }