@abtnode/core 1.16.45 → 1.16.46-beta-20250703-050038-4ba2582f

This diff shows the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
@@ -955,7 +955,7 @@ const initLogger =
  ),
  });
 
- if (process.env.NODE_ENV === 'production' || process.env.ABT_LOG_TO_FILE === 'true') {
+ const logToFile = () => {
  logger.level = level || 'info';
  if (!fs.existsSync(logDir)) {
  fs.mkdirSync(logDir, { recursive: true });
@@ -1011,7 +1011,9 @@ const initLogger =
 
  addedRejectionExceptionTransport = true;
  }
- } else {
+ };
+
+ const logToConsole = () => {
  logger.level = level || 'debug';
  const transport = new transports.Console({
  format: format.combine(format.colorize({ all: true })),
@@ -1024,6 +1026,15 @@ const initLogger =
  logger.exceptions.handle(transport);
  addedRejectionExceptionTransport = true;
  }
+ };
+
+ if (process.env.ABT_LOG_TO_FILE === 'true') {
+ logToFile();
+ logToConsole();
+ } else if (process.env.NODE_ENV === 'production') {
+ logToFile();
+ } else {
+ logToConsole();
  }
 
  return logger;
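
Note: the three hunks above replace the old either/or branch in initLogger with two helpers, logToFile and logToConsole, and dispatch on the environment. The behavioral change is that ABT_LOG_TO_FILE=true now enables both transports instead of forcing file-only output. A minimal sketch of the resulting dispatch (helper bodies elided):

    // New dispatch order, as added in this diff:
    if (process.env.ABT_LOG_TO_FILE === 'true') {
      logToFile();     // file transports, level defaults to 'info'
      logToConsole();  // colorized console transport, level defaults to 'debug'
    } else if (process.env.NODE_ENV === 'production') {
      logToFile();     // production default: file only
    } else {
      logToConsole();  // development default: console only
    }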
@@ -38890,7 +38901,7 @@ module.exports = require("zlib");
  /***/ ((module) => {
 
  "use strict";
- module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.16.44","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib","test":"node tools/jest.js","coverage":"npm run test -- --coverage"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.16.44","@abtnode/auth":"1.16.44","@abtnode/certificate-manager":"1.16.44","@abtnode/client":"1.16.44","@abtnode/constant":"1.16.44","@abtnode/cron":"1.16.44","@abtnode/db-cache":"1.16.44","@abtnode/docker-utils":"1.16.44","@abtnode/logger":"1.16.44","@abtnode/models":"1.16.44","@abtnode/queue":"1.16.44","@abtnode/rbac":"1.16.44","@abtnode/router-provider":"1.16.44","@abtnode/static-server":"1.16.44","@abtnode/timemachine":"1.16.44","@abtnode/util":"1.16.44","@arcblock/did":"1.20.14","@arcblock/did-auth":"1.20.14","@arcblock/did-ext":"1.20.14","@arcblock/did-motif":"^1.1.13","@arcblock/did-util":"1.20.14","@arcblock/event-hub":"1.20.14","@arcblock/jwt":"1.20.14","@arcblock/pm2-events":"^0.0.5","@arcblock/validator":"1.20.14","@arcblock/vc":"1.20.14","@blocklet/constant":"1.16.44","@blocklet/did-space-js":"^1.0.62","@blocklet/env":"1.16.44","@blocklet/error":"^0.2.5","@blocklet/meta":"1.16.44","@blocklet/resolver":"1.16.44","@blocklet/sdk":"1.16.44","@blocklet/store":"1.16.44","@blocklet/theme":"^2.13.70","@fidm/x509":"^1.2.1","@ocap/mcrypto":"1.20.14","@ocap/util":"1.20.14","@ocap/wallet":"1.20.14","@slack/webhook":"^5.0.4","archiver":"^7.0.1","axios":"^1.7.9","axon":"^2.0.3","chalk":"^4.1.2","cross-spawn":"^7.0.3","dayjs":"^1.11.13","deep-diff":"^1.0.2","detect-port":"^1.5.1","envfile":"^7.1.0","escape-string-regexp":"^4.0.0","fast-glob":"^3.3.2","filesize":"^10.1.1","flat":"^5.0.2","fs-extra":"^11.2.0","get-port":"^5.1.1","hasha":"^5.2.2","is-base64":"^1.1.0","is-cidr":"4","is-ip":"3","is-url":"^1.2.4","joi":"17.12.2","joi-extension-semver":"^5.0.0","js-yaml":"^4.1.0","kill-port":"^2.0.1","lodash":"^4.17.21","node-stream-zip":"^1.15.0","p-all":"^3.0.0","p-limit":"^3.1.0","p-map":"^4.0.0","p-retry":"^4.6.2","p-wait-for":"^3.2.0","rate-limiter-flexible":"^5.0.5","read-last-lines":"^1.8.0","semver":"^7.6.3","sequelize":"^6.35.0","shelljs":"^0.8.5","slugify":"^1.6.6","ssri":"^8.0.1","stream-throttle":"^0.1.3","stream-to-promise":"^3.0.0","systeminformation":"^5.23.3","tail":"^2.2.4","tar":"^6.1.11","transliteration":"^2.3.5","ua-parser-js":"^1.0.2","ufo":"^1.5.3","uuid":"^9.0.1","valid-url":"^1.0.9","which":"^2.0.2","xbytes":"^1.8.0"},"devDependencies":{"expand-tilde":"^2.0.2","express":"^4.18.2","jest":"^29.7.0","unzipper":"^0.10.11"},"gitHead":"e5764f753181ed6a7c615cd4fc6682aacf0cb7cd"}');
+ module.exports = /*#__PURE__*/JSON.parse('{"name":"@abtnode/core","publishConfig":{"access":"public"},"version":"1.16.45","description":"","main":"lib/index.js","files":["lib"],"scripts":{"lint":"eslint tests lib --ignore-pattern \'tests/assets/*\'","lint:fix":"eslint --fix tests lib","test":"node tools/jest.js","coverage":"npm run test -- --coverage"},"keywords":[],"author":"wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)","license":"Apache-2.0","dependencies":{"@abtnode/analytics":"1.16.45","@abtnode/auth":"1.16.45","@abtnode/certificate-manager":"1.16.45","@abtnode/client":"1.16.45","@abtnode/constant":"1.16.45","@abtnode/cron":"1.16.45","@abtnode/db-cache":"1.16.45","@abtnode/docker-utils":"1.16.45","@abtnode/logger":"1.16.45","@abtnode/models":"1.16.45","@abtnode/queue":"1.16.45","@abtnode/rbac":"1.16.45","@abtnode/router-provider":"1.16.45","@abtnode/static-server":"1.16.45","@abtnode/timemachine":"1.16.45","@abtnode/util":"1.16.45","@arcblock/did":"1.20.14","@arcblock/did-auth":"1.20.14","@arcblock/did-ext":"1.20.14","@arcblock/did-motif":"^1.1.13","@arcblock/did-util":"1.20.14","@arcblock/event-hub":"1.20.14","@arcblock/jwt":"1.20.14","@arcblock/pm2-events":"^0.0.5","@arcblock/validator":"1.20.14","@arcblock/vc":"1.20.14","@blocklet/constant":"1.16.45","@blocklet/did-space-js":"^1.0.62","@blocklet/env":"1.16.45","@blocklet/error":"^0.2.5","@blocklet/meta":"1.16.45","@blocklet/resolver":"1.16.45","@blocklet/sdk":"1.16.45","@blocklet/store":"1.16.45","@blocklet/theme":"^2.13.70","@fidm/x509":"^1.2.1","@ocap/mcrypto":"1.20.14","@ocap/util":"1.20.14","@ocap/wallet":"1.20.14","@slack/webhook":"^5.0.4","archiver":"^7.0.1","axios":"^1.7.9","axon":"^2.0.3","chalk":"^4.1.2","cross-spawn":"^7.0.3","dayjs":"^1.11.13","deep-diff":"^1.0.2","detect-port":"^1.5.1","envfile":"^7.1.0","escape-string-regexp":"^4.0.0","fast-glob":"^3.3.2","filesize":"^10.1.1","flat":"^5.0.2","fs-extra":"^11.2.0","get-port":"^5.1.1","hasha":"^5.2.2","is-base64":"^1.1.0","is-cidr":"4","is-ip":"3","is-url":"^1.2.4","joi":"17.12.2","joi-extension-semver":"^5.0.0","js-yaml":"^4.1.0","kill-port":"^2.0.1","lodash":"^4.17.21","node-stream-zip":"^1.15.0","p-all":"^3.0.0","p-limit":"^3.1.0","p-map":"^4.0.0","p-retry":"^4.6.2","p-wait-for":"^3.2.0","rate-limiter-flexible":"^5.0.5","read-last-lines":"^1.8.0","semver":"^7.6.3","sequelize":"^6.35.0","shelljs":"^0.8.5","slugify":"^1.6.6","ssri":"^8.0.1","stream-throttle":"^0.1.3","stream-to-promise":"^3.0.0","systeminformation":"^5.23.3","tail":"^2.2.4","tar":"^6.1.11","transliteration":"^2.3.5","ua-parser-js":"^1.0.2","ufo":"^1.5.3","uuid":"^9.0.1","valid-url":"^1.0.9","which":"^2.0.2","xbytes":"^1.8.0"},"devDependencies":{"expand-tilde":"^2.0.2","express":"^4.18.2","jest":"^29.7.0","unzipper":"^0.10.11"},"gitHead":"e5764f753181ed6a7c615cd4fc6682aacf0cb7cd"}');
 
  /***/ }),
 
@@ -56,7 +56,7 @@ const connectToStore = ({ did, projectId, storeName, storeId, storeUrl, manager,
 
  await projectState.updateProject(projectId, project);
  } catch (error) {
- reject(error);
+ reject(new Error(`Failed to connect to store: ${error.message}`));
  }
  });
  };
@@ -110,16 +110,18 @@ const getProjects = async ({ did, manager, componentDid, showAccessToken, tenant
  const getProject = async ({ did, projectId, messageId, showAccessToken, manager }) => {
  const { projectState } = await manager._getProjectState(did);
  const project = await projectState.findOne(messageId ? { messageId } : { id: projectId });
- if (!showAccessToken) {
- project.connectedStores?.forEach((store) => {
- store.accessToken = store.accessToken ? '__encrypted__' : '';
- });
- }
+ if (project) {
+ if (!showAccessToken) {
+ project.connectedStores?.forEach((store) => {
+ store.accessToken = store.accessToken ? '__encrypted__' : '';
+ });
+ }
 
- if (!showAccessToken) {
- project.connectedEndpoints?.forEach((endpoint) => {
- endpoint.accessKeySecret = endpoint.accessKeySecret ? '__encrypted__' : '';
- });
+ if (!showAccessToken) {
+ project.connectedEndpoints?.forEach((endpoint) => {
+ endpoint.accessKeySecret = endpoint.accessKeySecret ? '__encrypted__' : '';
+ });
+ }
  }
 
  return project;
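
Note: projectState.findOne returns no match for an unknown projectId or messageId, so the added if (project) guard prevents a TypeError when masking secrets. A hedged sketch of the equivalent masking logic (maskSecrets is a hypothetical helper for illustration, not part of this package):

    // Hypothetical helper summarizing the guarded masking above.
    const maskSecrets = (project, showAccessToken) => {
      if (!project || showAccessToken) return project;
      project.connectedStores?.forEach((store) => {
        store.accessToken = store.accessToken ? '__encrypted__' : '';
      });
      project.connectedEndpoints?.forEach((endpoint) => {
        endpoint.accessKeySecret = endpoint.accessKeySecret ? '__encrypted__' : '';
      });
      return project;
    };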
package/lib/index.js CHANGED
@@ -327,7 +327,7 @@ function ABTNode(options) {
  blockletManager.resetSiteByDid = resetSiteByDid;
 
  // Generate an on node ready callback
- const onStatesReady = createStateReadyQueue({ states, options, dataDirs });
+ const onStatesReady = createStateReadyQueue({ states: states.allStates, options, dataDirs });
  onStatesReady(createStateReadyHandler(routingSnapshot));
  const domainStatus = new DomainStatus({ routerManager, states });
 
@@ -7,6 +7,8 @@ const logger = require('@abtnode/logger')('@abtnode/core:migration');
  const { doSchemaMigration, createSequelize } = require('@abtnode/models');
  const { getDbFilePath } = require('../util');
  const getMigrationScripts = require('../util/get-migration-scripts');
+ const { ensureDockerPostgres } = require('../util/docker/ensure-docker-postgres');
+ const { migrationSqliteToPostgres } = require('../util/migration-sqlite-to-postgres');
 
  const BACKUP_FILE_DB = 'server.db';
  const BACKUP_FILE_CONFIG = 'config.yml';
@@ -146,27 +148,41 @@ const runSchemaMigrations = async ({
  blocklets = [],
  printInfo = console.info, // eslint-disable-line
  printSuccess = console.info, // eslint-disable-line
+ migrationPostgres = false,
  }) => {
  if (!process.env.ABT_NODE_CACHE_SQLITE_PATH) {
  process.env.ABT_NODE_CACHE_SQLITE_PATH = path.join(dataDir, 'core', 'db-cache.db');
  }
+ if (!process.env.ABT_NODE_POSTGRES_URL) {
+ const postgresUrl = await ensureDockerPostgres(dataDir, 'abtnode-postgres', 40408, migrationPostgres);
+ process.env.ABT_NODE_POSTGRES_URL = postgresUrl;
+ }
+ if (migrationPostgres && !process.env.ABT_NODE_POSTGRES_URL) {
+ throw new Error('Postgres URL is not set, please set env ABT_NODE_POSTGRES_URL or ensure docker is running');
+ }
+ const dbPaths = {
+ server: getDbFilePath(path.join(dataDir, 'core/server.db')),
+ service: getDbFilePath(path.join(dataDir, 'services/service.db')),
+ certificateManagers: [],
+ blocklets: [],
+ };
 
  // migrate server schema
- let filePath = getDbFilePath(path.join(dataDir, 'core/server.db'));
- await doSchemaMigration(filePath, 'server');
- printSuccess(`Server schema successfully migrated: ${filePath}`);
-
+ dbPaths.server = getDbFilePath(path.join(dataDir, 'core/server.db'));
+ await doSchemaMigration(dbPaths.server, 'server');
+ printSuccess(`Server schema successfully migrated: ${dbPaths.server}`);
  // migrate service schema
- filePath = getDbFilePath(path.join(dataDir, 'services/service.db'));
- await doSchemaMigration(filePath, 'service');
- printSuccess(`Service schema successfully migrated: ${filePath}`);
+ dbPaths.service = getDbFilePath(path.join(dataDir, 'services/service.db'));
+ await doSchemaMigration(dbPaths.service, 'service');
+ printSuccess(`Service schema successfully migrated: ${dbPaths.service}`);
 
  // migrate blocklet schema
  for (let i = 0; i < blocklets.length; i++) {
  const blocklet = blocklets[i];
  const env = blocklet.environments.find((x) => x.key === 'BLOCKLET_DATA_DIR');
  if (env) {
- filePath = getDbFilePath(path.join(env.value, 'blocklet.db'));
+ const filePath = getDbFilePath(path.join(env.value, 'blocklet.db'));
+ dbPaths.blocklets.push(filePath);
  await doSchemaMigration(filePath, 'blocklet');
  printSuccess(`Blocklet schema successfully migrated: ${blocklet.appPid}: ${filePath}`);
  } else {
@@ -176,17 +192,26 @@ const runSchemaMigrations = async ({
 
  // migrate certificate manager schema
  for (let i = 0; i < MODULES.length; i++) {
- filePath = getDbFilePath(path.join(dataDir, `modules/${MODULES[i]}/module.db`));
+ const filePath = getDbFilePath(path.join(dataDir, `modules/${MODULES[i]}/module.db`));
  await doSchemaMigration(filePath, MODULES[i]);
+ dbPaths.certificateManagers.push(filePath);
  printSuccess(`${MODULES[i]} schema successfully migrated: ${filePath}`);
  }
+
+ if (migrationPostgres) {
+ await migrationSqliteToPostgres(dataDir, dbPaths);
+ }
  };
 
- const closeDatabaseConnections = ({
+ const closeDatabaseConnections = async ({
  dataDir,
  blocklets = [],
  printInfo = console.info, // eslint-disable-line
  }) => {
+ if (!process.env.ABT_NODE_POSTGRES_URL) {
+ const postgresUrl = await ensureDockerPostgres(dataDir);
+ process.env.ABT_NODE_POSTGRES_URL = postgresUrl;
+ }
  const dataFiles = [
  getDbFilePath(path.join(dataDir, 'core/server.db')),
  getDbFilePath(path.join(dataDir, 'services/service.db')),
@@ -204,8 +229,14 @@ const closeDatabaseConnections = ({
 
  const connections = dataFiles.map((x) => createSequelize(x));
  connections.forEach((x) => {
- x.close();
- printInfo(`Closed database connection: ${x}`);
+ try {
+ x.close();
+ } catch (err) {
+ if (err.message.includes('was closed!')) {
+ return;
+ }
+ throw new Error(`Failed to close database connection: ${x}, error: ${err.message}`);
+ }
  });
  };
 
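Note: taken together, the runSchemaMigrations hunks collect every SQLite file path into dbPaths (server, service, certificate-manager modules, and per-blocklet databases) and, when the new migrationPostgres flag is set, hand them to migrationSqliteToPostgres after the schema migrations finish. A hedged usage sketch (argument values are illustrative):

    // Illustrative call; dataDir and blocklets come from the node's runtime config.
    await runSchemaMigrations({
      dataDir: '/var/abtnode',
      blocklets,                // array of { environments, appPid, ... }
      migrationPostgres: true,  // forces ensureDockerPostgres, then SQLite -> Postgres copy
    });
    // Afterwards ABT_NODE_POSTGRES_URL points at the Docker-managed Postgres,
    // postgresql://postgres:postgres@localhost:40408/postgres by default.
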
@@ -184,20 +184,26 @@ class BackupState extends BaseState {
  * @param {{ did: string, startTime: string, endTime: string }} options
  * @return {Promise<Array<{date: string, successCount: number, errorCount: number}>>}
  */
- async getBlockletBackupSummary({ did, startTime, endTime, timezone }) {
- const localTime = dayjs.tz(dayjs.utc(), timezone); // current time in the given timezone
- // compute the time difference
- const offset = localTime.utcOffset(); // offset from UTC, in minutes
-
- // apply the timezone offset inside the Sequelize query
- const dateColumn = this.model.sequelize.fn(
- 'strftime',
- '%Y-%m-%d', // extract year-month-day
- this.model.sequelize.fn('datetime', this.model.sequelize.col('createdAt'), `${offset} minutes`)
- );
- /**
- * @type {import('sequelize').WhereOptions<import('@abtnode/models').BackupState>}
- */
+ getBlockletBackupSummary({ did, startTime, endTime, timezone }) {
+ const { sequelize } = this.model;
+ const dialect = sequelize.getDialect(); // 'postgres' or 'sqlite'
+ let dateExpr;
+ let groupExpr;
+
+ if (dialect === 'postgres') {
+ const tzTs = sequelize.fn('timezone', timezone, sequelize.col('createdAt'));
+ const dayTs = sequelize.fn('date_trunc', 'day', tzTs);
+ dateExpr = sequelize.fn('to_char', dayTs, 'YYYY-MM-DD');
+ groupExpr = dayTs;
+ } else {
+ // SQLite: datetime + strftime
+ // SQLite's datetime(col, '+X minutes') already returns text in UTC+offset
+ const offset = dayjs.tz(dayjs.utc(), timezone).utcOffset();
+ const dt = sequelize.fn('datetime', sequelize.col('createdAt'), `${offset} minutes`);
+ dateExpr = sequelize.fn('strftime', '%Y-%m-%d', dt);
+ groupExpr = dt;
+ }
+
  const options = {
  where: {
  appPid: did,
@@ -205,25 +211,16 @@ class BackupState extends BaseState {
  status: { [Op.in]: [BACKUPS.STATUS.SUCCEEDED, BACKUPS.STATUS.FAILED] },
  },
  attributes: [
- [this.model.sequelize.fn('strftime', '%Y-%m-%d', dateColumn), 'date'],
- [
- this.model.sequelize.fn('sum', this.model.sequelize.literal('CASE WHEN status = 0 THEN 1 ELSE 0 END')),
- 'successCount',
- ],
- [
- this.model.sequelize.fn('sum', this.model.sequelize.literal('CASE WHEN status != 0 THEN 1 ELSE 0 END')),
- 'errorCount',
- ],
+ [dateExpr, 'date'],
+ [sequelize.fn('sum', sequelize.literal('CASE WHEN status = 0 THEN 1 ELSE 0 END')), 'successCount'],
+ [sequelize.fn('sum', sequelize.literal('CASE WHEN status != 0 THEN 1 ELSE 0 END')), 'errorCount'],
  ],
- group: [dateColumn],
- order: [[dateColumn, 'DESC']],
+ group: [groupExpr],
+ order: [[groupExpr, 'DESC']],
  raw: true,
  };
 
- const results = await this.model.findAll(options);
-
- // convert the results into a friendlier format
- return results;
+ return this.model.findAll(options);
  }
  }
 
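Note: the getBlockletBackupSummary rewrite makes the daily grouping dialect-aware. Postgres knows named time zones, so timezone(:tz, "createdAt") plus date_trunc('day', ...) buckets rows in the caller's zone; SQLite has no zone table, so the code precomputes a minute offset with dayjs and shifts createdAt via datetime(col, '<offset> minutes') before strftime. Roughly the expression each branch produces (illustrative, not generated output):

    // Postgres: to_char(date_trunc('day', timezone($tz, "createdAt")), 'YYYY-MM-DD')
    // SQLite:   strftime('%Y-%m-%d', datetime(createdAt, '<utcOffset> minutes'))
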
@@ -66,18 +66,5 @@ const init = (dataDirs, config = {}) => {
  blacklist: blacklistState,
  };
  };
- /**
- * @type {{
- * backup: import('./backup'),
- * blocklet: import('./blocklet'),
- * blockletExtras: import('./blocklet-extras'),
- * notification: import('./notification'),
- * notificationReceiver: import('./notification-receiver'),
- * job: import('./job'),
- * node: import('./node'),
- * blacklist: import('./blacklist'),
- * runtimeInsight: import('./runtime-insight'),
- * [key: string]: any
- * }}
- */
+
  module.exports = createStateFactory(init, models);
@@ -24,6 +24,13 @@ const readUnreadInputValidation = ({ notificationIds, receiver }, context) => {
  return receiverDidValidation(receiver, context);
  };
 
+ const safeJsonParse = (json) => {
+ if (typeof json !== 'string') {
+ return json;
+ }
+ return JSON.parse(json);
+ };
+
  /**
  * @extends BaseState<import('@abtnode/models').NotificationState>
  */
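
Note: safeJsonParse exists because the two dialects return JSON columns differently from raw queries: SQLite stores JSON as TEXT and hands back strings, while Postgres JSON/JSONB columns arrive already parsed as objects. The helper is simply a no-op for non-strings:

    safeJsonParse('{"a":1}'); // => { a: 1 }  (SQLite TEXT column)
    safeJsonParse({ a: 1 });  // => { a: 1 }  (Postgres JSON/JSONB column, returned as-is)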
@@ -222,12 +229,12 @@ class NotificationState extends BaseState {
 
  // build the base query conditions
  // FIXME: historical data needs filtering here, to avoid rows whose type is not notification
- const conditions = ['n.type = "notification"'];
+ const conditions = ["n.type = 'notification'"];
  const replacements = { receiver };
 
  if (typeof read === 'boolean') {
  conditions.push('nr.read = :read');
- replacements.read = read ? 1 : 0;
+ replacements.read = read;
  }
 
  if (severity && severity.length) {
@@ -236,7 +243,7 @@
  }
 
  if (entityId && entityId.length) {
- conditions.push('n.entityId IN (:entityId)');
+ conditions.push('n."entityId" IN (:entityId)');
  replacements.entityId = entityId;
  }
 
@@ -261,7 +268,7 @@
  const countQuery = `
  SELECT COUNT(DISTINCT n.id) as total
  FROM notifications n
- INNER JOIN notification_receivers nr ON n.id = nr.notificationId
+ INNER JOIN notification_receivers nr ON n.id = nr."notificationId"
  ${whereClause}
  AND nr.receiver = :receiver
  `;
@@ -270,32 +277,32 @@
  SELECT DISTINCT
  n.*,
  nr.id as receiver_id,
- nr.notificationId as receiver_notificationId,
- nr.receiver as receiver_receiver,
- nr.read as receiver_read,
- nr.readAt as receiver_readAt,
- nr.walletSendStatus as receiver_walletSendStatus,
- nr.walletSendAt as receiver_walletSendAt,
- nr.pushKitSendStatus as receiver_pushKitSendStatus,
- nr.pushKitSendAt as receiver_pushKitSendAt,
- nr.emailSendStatus as receiver_emailSendStatus,
- nr.emailSendAt as receiver_emailSendAt,
- nr.createdAt as receiver_createdAt,
- nr.walletSendFailedReason as receiver_walletSendFailedReason,
- nr.walletSendRecord as receiver_walletSendRecord,
- nr.pushKitSendFailedReason as receiver_pushKitSendFailedReason,
- nr.pushKitSendRecord as receiver_pushKitSendRecord,
- nr.emailSendFailedReason as receiver_emailSendFailedReason,
- nr.emailSendRecord as receiver_emailSendRecord,
- nr.webhook as receiver_webhook,
- nr.email as receiver_email,
- nr.webhookUrls as receiver_webhookUrls,
- nr.deviceId as receiver_deviceId
+ nr."notificationId" as receiver_notificationId,
+ nr."receiver" as receiver_receiver,
+ nr."read" as receiver_read,
+ nr."readAt" as receiver_readAt,
+ nr."walletSendStatus" as receiver_walletSendStatus,
+ nr."walletSendAt" as receiver_walletSendAt,
+ nr."pushKitSendStatus" as receiver_pushKitSendStatus,
+ nr."pushKitSendAt" as receiver_pushKitSendAt,
+ nr."emailSendStatus" as receiver_emailSendStatus,
+ nr."emailSendAt" as receiver_emailSendAt,
+ nr."createdAt" as receiver_createdAt,
+ nr."walletSendFailedReason" as receiver_walletSendFailedReason,
+ nr."walletSendRecord" as receiver_walletSendRecord,
+ nr."pushKitSendFailedReason" as receiver_pushKitSendFailedReason,
+ nr."pushKitSendRecord" as receiver_pushKitSendRecord,
+ nr."emailSendFailedReason" as receiver_emailSendFailedReason,
+ nr."emailSendRecord" as receiver_emailSendRecord,
+ nr."webhook" as receiver_webhook,
+ nr."email" as receiver_email,
+ nr."webhookUrls" as receiver_webhookUrls,
+ nr."deviceId" as receiver_deviceId
  FROM notifications n
- INNER JOIN notification_receivers nr ON n.id = nr.notificationId
+ INNER JOIN notification_receivers nr ON n.id = nr."notificationId"
  ${whereClause}
  AND nr.receiver = :receiver
- ORDER BY n.createdAt DESC
+ ORDER BY n."createdAt" DESC
  LIMIT :limit OFFSET :offset
  `;
 
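Note: the raw-SQL hunks above double-quote every camelCase column and switch string literals from double to single quotes, which is what makes these queries run on Postgres. Postgres folds unquoted identifiers to lowercase, and in standard SQL double quotes denote identifiers while single quotes denote strings:

    // Why the quoting changed (illustrative):
    //   n.entityId              -> resolved as n.entityid in Postgres (column does not exist)
    //   n."entityId"            -> exact match; SQLite accepts the quoted form too
    //   n.type = "notification" -> identifier reference in Postgres (breaks)
    //   n.type = 'notification' -> string literal on both dialects (works)
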
@@ -327,25 +334,25 @@
 
  // handle the receiver's JSON fields
  if (receiverFields.walletSendRecord) {
- receiverFields.walletSendRecord = JSON.parse(receiverFields.walletSendRecord);
+ receiverFields.walletSendRecord = safeJsonParse(receiverFields.walletSendRecord);
  }
  if (receiverFields.pushKitSendRecord) {
- receiverFields.pushKitSendRecord = JSON.parse(receiverFields.pushKitSendRecord);
+ receiverFields.pushKitSendRecord = safeJsonParse(receiverFields.pushKitSendRecord);
  }
  if (receiverFields.emailSendRecord) {
- receiverFields.emailSendRecord = JSON.parse(receiverFields.emailSendRecord);
+ receiverFields.emailSendRecord = safeJsonParse(receiverFields.emailSendRecord);
  }
  if (receiverFields.webhook) {
- receiverFields.webhook = JSON.parse(receiverFields.webhook);
+ receiverFields.webhook = safeJsonParse(receiverFields.webhook);
  }
 
  return {
  ...row,
- attachments: row.attachments ? JSON.parse(row.attachments) : [],
- actions: row.actions ? JSON.parse(row.actions) : [],
- blocks: row.blocks ? JSON.parse(row.blocks) : [],
- data: row.data ? JSON.parse(row.data) : {},
- activity: row.activity ? JSON.parse(row.activity) : {},
+ attachments: row.attachments ? safeJsonParse(row.attachments) : [],
+ actions: row.actions ? safeJsonParse(row.actions) : [],
+ blocks: row.blocks ? safeJsonParse(row.blocks) : [],
+ data: row.data ? safeJsonParse(row.data) : {},
+ activity: row.activity ? safeJsonParse(row.activity) : {},
  read: Boolean(row.read),
  receivers: [receiverFields], // wrap the receiver data in an array
  };
@@ -845,12 +852,12 @@
 
  async getUnreadNotificationCount({ receiver, severity, componentDid, entityId, source } = {}) {
  // build the base query conditions
- const conditions = ['n.type = "notification"'];
+ const conditions = ["n.type = 'notification'"];
  const replacements = { receiver };
 
  // force read to false so only unread messages are counted
  conditions.push('nr.read = :read');
- replacements.read = 0;
+ replacements.read = false;
 
  if (severity && severity.length) {
  conditions.push('n.severity IN (:severity)');
@@ -858,12 +865,12 @@
  }
 
  if (componentDid && componentDid.length) {
- conditions.push('n.componentDid IN (:componentDid)');
+ conditions.push('n."componentDid" IN (:componentDid)');
  replacements.componentDid = componentDid;
  }
 
  if (entityId && entityId.length) {
- conditions.push('n.entityId IN (:entityId)');
+ conditions.push('n."entityId" IN (:entityId)');
  replacements.entityId = entityId;
  }
 
@@ -887,7 +894,7 @@
  const countQuery = `
  SELECT COUNT(DISTINCT n.id) as total
  FROM notifications n
- INNER JOIN notification_receivers nr ON n.id = nr.notificationId
+ INNER JOIN notification_receivers nr ON n.id = nr."notificationId"
  ${whereClause}
  AND nr.receiver = :receiver
  `;
@@ -300,9 +300,9 @@ class User extends ExtendBase {
  // LIMIT ${pageSize} OFFSET ${offset}
  const subQuery = `
  WITH RECURSIVE UserTree(did,inviter,generation,createdAt) AS (
- SELECT did,inviter,generation,createdAt FROM users WHERE inviter="${exist.did}"
+ SELECT did,inviter,generation,"createdAt" FROM users WHERE inviter="${exist.did}"
  UNION ALL
- SELECT child.did,child.inviter,child.generation,child.createdAt FROM users AS child INNER JOIN UserTree AS parent ON (child.inviter=parent.did) ORDER BY child.createdAt DESC
+ SELECT child.did,child.inviter,child.generation,child."createdAt" FROM users AS child INNER JOIN UserTree AS parent ON (child.inviter=parent.did) ORDER BY child."createdAt" DESC
  )
  SELECT did,inviter,generation FROM UserTree ${generation > 0 ? `WHERE generation=${(exist.generation > 0 ? exist.generation : 0) + generation}` : ''}`.trim();
  const children = await this.query(subQuery);
@@ -350,7 +350,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
  replacements.status = PASSPORT_STATUS.VALID;
  if (role === '$none') {
  where.did = {
- [Op.notIn]: Sequelize.literal('(SELECT DISTINCT userDid FROM passports WHERE status = :status)'),
+ [Op.notIn]: Sequelize.literal('(SELECT DISTINCT "userDid" FROM passports WHERE status = :status)'),
  };
  } else if (role === '$blocked') {
  where.approved = false;
@@ -358,7 +358,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
  replacements.role = role;
  where.did = {
  [Op.in]: Sequelize.literal(
- '(SELECT DISTINCT userDid FROM passports WHERE name = :role AND status = :status)'
+ '(SELECT DISTINCT "userDid" FROM passports WHERE name = :role AND status = :status)'
  ),
  };
  }
@@ -426,7 +426,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
  return this.count({
  where: {
  did: {
- [Op.notIn]: Sequelize.literal('(SELECT DISTINCT userDid FROM passports WHERE status = :status)'),
+ [Op.notIn]: Sequelize.literal('(SELECT DISTINCT "userDid" FROM passports WHERE status = :status)'),
  },
  },
  replacements: { status: PASSPORT_STATUS.VALID },
@@ -447,7 +447,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
  where: {
  did: {
  [Op.in]: Sequelize.literal(
- '(SELECT DISTINCT userDid FROM passports WHERE name = :name AND status = :status)'
+ '(SELECT DISTINCT "userDid" FROM passports WHERE name = :name AND status = :status)'
  ),
  },
  },
@@ -457,7 +457,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
 
  async getOwnerDids() {
  const result = await this.passport.query(
- `SELECT DISTINCT(userDid) FROM passports WHERE name='${ROLES.OWNER}' AND status = '${PASSPORT_STATUS.VALID}' ORDER BY issuanceDate ASC LIMIT 1`
+ `SELECT DISTINCT"userDid" FROM passports WHERE name='${ROLES.OWNER}' AND status = '${PASSPORT_STATUS.VALID}' ORDER BY "issuanceDate" ASC LIMIT 1`
  );
  return result.map((x) => x.userDid);
  }
@@ -784,7 +784,7 @@ SELECT did,inviter,generation FROM UserTree`.trim();
  approved: true,
  did: {
  [Op.in]: Sequelize.literal(
- '(SELECT DISTINCT userDid FROM passports WHERE name IN (:roles) AND status = :status)'
+ '(SELECT DISTINCT "userDid" FROM passports WHERE name IN (:roles) AND status = :status)'
  ),
  },
  };
@@ -74,8 +74,9 @@ async function removeDockerNetwork(dockerNetworkName) {
  await promiseSpawn(`docker network rm ${dockerNetworkName}-internal`);
  }
  logger.info(`docker remove network ${dockerNetworkName} done`);
- } catch (error) {
- logger.error(`Error remove network ${dockerNetworkName}:`, error);
+ } catch (_) {
+ // no need to log: the removal above is best-effort, and when docker is not enabled no docker network exists
+ // logger.error(`Error remove network ${dockerNetworkName}:`, error);
  }
  }
 
@@ -0,0 +1,135 @@
+ /* eslint-disable no-await-in-loop */
+ const promiseSpawn = require('@abtnode/util/lib/promise-spawn');
+ const logger = require('@abtnode/logger')('@abtnode/ensure-docker-postgres');
+ const path = require('path');
+ const fs = require('fs');
+ const { Sequelize } = require('sequelize');
+
+ const { checkDockerInstalled } = require('./check-docker-installed');
+ const { hasPostgres } = require('../migration-sqlite-to-postgres');
+
+ const sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
+
+ async function waitForPostgresReady(url, timeoutMs = 30_000) {
+ const start = Date.now();
+ let lastError = null;
+
+ while (Date.now() - start < timeoutMs) {
+ const sequelize = new Sequelize(url, { logging: false });
+ try {
+ await sequelize.authenticate();
+ await sequelize.close();
+ return true;
+ } catch (err) {
+ lastError = err;
+ await new Promise((r) => setTimeout(r, 500));
+ }
+ }
+
+ throw new Error(`Postgres did not become ready in time:\n${lastError?.message ?? 'Unknown error'}`);
+ }
+ async function _ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
+ if (!dataDir) {
+ return '';
+ }
+
+ if (process.env.ABT_NODE_IGNORE_USE_POSTGRES === 'true') {
+ return '';
+ }
+
+ if (!(await checkDockerInstalled())) {
+ return '';
+ }
+
+ if (!hasPostgres(dataDir) && !force) {
+ return '';
+ }
+
+ if (process.env.ABT_NODE_IGNORE_RESTART_POSTGRES !== 'true') {
+ await stopDockerPostgres(name);
+ await sleep(1000);
+ }
+
+ // 1. check whether a container with this name is already running
+ const checkRunningCmd = `docker ps --filter "name=${name}" --format "{{.Names}}"`;
+ // 2. if not running, check whether a stopped container with this name exists
+ const checkAllCmd = `docker ps -a --filter "name=${name}" --format "{{.Names}}"`;
+ // 3. if it neither runs nor exists, create a new container with a memory limit and basic config
+ const dbPath = path.join(dataDir, 'core', 'postgres');
+ if (!fs.existsSync(path.join(dataDir, 'core'))) {
+ fs.mkdirSync(path.join(dataDir, 'core'), { recursive: true });
+ }
+
+ const runCmd = [
+ 'docker run -d',
+ `--name ${name}`,
+ `-p 127.0.0.1:${port}:5432`,
+ `-v ${dbPath}:/var/lib/postgresql/data`,
+ '--memory 2g',
+ '--memory-swap 2g',
+ '-e POSTGRES_PASSWORD=postgres',
+ '-e POSTGRES_USER=postgres',
+ '-e POSTGRES_DB=postgres',
+ 'postgres:17.5',
+ ].join(' ');
+
+ const url = `postgresql://postgres:postgres@localhost:${port}/postgres`;
+
+ const running = (await promiseSpawn(checkRunningCmd)).trim();
+ if (running === name) {
+ // already running, return directly
+ logger.info('postgres is already running', name);
+ return url;
+ }
+
+ // look for a stopped container with the same name
+ const all = (await promiseSpawn(checkAllCmd)).trim();
+ if (all === name) {
+ // the container exists but may be stopped; just start it
+ await promiseSpawn(`docker start ${name}`);
+ await waitForPostgresReady(url);
+ logger.info('postgres is started', name);
+ return url;
+ }
+
+ await promiseSpawn(runCmd);
+ await waitForPostgresReady(url);
+ logger.info('postgres is started', name);
+ return url;
+ }
+
+ let lastUrl = '';
+
+ async function ensureDockerPostgres(dataDir, name = 'abtnode-postgres', port = 40408, force = false) {
+ if (lastUrl) {
+ return lastUrl;
+ }
+ lastUrl = await _ensureDockerPostgres(dataDir, name, port, force);
+ return lastUrl;
+ }
+
+ // stop the container, and return whether a postgres container exists in docker
+ async function stopDockerPostgres(name = 'abtnode-postgres') {
+ if (!(await checkDockerInstalled())) {
+ return false;
+ }
+ const checkRunningCmd = `docker ps --filter "name=${name}" --format "{{.Names}}"`;
+ const running = (await promiseSpawn(checkRunningCmd)).trim();
+ if (running === name) {
+ // it is running; force-remove it so it can be recreated
+ logger.info('postgres is already running', name);
+ try {
+ await promiseSpawn(`docker rm -f ${name}`, { mute: true });
+ } catch (_) {
+ // no need to log: postgres may not have been running in the first place
+ }
+ return true;
+ }
+
+ return false;
+ }
+
+ module.exports = {
+ ensureDockerPostgres,
+ stopDockerPostgres,
+ };
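
Note: ensureDockerPostgres memoizes its result in lastUrl, so only the first call in a process actually touches Docker; later calls return the cached URL regardless of arguments. A hedged usage sketch (paths are examples):

    const { ensureDockerPostgres } = require('./ensure-docker-postgres');

    const url = await ensureDockerPostgres('/var/abtnode'); // starts or reuses the container
    const same = await ensureDockerPostgres('/elsewhere');  // memoized: returns the first URL
    // Returns '' (and skips Docker entirely) when dataDir is empty,
    // ABT_NODE_IGNORE_USE_POSTGRES=true, docker is not installed, or the
    // sqlite-to-postgres.lock file is absent and force is false.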
@@ -0,0 +1,372 @@
+ /* eslint-disable no-await-in-loop */
+ /* eslint-disable no-console */
+ /* eslint-disable prefer-destructuring */
+
+ const { dbPathToPostgresUrl } = require('@abtnode/models');
+ const { QueryTypes, Sequelize } = require('sequelize');
+ const fs = require('fs');
+ const fsp = require('fs/promises');
+ const path = require('path');
+
+ function sortTableNames(tableNames, sort) {
+ return [...tableNames].sort((a, b) => {
+ const indexA = sort.indexOf(a);
+ const indexB = sort.indexOf(b);
+
+ const scoreA = indexA === -1 ? Infinity : indexA;
+ const scoreB = indexB === -1 ? Infinity : indexB;
+
+ return scoreA - scoreB;
+ });
+ }
+
+ async function migrateAllTablesNoModels(dbPath) {
+ // Initialize SQLite connection
+ const connectUrl = process.env.ABT_NODE_POSTGRES_URL;
+ const sqliteDb = new Sequelize({ dialect: 'sqlite', storage: dbPath, logging: false });
+ const postgresUrl = dbPathToPostgresUrl(dbPath, connectUrl);
+ const pgDb = new Sequelize(postgresUrl, {
+ dialect: 'postgres',
+ pool: { max: 10, min: 0, idle: 10000 },
+ logging: false,
+ });
+
+ if (pgDb.getDialect() !== 'postgres') {
+ throw new Error(`PG_CONNECTION_STRING is not a valid Postgres connection string: ${pgDb.getDialect()}`);
+ }
+
+ const sqliteQI = sqliteDb.getQueryInterface();
+ const pgQI = pgDb.getQueryInterface();
+
+ let tableNames = await sqliteQI.showAllTables();
+ tableNames = tableNames
+ .map((t) => (typeof t === 'string' ? t : t.tableName || t.name))
+ .filter((name) => !/^(sqlite|sequelize)/.test(name.toLowerCase()) && name !== 'runtime_insights');
+
+ // sort tableNames so that depended-upon tables come first
+ tableNames = sortTableNames(tableNames, ['users', 'notification_receivers']);
+
+ console.log('Start migration database: ', dbPath);
+
+ for (const tableName of tableNames) {
+ console.log(`\n➡️ Starting migration for table: ${tableName}`);
+
+ const colInfos = await sqliteDb.query(`PRAGMA TABLE_INFO("${tableName}")`, { type: QueryTypes.SELECT });
+ const sqliteSchema = {};
+ for (const col of colInfos) {
+ sqliteSchema[col.name] = {
+ type: col.type,
+ allowNull: col.notnull === 0,
+ defaultValue: col.dflt_value,
+ primaryKey: col.pk === 1,
+ };
+ }
+
+ let allCols = Object.keys(sqliteSchema);
+ // drop the controller column from server.db's blocklets table; it is legacy leftover data
+ if (dbPath.includes('server.db') && tableName === 'blocklets') {
+ allCols = allCols.filter((c) => c !== 'controller');
+ }
+ let pkCols = allCols.filter((c) => sqliteSchema[c].primaryKey);
+ if (!pkCols.length) {
+ pkCols = [allCols[0]];
+ console.warn(` ⚠️ Table ${tableName} has no primary key; using "${pkCols[0]}"`);
+ }
+ const nonPkCols = allCols.filter((c) => !pkCols.includes(c));
+
+ // Describe PG table to detect JSON/auto-inc
+ const pgSchema = await pgQI.describeTable(tableName);
+
+ // find JSON/JSONB
+ const jsonCols = Object.entries(pgSchema)
+ .filter(([, def]) => def.type && ['JSON', 'JSONB'].includes(def.type.toUpperCase()))
+ .map(([col, def]) => ({ name: col, type: def.type.toUpperCase() }));
+
+ // find auto-increment columns (nextval default)
+ const autoIncCols = Object.entries(pgSchema)
+ .filter(([, def]) => typeof def.defaultValue === 'string' && def.defaultValue.startsWith('nextval('))
+ .map(([col]) => col);
+
+ // Build the column list we actually INSERT
+ const insertCols = allCols.filter((c) => !autoIncCols.includes(c));
+
+ const insertColsList = insertCols.map((c) => `"${c}"`).join(', ');
+
+ const placeholders = insertCols
+ .map((c, i) => {
+ const jc = jsonCols.find((j) => j.name === c);
+ return jc ? `$${i + 1}::${jc.type.toLowerCase()}` : `$${i + 1}`;
+ })
+ .join(', ');
+
+ // if all PKs are auto-inc, we do a plain insert
+ const userPkCols = pkCols.filter((c) => !autoIncCols.includes(c));
+ const useUpsert = userPkCols.length > 0;
+
+ let upsertSQL = '';
+ if (useUpsert) {
+ const conflictKeys = userPkCols.map((c) => `"${c}"`).join(',');
+ const updateSet = nonPkCols
+ .map((c) => {
+ const jc = jsonCols.find((j) => j.name === c);
+ return jc ? `"${c}" = EXCLUDED."${c}"::${jc.type.toLowerCase()}` : `"${c}" = EXCLUDED."${c}"`;
+ })
+ .join(',');
+ upsertSQL = `
+ INSERT INTO "${tableName}" (${insertColsList})
+ VALUES (${placeholders})
+ ON CONFLICT (${conflictKeys})
+ DO UPDATE SET ${updateSet};
+ `;
+ } else {
+ upsertSQL = `
+ INSERT INTO "${tableName}" (${insertColsList})
+ VALUES (${placeholders});
+ `;
+ }
+
+ // Batch-migrate rows
+ const batchSize = 1000;
+ let offset = 0;
+ // eslint-disable-next-line no-constant-condition
+ while (true) {
+ const rows = await sqliteDb.query(`SELECT * FROM "${tableName}" LIMIT ${batchSize} OFFSET ${offset}`, {
+ type: QueryTypes.SELECT,
+ });
+ if (!rows.length) break;
+
+ console.log(` Migrating rows ${offset + 1}-${offset + rows.length}`);
+
+ for (const row of rows) {
+ // if a createdAt or updatedAt column holds an invalid Date, replace it with the current time
+ if (row.createdAt && Number.isNaN(new Date(row.createdAt).getTime())) {
+ row.createdAt = new Date();
+ }
+ if (row.updatedAt && Number.isNaN(new Date(row.updatedAt).getTime())) {
+ row.updatedAt = new Date();
+ }
+
+ // repair non-conforming legacy data
+ if (tableName === 'notifications' && row.feedType === 'gallery') {
+ row.feedType = '';
+ }
+
+ for (const jc of jsonCols) {
+ const raw = row[jc.name];
+ let parsed = null;
+ if (raw == null) {
+ parsed = null;
+ } else if (typeof raw === 'string') {
+ try {
+ parsed = JSON.parse(raw);
+ } catch {
+ //
+ }
+ } else if (Buffer.isBuffer(raw)) {
+ try {
+ parsed = JSON.parse(raw.toString('utf8'));
+ } catch {
+ //
+ }
+ } else if (typeof raw === 'object') {
+ parsed = raw;
+ }
+ row[jc.name] = parsed != null ? JSON.stringify(parsed) : null;
+ }
+
+ // build bind values for ONLY the non-autoInc cols
+ const bindVals = insertCols.map((c) => row[c]);
+
+ try {
+ await pgDb.query(upsertSQL, { bind: bindVals });
+ } catch (err) {
+ if (err.name === 'SequelizeUniqueConstraintError') {
+ const uniqField = err.errors[0].path;
+ console.warn(` ⚠️ ${tableName}: unique conflict on ${uniqField}, fallback to UPDATE`);
+ const updateCols = nonPkCols.map((c, i) => `"${c}" = $${i + 1}`).join(', ');
+ const updateBind = nonPkCols.map((c) => row[c]).concat([row[uniqField]]);
+ const updateSQL = `
+ UPDATE "${tableName}"
+ SET ${updateCols}
+ WHERE "${uniqField}" = $${updateBind.length};
+ `;
+ await pgDb.query(updateSQL, { bind: updateBind });
+ continue;
+ }
+ const varcharErr = err.message.match(/value too long for type character varying\((\d+)\)/i);
+ if (varcharErr) {
+ const badCols = [];
+ for (const col of allCols) {
+ const def = pgSchema[col];
+ const lenMatch = def.type.match(/varying\((\d+)\)/i);
+ const val = row[col];
+ if (lenMatch && typeof val === 'string') {
+ const limit = parseInt(lenMatch[1], 10);
+ if (val.length > limit) badCols.push({ column: col, length: val.length, limit });
+ }
+ }
+ console.error(` ❌ ${tableName}: string too long for VARCHAR columns:`, badCols);
+ continue;
+ }
+ console.error(` ❌ Upsert failed for ${tableName} : ${err.message}`);
+ throw err;
+ }
+ }
+
+ offset += rows.length;
+ }
+
+ console.log(` ✅ Finished migrating table ${tableName}`);
+ }
+
+ await sqliteDb.close();
+ await pgDb.close();
+ }
+
+ async function validateTableRowCounts(dbPath) {
+ // Initialize SQLite connection
+ const sqliteDb = new Sequelize({
+ dialect: 'sqlite',
+ storage: dbPath,
+ logging: false,
+ });
+
+ // Build Postgres URL from env var and sqlite path
+ const postgresUrl = dbPathToPostgresUrl(dbPath, process.env.ABT_NODE_POSTGRES_URL);
+ const pgDb = new Sequelize(postgresUrl, {
+ dialect: 'postgres',
+ pool: { max: 10, min: 0, idle: 10000 },
+ logging: false,
+ });
+
+ if (pgDb.getDialect() !== 'postgres') {
+ throw new Error(`PG_CONNECTION_STRING is not a valid Postgres connection string: ${pgDb.getDialect()}`);
+ }
+
+ const sqliteQI = sqliteDb.getQueryInterface();
+
+ // 1. List all table names
+ let tableNames = await sqliteQI.showAllTables();
+ tableNames = tableNames
+ .map((t) => (typeof t === 'string' ? t : t.tableName || t.name))
+ .filter((name) => !/^(sqlite_|SequelizeMeta$)/i.test(name) && name !== 'runtime_insights');
+
+ const results = [];
+
+ // 2. For each table, compare counts
+ for (const tableName of tableNames) {
+ // count in SQLite
+ const [{ cnt: sqliteCount }] = await sqliteDb.query(`SELECT COUNT(*) AS cnt FROM "${tableName}"`, {
+ type: QueryTypes.SELECT,
+ });
+
+ // count in Postgres
+ const [{ count: pgCountStr }] = await pgDb.query(`SELECT COUNT(*) AS count FROM "${tableName}"`, {
+ type: QueryTypes.SELECT,
+ });
+ const pgCount = parseInt(pgCountStr, 10);
+
+ const match = sqliteCount === pgCount;
+ results.push({ table: tableName, sqliteCount, pgCount, match });
+
+ console.log(`${match ? '✅' : '❌'} Table "${tableName}": SQLite=${sqliteCount}, Postgres=${pgCount}`);
+ }
+
+ // Close connections
+ await sqliteDb.close();
+ await pgDb.close();
+
+ return results;
+ }
+
+ async function findBlockletDbFiles(dataDir) {
+ const results = [];
+ const coreDir = path.join(dataDir, 'data');
+
+ async function traverse(dir) {
+ let entries;
+ try {
+ entries = await fsp.readdir(dir, { withFileTypes: true });
+ } catch (err) {
+ console.error(`Failed to read directory ${dir}:`, err);
+ return;
+ }
+
+ for (const entry of entries) {
+ const fullPath = path.join(dir, entry.name);
+
+ // Skip any paths containing "_abtnode"
+ if (fullPath.includes('_abtnode')) continue;
+
+ if (entry.isDirectory()) {
+ await traverse(fullPath);
+ } else if (entry.isFile() && entry.name === 'blocklet.db') {
+ results.push(fullPath);
+ }
+ }
+ }
+
+ await traverse(coreDir);
+ return results;
+ }
+
+ function hasPostgres(dataDir) {
+ const lockPath = path.join(dataDir, 'core', 'sqlite-to-postgres.lock');
+ const hasLock = fs.existsSync(lockPath);
+ return hasLock;
+ }
+
+ function savePostgresLock(dataDir) {
+ const lockPath = path.join(dataDir, 'core', 'sqlite-to-postgres.lock');
+ fs.writeFileSync(lockPath, new Date().toISOString());
+ }
+
+ function removePostgresLock(dataDir) {
+ const lockPath = path.join(dataDir, 'core', 'sqlite-to-postgres.lock');
+ if (fs.existsSync(lockPath)) {
+ fs.unlinkSync(lockPath);
+ }
+ }
+
+ async function migrationSqliteToPostgres(dataDir, dbPaths) {
+ const postgresUrl = process.env.ABT_NODE_POSTGRES_URL;
+
+ if (!postgresUrl) {
+ return;
+ }
+
+ console.log('Start Migration Sqlite data to Postgres...');
+
+ if (dbPaths.blocklets.length === 0) {
+ const blockletDbFiles = await findBlockletDbFiles(dataDir);
+ dbPaths.blocklets.push(...blockletDbFiles);
+ }
+
+ const allPaths = [];
+
+ for (const dbPath of Object.values(dbPaths)) {
+ if (Array.isArray(dbPath)) {
+ allPaths.push(...dbPath);
+ continue;
+ }
+ allPaths.push(dbPath);
+ }
+
+ const filterPaths = Array.from(new Set(allPaths));
+
+ for (const dbPath of filterPaths) {
+ await migrateAllTablesNoModels(dbPath);
+ }
+
+ for (const dbPath of allPaths) {
+ await validateTableRowCounts(dbPath);
+ }
+
+ savePostgresLock(dataDir);
+ }
+
+ module.exports = {
+ migrationSqliteToPostgres,
+ hasPostgres,
+ removePostgresLock,
+ };
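
Note: migrationSqliteToPostgres copies every table in 1000-row batches via an upsert (falling back to a plain UPDATE on unique conflicts), re-checks row counts table by table with validateTableRowCounts, and only then writes core/sqlite-to-postgres.lock so later boots detect Postgres mode. A hedged end-to-end sketch (paths are examples):

    process.env.ABT_NODE_POSTGRES_URL = 'postgresql://postgres:postgres@localhost:40408/postgres';
    await migrationSqliteToPostgres('/var/abtnode', {
      server: '/var/abtnode/core/server.db',
      service: '/var/abtnode/services/service.db',
      certificateManagers: [],
      blocklets: [], // empty: blocklet.db files are discovered under <dataDir>/data
    });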
package/package.json CHANGED
@@ -3,7 +3,7 @@
  "publishConfig": {
  "access": "public"
  },
- "version": "1.16.45",
+ "version": "1.16.46-beta-20250703-050038-4ba2582f",
  "description": "",
  "main": "lib/index.js",
  "files": [
@@ -19,22 +19,22 @@
  "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
  "license": "Apache-2.0",
  "dependencies": {
- "@abtnode/analytics": "1.16.45",
- "@abtnode/auth": "1.16.45",
- "@abtnode/certificate-manager": "1.16.45",
- "@abtnode/client": "1.16.45",
- "@abtnode/constant": "1.16.45",
- "@abtnode/cron": "1.16.45",
- "@abtnode/db-cache": "1.16.45",
- "@abtnode/docker-utils": "1.16.45",
- "@abtnode/logger": "1.16.45",
- "@abtnode/models": "1.16.45",
- "@abtnode/queue": "1.16.45",
- "@abtnode/rbac": "1.16.45",
- "@abtnode/router-provider": "1.16.45",
- "@abtnode/static-server": "1.16.45",
- "@abtnode/timemachine": "1.16.45",
- "@abtnode/util": "1.16.45",
+ "@abtnode/analytics": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/auth": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/certificate-manager": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/client": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/constant": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/cron": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/db-cache": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/docker-utils": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/logger": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/models": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/queue": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/rbac": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/router-provider": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/static-server": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/timemachine": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@abtnode/util": "1.16.46-beta-20250703-050038-4ba2582f",
  "@arcblock/did": "1.20.14",
  "@arcblock/did-auth": "1.20.14",
  "@arcblock/did-ext": "1.20.14",
@@ -45,14 +45,14 @@
  "@arcblock/pm2-events": "^0.0.5",
  "@arcblock/validator": "1.20.14",
  "@arcblock/vc": "1.20.14",
- "@blocklet/constant": "1.16.45",
+ "@blocklet/constant": "1.16.46-beta-20250703-050038-4ba2582f",
  "@blocklet/did-space-js": "^1.0.62",
- "@blocklet/env": "1.16.45",
+ "@blocklet/env": "1.16.46-beta-20250703-050038-4ba2582f",
  "@blocklet/error": "^0.2.5",
- "@blocklet/meta": "1.16.45",
- "@blocklet/resolver": "1.16.45",
- "@blocklet/sdk": "1.16.45",
- "@blocklet/store": "1.16.45",
+ "@blocklet/meta": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@blocklet/resolver": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@blocklet/sdk": "1.16.46-beta-20250703-050038-4ba2582f",
+ "@blocklet/store": "1.16.46-beta-20250703-050038-4ba2582f",
  "@blocklet/theme": "^2.13.70",
  "@fidm/x509": "^1.2.1",
  "@ocap/mcrypto": "1.20.14",
@@ -116,5 +116,5 @@
  "jest": "^29.7.0",
  "unzipper": "^0.10.11"
  },
- "gitHead": "8e981926c6dd0fc612d5bf716ab6c638791aa5f3"
+ "gitHead": "8d7838277e51ecabae489db51937f6deb51e015f"
  }