@abtnode/core 1.17.5-beta-20251209-090953-3a59e7ac → 1.17.5-beta-20251214-122206-29056e8c
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/api/team.js +71 -3
- package/lib/blocklet/manager/disk.js +51 -40
- package/lib/blocklet/manager/helper/blue-green-get-componentids.js +11 -16
- package/lib/blocklet/manager/helper/blue-green-start-blocklet.js +118 -82
- package/lib/blocklet/migration-dist/migration.cjs +1 -1
- package/lib/migrations/index.js +4 -4
- package/lib/monitor/blocklet-runtime-monitor.js +3 -5
- package/lib/states/audit-log.js +34 -9
- package/lib/states/blocklet-child.js +193 -0
- package/lib/states/blocklet-extras.js +63 -1
- package/lib/states/blocklet.js +292 -11
- package/lib/states/index.js +4 -1
- package/lib/states/notification.js +4 -2
- package/lib/util/blocklet.js +112 -42
- package/lib/util/migration-sqlite-to-postgres.js +240 -6
- package/package.json +39 -39
- package/lib/blocklet/manager/helper/blue-green-update-blocklet-status.js +0 -18
package/lib/util/migration-sqlite-to-postgres.js CHANGED

@@ -14,6 +14,105 @@ const notCheckPrimaryKeyTableNames = new Set(['tagging']);
 
 const needBreakErrors = [];
 
+/**
+ * Generate a unique ID for blocklet_children records
+ */
+function generateChildId() {
+  return `${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
+}
+
+/**
+ * The children column was removed from the table, so legacy children data has to be handled separately here; otherwise migrate fails when old children data is present.
+ * @param {object} params - Parameters
+ * @param {Sequelize} params.pgDb - PostgreSQL database connection
+ * @param {string} params.blockletId - Parent blocklet ID
+ * @param {string} params.parentBlockletDid - Parent blocklet DID
+ * @param {Array} params.children - Children array to migrate
+ */
+async function migrateBlockletChildrenToTable({ pgDb, blockletId, parentBlockletDid, children }) {
+  if (!Array.isArray(children) || children.length === 0) {
+    return;
+  }
+
+  for (const child of children) {
+    const childMeta = child?.meta || {};
+    const childDid = childMeta?.did;
+
+    if (!childDid) {
+      console.warn(`  ⚠️ Child in blocklet ${blockletId} has no meta.did, skipping`);
+      continue;
+    }
+
+    try {
+      // Check if child already exists
+      const [existing] = await pgDb.query(
+        'SELECT id FROM blocklet_children WHERE "parentBlockletId" = $1 AND "childDid" = $2 LIMIT 1',
+        { bind: [blockletId, childDid], type: QueryTypes.SELECT }
+      );
+
+      if (existing) {
+        console.log(`  ℹ️ Child ${childDid} already exists for blocklet ${blockletId}, skipping`);
+        continue;
+      }
+
+      // Insert child record
+      const insertSQL = `
+        INSERT INTO blocklet_children (
+          id, "parentBlockletId", "parentBlockletDid", "childDid", "mountPoint",
+          meta, "bundleSource", source, "deployedFrom", mode, status,
+          ports, environments, "installedAt", "startedAt",
+          "stoppedAt", "pausedAt", operator, "inProgressStart", "greenStatus",
+          "greenPorts", "createdAt", "updatedAt"
+        ) VALUES (
+          $1, $2, $3, $4, $5,
+          $6::jsonb, $7::jsonb, $8, $9, $10, $11,
+          $12::jsonb, $13::jsonb, $14, $15,
+          $16, $17, $18, $19, $20,
+          $21::jsonb, $22, $23
+        )
+        ON CONFLICT DO NOTHING
+      `;
+
+      const now = new Date();
+      const bindValues = [
+        generateChildId(), // id
+        blockletId, // parentBlockletId
+        parentBlockletDid, // parentBlockletDid
+        childDid, // childDid
+        child.mountPoint || null, // mountPoint
+        JSON.stringify(child.meta || {}), // meta
+        JSON.stringify(child.bundleSource || {}), // bundleSource
+        child.source || 0, // source
+        child.deployedFrom || '', // deployedFrom
+        child.mode || 'production', // mode
+        child.status || 0, // status
+        JSON.stringify(child.ports || {}), // ports
+        JSON.stringify(child.environments || []), // environments
+        child.installedAt || null, // installedAt
+        child.startedAt || null, // startedAt
+        child.stoppedAt || null, // stoppedAt
+        child.pausedAt || null, // pausedAt
+        child.operator || null, // operator
+        child.inProgressStart || null, // inProgressStart
+        child.greenStatus || null, // greenStatus
+        child.greenPorts ? JSON.stringify(child.greenPorts) : null, // greenPorts
+        now, // createdAt
+        now, // updatedAt
+      ];
+
+      await pgDb.query(insertSQL, { bind: bindValues });
+      console.log(`  ✅ Migrated child ${childDid} to blocklet_children table`);
+    } catch (err) {
+      // Ignore unique constraint errors
+      if (err.name === 'SequelizeUniqueConstraintError' || err.message?.includes('UNIQUE constraint')) {
+        console.log(`  ℹ️ Child ${childDid} already exists (unique constraint), skipping`);
+        continue;
+      }
+      console.error(`  ❌ Failed to migrate child ${childDid}:`, err.message);
+    }
+  }
+}
+
 function sortTableNames(tableNames, sort) {
   return [...tableNames].sort((a, b) => {
     const indexA = sort.indexOf(a);
@@ -50,7 +149,8 @@ async function migrateAllTablesNoModels(dbPath) {
       .filter((name) => !/^(sqlite|sequelize)/.test(name.toLowerCase()) && name !== 'runtime_insights');
 
     // Sort tableNames, putting depended-on tables first
-
+    // blocklet_children must be processed before blocklets, because the children field of blocklets is migrated into the blocklet_children table
+    tableNames = sortTableNames(tableNames, ['users', 'notification_receivers', 'blocklet_children', 'blocklets']);
 
     for (const tableName of tableNames) {
       console.log(`\n➡️ Starting migration for table: ${dbPath} ${tableName}`);
@@ -71,6 +171,14 @@ async function migrateAllTablesNoModels(dbPath) {
       if (dbPath.includes('server.db') && tableName === 'blocklets') {
        allCols = allCols.filter((c) => c !== 'controller');
       }
+
+      // Drop the children column from the blocklets table, since children has been split out into the blocklet_children table
+      // The children data is handled separately during the migration
+      const hasChildrenColumn = tableName === 'blocklets' && sqliteSchema.children;
+      if (hasChildrenColumn) {
+        allCols = allCols.filter((c) => c !== 'children');
+        console.log('  ℹ️ Detected children column in blocklets table, will migrate to blocklet_children table');
+      }
       let pkCols = allCols.filter((c) => sqliteSchema[c].primaryKey);
       if (!pkCols.length) {
         pkCols = [allCols[0]];
@@ -94,6 +202,14 @@ async function migrateAllTablesNoModels(dbPath) {
         .filter(([, def]) => def.type && ['JSON', 'JSONB'].includes(def.type.toUpperCase()))
         .map(([col, def]) => ({ name: col, type: def.type.toUpperCase() }));
 
+      // find DATE/TIMESTAMP columns (need to validate and fix invalid dates)
+      const dateCols = Object.entries(pgSchema)
+        .filter(([, def]) => {
+          const type = def.type?.toUpperCase() || '';
+          return type.includes('DATE') || type.includes('TIMESTAMP') || type === 'DATE' || type.startsWith('TIMESTAMP');
+        })
+        .map(([col]) => col);
+
       // find auto-increment columns (nextval default)
       const autoIncCols = Object.entries(pgSchema)
         .filter(([, def]) => typeof def.defaultValue === 'string' && def.defaultValue.startsWith('nextval('))
@@ -159,12 +275,90 @@ async function migrateAllTablesNoModels(dbPath) {
       console.log(`  Migrating rows ${offset + 1}-${offset + rows.length}`);
 
       for (const row of rows) {
-        //
-        if (
-
+        // Handle children migration for blocklets table
+        if (hasChildrenColumn && row.children) {
+          try {
+            let children = row.children;
+            if (typeof children === 'string') {
+              try {
+                children = JSON.parse(children);
+              } catch {
+                children = null;
+              }
+            } else if (Buffer.isBuffer(children)) {
+              try {
+                children = JSON.parse(children.toString('utf8'));
+              } catch {
+                children = null;
+              }
+            }
+
+            if (Array.isArray(children) && children.length > 0) {
+              // Get parent blocklet DID from meta
+              let meta = row.meta;
+              if (typeof meta === 'string') {
+                try {
+                  meta = JSON.parse(meta);
+                } catch {
+                  meta = {};
+                }
+              } else if (Buffer.isBuffer(meta)) {
+                try {
+                  meta = JSON.parse(meta.toString('utf8'));
+                } catch {
+                  meta = {};
+                }
+              }
+
+              const parentBlockletDid = meta?.did;
+              if (parentBlockletDid) {
+                console.log(`  🔄 Migrating ${children.length} children for blocklet ${row.id}`);
+                await migrateBlockletChildrenToTable({
+                  pgDb,
+                  blockletId: row.id,
+                  parentBlockletDid,
+                  children,
+                });
+              } else {
+                console.warn(`  ⚠️ Blocklet ${row.id} has no meta.did, skipping children migration`);
+              }
+            }
+          } catch (err) {
+            console.error(`  ❌ Failed to migrate children for blocklet ${row.id}:`, err.message);
+          }
         }
-
-
+
+        // Fix invalid date values for all DATE/TIMESTAMP columns
+        for (const dateCol of dateCols) {
+          if (row[dateCol] != null) {
+            const dateVal = row[dateCol];
+            // Check if it's an invalid date (NaN, "Invalid date" string, or invalid Date object)
+            let isValid = false;
+            if (dateVal instanceof Date) {
+              isValid = !Number.isNaN(dateVal.getTime());
+            } else if (typeof dateVal === 'string') {
+              // Check for "Invalid date" string or empty string
+              if (dateVal === 'Invalid date' || dateVal === '' || dateVal === 'null') {
+                isValid = false;
+              } else {
+                const parsed = new Date(dateVal);
+                isValid = !Number.isNaN(parsed.getTime());
+              }
+            } else if (typeof dateVal === 'number') {
+              // Check if it's a valid timestamp
+              const parsed = new Date(dateVal);
+              isValid = !Number.isNaN(parsed.getTime());
+            } else {
+              // null or undefined are valid (will be handled by allowNull)
+              isValid = true;
+            }
+
+            if (!isValid) {
+              console.warn(`  ⚠️ ${tableName}: Invalid date in column "${dateCol}", fixing to current time`);
+              console.log(`    Old value: ${dateVal} (type: ${typeof dateVal})`);
+              row[dateCol] = new Date();
+            }
+          }
         }
 
       // Fix invalid legacy data
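
Taken on its own, the date check added in the hunk above boils down to a small predicate over the raw SQLite value. The sketch below is illustrative only (isValidDateValue is not a helper in this package); it mirrors the same branching over Date, string, and number inputs:

// Illustrative sketch only: restates the date check added above; not part of @abtnode/core.
function isValidDateValue(dateVal) {
  if (dateVal == null) return true; // null/undefined are left to the column's allowNull handling
  if (dateVal instanceof Date) return !Number.isNaN(dateVal.getTime());
  if (typeof dateVal === 'string') {
    if (dateVal === 'Invalid date' || dateVal === '' || dateVal === 'null') return false;
    return !Number.isNaN(new Date(dateVal).getTime());
  }
  if (typeof dateVal === 'number') return !Number.isNaN(new Date(dateVal).getTime());
  return true;
}

// Example: SQLite rows sometimes carry the literal string left behind by a failed formatter.
isValidDateValue('Invalid date'); // false, so the migration replaces the value with new Date()
isValidDateValue(1734178926000); // true, a millisecond timestamp parses cleanly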
@@ -266,6 +460,46 @@ async function migrateAllTablesNoModels(dbPath) {
           console.error(`  ❌ ${tableName}: string too long for VARCHAR columns:`, badCols);
           continue;
         }
+        // Handle invalid timestamp/date errors - should have been fixed above, but log if still occurs
+        const timestampErr = err.message.match(/invalid input syntax for type timestamp/i);
+        if (timestampErr) {
+          console.error(`  ❌ ${tableName}: Invalid timestamp error (should have been fixed):`, err.message);
+          console.log('  Row data:', JSON.stringify(row, null, 2));
+          // Try to fix and retry once
+          let fixed = false;
+          for (const dateCol of dateCols) {
+            if (row[dateCol] != null) {
+              const dateVal = row[dateCol];
+              if (
+                dateVal === 'Invalid date' ||
+                dateVal === '' ||
+                (typeof dateVal === 'string' && dateVal.toLowerCase() === 'null')
+              ) {
+                row[dateCol] = new Date();
+                fixed = true;
+              } else {
+                const parsed = new Date(dateVal);
+                if (Number.isNaN(parsed.getTime())) {
+                  row[dateCol] = new Date();
+                  fixed = true;
+                }
+              }
+            }
+          }
+          if (fixed) {
+            console.log('  🔧 Fixed invalid dates, retrying insert...');
+            const retryBindVals = insertCols.map((c) => row[c]);
+            try {
+              await pgDb.query(upsertSQL, { bind: retryBindVals });
+              continue;
+            } catch (retryErr) {
+              console.error('  ❌ Retry failed:', retryErr.message);
+            }
+          }
+          // If still failing, skip this row
+          console.warn('  ⚠️ Skipping row due to timestamp error');
+          continue;
+        }
         console.error(`  ❌ Upsert failed for ${tableName} : ${err.message}, SQL:${upsertSQL} value: ${bindVals}`);
         if (ignoreErrorTableNames.has(tableName)) {
           console.log(`  ❌ Ignore error for ${tableName}`);
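
For orientation, the blocklet_children table targeted by the INSERT above would look roughly like the sketch below. The column names and the jsonb columns come straight from the diff; the remaining types, the nullability choices, and the unique key on ("parentBlockletId", "childDid") are assumptions inferred from how the code binds values and handles conflicts, not the package's actual schema definition.

// Hypothetical reconstruction of the target table, inferred from the INSERT in the diff above.
// Column names match the diff; concrete types and constraints are assumptions.
const createBlockletChildrenSQL = `
  CREATE TABLE IF NOT EXISTS blocklet_children (
    id TEXT PRIMARY KEY,
    "parentBlockletId" TEXT NOT NULL,
    "parentBlockletDid" TEXT NOT NULL,
    "childDid" TEXT NOT NULL,
    "mountPoint" TEXT,
    meta JSONB,
    "bundleSource" JSONB,
    source INTEGER,
    "deployedFrom" TEXT,
    mode TEXT,
    status INTEGER,
    ports JSONB,
    environments JSONB,
    "installedAt" TIMESTAMPTZ,
    "startedAt" TIMESTAMPTZ,
    "stoppedAt" TIMESTAMPTZ,
    "pausedAt" TIMESTAMPTZ,
    operator TEXT,
    "inProgressStart" TIMESTAMPTZ,
    "greenStatus" INTEGER,
    "greenPorts" JSONB,
    "createdAt" TIMESTAMPTZ NOT NULL,
    "updatedAt" TIMESTAMPTZ NOT NULL,
    UNIQUE ("parentBlockletId", "childDid")
  );
`;

The ON CONFLICT DO NOTHING in the migration only has an effect if some unique constraint of this kind exists, which is why the sketch includes one; the duplicate check on ("parentBlockletId", "childDid") and the SequelizeUniqueConstraintError handling point at the same pair.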
package/package.json CHANGED

@@ -3,7 +3,7 @@
   "publishConfig": {
     "access": "public"
   },
-  "version": "1.17.5-beta-20251209-090953-3a59e7ac",
+  "version": "1.17.5-beta-20251214-122206-29056e8c",
   "description": "",
   "main": "lib/index.js",
   "files": [
@@ -17,46 +17,46 @@
   "author": "wangshijun <wangshijun2010@gmail.com> (http://github.com/wangshijun)",
   "license": "Apache-2.0",
   "dependencies": {
-    "@abtnode/analytics": "1.17.5-beta-
-    "@abtnode/auth": "1.17.5-beta-
-    "@abtnode/certificate-manager": "1.17.5-beta-
-    "@abtnode/constant": "1.17.5-beta-
-    "@abtnode/cron": "1.17.5-beta-
-    "@abtnode/db-cache": "1.17.5-beta-
-    "@abtnode/docker-utils": "1.17.5-beta-
-    "@abtnode/logger": "1.17.5-beta-
-    "@abtnode/models": "1.17.5-beta-
-    "@abtnode/queue": "1.17.5-beta-
-    "@abtnode/rbac": "1.17.5-beta-
-    "@abtnode/router-provider": "1.17.5-beta-
-    "@abtnode/static-server": "1.17.5-beta-
-    "@abtnode/timemachine": "1.17.5-beta-
-    "@abtnode/util": "1.17.5-beta-
-    "@aigne/aigne-hub": "^0.10.
-    "@arcblock/did": "^1.27.
-    "@arcblock/did-connect-js": "^1.27.
-    "@arcblock/did-ext": "^1.27.
+    "@abtnode/analytics": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/auth": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/certificate-manager": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/constant": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/cron": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/db-cache": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/docker-utils": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/logger": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/models": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/queue": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/rbac": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/router-provider": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/static-server": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/timemachine": "1.17.5-beta-20251214-122206-29056e8c",
+    "@abtnode/util": "1.17.5-beta-20251214-122206-29056e8c",
+    "@aigne/aigne-hub": "^0.10.14",
+    "@arcblock/did": "^1.27.14",
+    "@arcblock/did-connect-js": "^1.27.14",
+    "@arcblock/did-ext": "^1.27.14",
     "@arcblock/did-motif": "^1.1.14",
-    "@arcblock/did-util": "^1.27.
-    "@arcblock/event-hub": "^1.27.
-    "@arcblock/jwt": "^1.27.
+    "@arcblock/did-util": "^1.27.14",
+    "@arcblock/event-hub": "^1.27.14",
+    "@arcblock/jwt": "^1.27.14",
     "@arcblock/pm2-events": "^0.0.5",
-    "@arcblock/validator": "^1.27.
-    "@arcblock/vc": "^1.27.
-    "@blocklet/constant": "1.17.5-beta-
-    "@blocklet/did-space-js": "^1.2.
-    "@blocklet/env": "1.17.5-beta-
-    "@blocklet/error": "^0.3.
-    "@blocklet/meta": "1.17.5-beta-
-    "@blocklet/resolver": "1.17.5-beta-
-    "@blocklet/sdk": "1.17.5-beta-
-    "@blocklet/server-js": "1.17.5-beta-
-    "@blocklet/store": "1.17.5-beta-
-    "@blocklet/theme": "^3.2.
+    "@arcblock/validator": "^1.27.14",
+    "@arcblock/vc": "^1.27.14",
+    "@blocklet/constant": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/did-space-js": "^1.2.9",
+    "@blocklet/env": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/error": "^0.3.4",
+    "@blocklet/meta": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/resolver": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/sdk": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/server-js": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/store": "1.17.5-beta-20251214-122206-29056e8c",
+    "@blocklet/theme": "^3.2.13",
     "@fidm/x509": "^1.2.1",
-    "@ocap/mcrypto": "^1.27.
-    "@ocap/util": "^1.27.
-    "@ocap/wallet": "^1.27.
+    "@ocap/mcrypto": "^1.27.14",
+    "@ocap/util": "^1.27.14",
+    "@ocap/wallet": "^1.27.14",
     "@slack/webhook": "^7.0.6",
     "archiver": "^7.0.1",
     "axios": "^1.7.9",
@@ -116,5 +116,5 @@
     "express": "^4.18.2",
     "unzipper": "^0.10.11"
   },
-  "gitHead": "
+  "gitHead": "6839aebc2fdccbfbe7448f1e38f90be300ee4051"
 }
package/lib/blocklet/manager/helper/blue-green-update-blocklet-status.js DELETED

@@ -1,18 +0,0 @@
-const blueGreenUpdateBlockletStatus = async ({ states, did, status, blueGreenComponentIds }) => {
-  const outputBlocklet = {};
-  await Promise.all(
-    blueGreenComponentIds.map(async (item) => {
-      if (!item.componentDids.length) {
-        return;
-      }
-      const res = await states.blocklet.setBlockletStatus(did, status, {
-        componentDids: item.componentDids,
-        isGreen: item.changeToGreen,
-      });
-      Object.assign(outputBlocklet, res);
-    })
-  );
-  return outputBlocklet;
-};
-
-module.exports = { blueGreenUpdateBlockletStatus };