cyberia 3.0.2 → 3.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/{.env.production → .env.example} +20 -2
- package/.github/workflows/engine-cyberia.cd.yml +41 -10
- package/.github/workflows/engine-cyberia.ci.yml +53 -14
- package/.github/workflows/ghpkg.ci.yml +1 -1
- package/.github/workflows/gitlab.ci.yml +1 -1
- package/.github/workflows/hardhat.ci.yml +82 -0
- package/.github/workflows/npmpkg.ci.yml +37 -8
- package/.github/workflows/publish.ci.yml +5 -5
- package/.github/workflows/publish.cyberia.ci.yml +5 -5
- package/.github/workflows/pwa-microservices-template-page.cd.yml +3 -3
- package/.github/workflows/pwa-microservices-template-test.ci.yml +1 -1
- package/.github/workflows/release.cd.yml +3 -2
- package/.vscode/extensions.json +9 -8
- package/.vscode/settings.json +3 -2
- package/CHANGELOG.md +533 -290
- package/CLI-HELP.md +79 -53
- package/WHITE-PAPER.md +1540 -0
- package/bin/build.js +16 -11
- package/bin/cyberia.js +959 -8
- package/bin/deploy.js +103 -270
- package/bin/file.js +2 -1
- package/bin/index.js +959 -8
- package/bin/vs.js +3 -3
- package/conf.js +277 -77
- package/deployment.yaml +218 -4
- package/hardhat/.env.example +31 -0
- package/hardhat/README.md +531 -0
- package/hardhat/WHITE-PAPER.md +1540 -0
- package/hardhat/contracts/ObjectLayerToken.sol +391 -0
- package/hardhat/deployments/.gitkeep +0 -0
- package/hardhat/deployments/hardhat-ObjectLayerToken.json +11 -0
- package/hardhat/hardhat.config.js +136 -0
- package/hardhat/ignition/modules/ObjectLayerToken.js +21 -0
- package/hardhat/networks/besu-object-layer.network.json +138 -0
- package/hardhat/package-lock.json +7628 -0
- package/hardhat/package.json +45 -0
- package/hardhat/scripts/deployObjectLayerToken.js +98 -0
- package/hardhat/test/ObjectLayerToken.js +590 -0
- package/jsdoc.dd-cyberia.json +59 -0
- package/jsdoc.json +20 -13
- package/manifests/cronjobs/dd-cron/dd-cron-backup.yaml +1 -1
- package/manifests/cronjobs/dd-cron/dd-cron-dns.yaml +1 -1
- package/manifests/deployment/dd-cyberia-development/deployment.yaml +490 -0
- package/manifests/deployment/dd-cyberia-development/proxy.yaml +261 -0
- package/manifests/deployment/dd-cyberia-development/pv-pvc.yaml +132 -0
- package/manifests/deployment/dd-default-development/deployment.yaml +2 -2
- package/manifests/deployment/dd-test-development/deployment.yaml +52 -52
- package/manifests/deployment/dd-test-development/proxy.yaml +4 -4
- package/manifests/pv-pvc-dd.yaml +1 -1
- package/package.json +60 -50
- package/proxy.yaml +128 -9
- package/pv-pvc.yaml +132 -0
- package/scripts/k3s-node-setup.sh +1 -1
- package/scripts/ports-ls.sh +2 -0
- package/src/api/atlas-sprite-sheet/atlas-sprite-sheet.controller.js +3 -1
- package/src/api/atlas-sprite-sheet/atlas-sprite-sheet.model.js +1 -2
- package/src/api/atlas-sprite-sheet/atlas-sprite-sheet.service.js +40 -7
- package/src/api/document/document.service.js +1 -1
- package/src/api/file/file.controller.js +3 -1
- package/src/api/file/file.service.js +28 -5
- package/src/api/ipfs/ipfs.service.js +2 -2
- package/src/api/object-layer/object-layer.controller.js +6 -2
- package/src/api/object-layer/object-layer.model.js +67 -21
- package/src/api/object-layer/object-layer.router.js +668 -42
- package/src/api/object-layer/object-layer.service.js +10 -16
- package/src/api/object-layer-render-frames/object-layer-render-frames.model.js +1 -2
- package/src/api/user/user.router.js +10 -5
- package/src/api/user/user.service.js +7 -7
- package/src/cli/baremetal.js +6 -10
- package/src/cli/cloud-init.js +0 -3
- package/src/cli/db.js +54 -71
- package/src/cli/deploy.js +64 -12
- package/src/cli/env.js +5 -5
- package/src/cli/fs.js +0 -2
- package/src/cli/image.js +0 -3
- package/src/cli/index.js +41 -13
- package/src/cli/monitor.js +5 -6
- package/src/cli/repository.js +329 -46
- package/src/cli/run.js +210 -122
- package/src/cli/secrets.js +1 -3
- package/src/cli/ssh.js +1 -1
- package/src/client/Itemledger.index.js +1 -959
- package/src/client/Underpost.index.js +36 -0
- package/src/client/components/core/AgGrid.js +20 -5
- package/src/client/components/core/Alert.js +2 -2
- package/src/client/components/core/Content.js +22 -3
- package/src/client/components/core/Docs.js +30 -6
- package/src/client/components/core/FileExplorer.js +71 -4
- package/src/client/components/core/Input.js +1 -1
- package/src/client/components/core/Modal.js +22 -6
- package/src/client/components/core/PublicProfile.js +3 -3
- package/src/client/components/core/RichText.js +1 -2
- package/src/client/components/core/Router.js +34 -1
- package/src/client/components/core/Worker.js +1 -1
- package/src/client/components/cryptokoyn/CssCryptokoyn.js +63 -1
- package/src/client/components/cyberia/ObjectLayerEngineModal.js +145 -119
- package/src/client/components/cyberia/ObjectLayerEngineViewer.js +64 -6
- package/src/client/components/cyberia-portal/CommonCyberiaPortal.js +1 -0
- package/src/client/components/cyberia-portal/CssCyberiaPortal.js +44 -2
- package/src/client/components/cyberia-portal/LogInCyberiaPortal.js +0 -1
- package/src/client/components/cyberia-portal/MenuCyberiaPortal.js +64 -2
- package/src/client/components/cyberia-portal/RoutesCyberiaPortal.js +1 -0
- package/src/client/components/itemledger/CssItemledger.js +62 -0
- package/src/client/components/underpost/CommonUnderpost.js +29 -0
- package/src/client/components/underpost/CssUnderpost.js +281 -0
- package/src/client/components/underpost/CyberpunkBloggerUnderpost.js +879 -0
- package/src/client/components/underpost/DocumentSearchProvider.js +448 -0
- package/src/client/components/underpost/ElementsUnderpost.js +38 -0
- package/src/client/components/underpost/LabGalleryUnderpost.js +82 -0
- package/src/client/components/underpost/LogInUnderpost.js +23 -0
- package/src/client/components/underpost/LogOutUnderpost.js +15 -0
- package/src/client/components/underpost/MenuUnderpost.js +691 -0
- package/src/client/components/underpost/RoutesUnderpost.js +47 -0
- package/src/client/components/underpost/SettingsUnderpost.js +16 -0
- package/src/client/components/underpost/SignUpUnderpost.js +9 -0
- package/src/client/components/underpost/SocketIoUnderpost.js +54 -0
- package/src/client/components/underpost/TranslateUnderpost.js +10 -0
- package/src/client/public/cryptokoyn/assets/logo/base-icon.png +0 -0
- package/src/client/public/cryptokoyn/browserconfig.xml +12 -0
- package/src/client/public/cryptokoyn/microdata.json +85 -0
- package/src/client/public/cryptokoyn/site.webmanifest +57 -0
- package/src/client/public/cryptokoyn/sitemap +3 -3
- package/src/client/public/default/sitemap +3 -3
- package/src/client/public/itemledger/browserconfig.xml +2 -2
- package/src/client/public/itemledger/manifest.webmanifest +4 -4
- package/src/client/public/itemledger/microdata.json +71 -0
- package/src/client/public/itemledger/sitemap +3 -3
- package/src/client/public/itemledger/yandex-browser-manifest.json +2 -2
- package/src/client/public/test/sitemap +3 -3
- package/src/client/services/object-layer/object-layer.management.js +23 -4
- package/src/client/ssr/body/404.js +15 -11
- package/src/client/ssr/body/500.js +15 -11
- package/src/client/ssr/body/SwaggerDarkMode.js +285 -0
- package/src/client/ssr/body/UnderpostDefaultSplashScreen.js +83 -0
- package/src/client/ssr/head/PwaItemledger.js +60 -0
- package/src/client/ssr/head/UnderpostScripts.js +6 -0
- package/src/client/ssr/offline/NoNetworkConnection.js +11 -10
- package/src/client/ssr/pages/Test.js +11 -10
- package/src/client.build.js +0 -3
- package/src/client.dev.js +0 -3
- package/src/db/DataBaseProvider.js +17 -2
- package/src/db/mariadb/MariaDB.js +14 -9
- package/src/db/mongo/MongooseDB.js +17 -1
- package/src/index.js +1 -1
- package/src/proxy.js +0 -3
- package/src/runtime/express/Express.js +15 -9
- package/src/runtime/lampp/Lampp.js +6 -13
- package/src/server/auth.js +12 -14
- package/src/server/backup.js +2 -3
- package/src/server/besu-genesis-generator.js +1630 -0
- package/src/server/client-build-docs.js +126 -17
- package/src/server/client-build-live.js +9 -18
- package/src/server/client-build.js +203 -75
- package/src/server/client-dev-server.js +14 -13
- package/src/server/conf.js +376 -164
- package/src/server/cron.js +2 -1
- package/src/server/dns.js +28 -12
- package/src/server/downloader.js +0 -2
- package/src/server/logger.js +27 -9
- package/src/server/object-layer.js +92 -16
- package/src/server/peer.js +0 -2
- package/src/server/process.js +1 -50
- package/src/server/proxy.js +4 -8
- package/src/server/runtime.js +5 -8
- package/src/server/semantic-layer-generator.js +1 -0
- package/src/server/ssr.js +0 -3
- package/src/server/start.js +19 -12
- package/src/server/tls.js +0 -2
- package/src/server.js +0 -4
- package/.env.development +0 -43
- package/.env.test +0 -43
- package/hardhat/contracts/CryptoKoyn.sol +0 -59
- package/hardhat/contracts/ItemLedger.sol +0 -73
- package/hardhat/contracts/Lock.sol +0 -34
- package/hardhat/hardhat.config.cjs +0 -45
- package/hardhat/ignition/modules/Lock.js +0 -18
- package/hardhat/networks/cryptokoyn-itemledger.network.json +0 -29
- package/hardhat/scripts/deployCryptokoyn.cjs +0 -25
- package/hardhat/scripts/deployItemledger.cjs +0 -25
- package/hardhat/test/Lock.js +0 -126
- package/hardhat/white-paper.md +0 -581
- package/white-paper.md +0 -581
package/bin/cyberia.js
CHANGED
|
@@ -18,9 +18,12 @@ import { Command } from 'commander';
|
|
|
18
18
|
import fs from 'fs-extra';
|
|
19
19
|
import { shellExec } from '../src/server/process.js';
|
|
20
20
|
import { loggerFactory } from '../src/server/logger.js';
|
|
21
|
+
import { generateBesuManifests, deployBesu, removeBesu } from '../src/server/besu-genesis-generator.js';
|
|
21
22
|
import { DataBaseProvider } from '../src/db/DataBaseProvider.js';
|
|
23
|
+
import { loadConfServerJson } from '../src/server/conf.js';
|
|
22
24
|
import {
|
|
23
25
|
ObjectLayerEngine,
|
|
26
|
+
resolveCanonicalCid,
|
|
24
27
|
pngDirectoryIteratorByObjectLayerType,
|
|
25
28
|
getKeyFramesDirectionsFromNumberFolderDirection,
|
|
26
29
|
buildImgFromTile,
|
|
@@ -40,6 +43,42 @@ import crypto from 'crypto';
|
|
|
40
43
|
import nodePath from 'path';
|
|
41
44
|
import Underpost from '../src/index.js';
|
|
42
45
|
|
|
46
|
+
/**
|
|
47
|
+
* Connect to the project MongoDB instance using the standard env / conf layout.
|
|
48
|
+
*
|
|
49
|
+
* @async
|
|
50
|
+
* @function connectDbForChain
|
|
51
|
+
* @param {Object} params
|
|
52
|
+
* @param {string} params.envPath – path to .env file.
|
|
53
|
+
* @param {string} [params.mongoHost] – optional mongo host override.
|
|
54
|
+
* @returns {Promise<{ ObjectLayer: import('mongoose').Model, host: string, path: string }>}
|
|
55
|
+
* @memberof CyberiaCLI
|
|
56
|
+
*/
|
|
57
|
+
async function connectDbForChain({ envPath, mongoHost }) {
|
|
58
|
+
const deployId = process.env.DEFAULT_DEPLOY_ID;
|
|
59
|
+
const host = process.env.DEFAULT_DEPLOY_HOST;
|
|
60
|
+
const path = process.env.DEFAULT_DEPLOY_PATH;
|
|
61
|
+
|
|
62
|
+
const confServerPath = `./engine-private/conf/${deployId}/conf.server.json`;
|
|
63
|
+
if (!fs.existsSync(confServerPath)) {
|
|
64
|
+
throw new Error(`Server config not found: ${confServerPath}. Ensure DEFAULT_DEPLOY_ID is set.`);
|
|
65
|
+
}
|
|
66
|
+
const confServer = loadConfServerJson(confServerPath, { resolve: true });
|
|
67
|
+
const { db } = confServer[host][path];
|
|
68
|
+
|
|
69
|
+
db.host = mongoHost ? mongoHost : db.host.replace('127.0.0.1', 'mongodb-0.mongodb-service');
|
|
70
|
+
|
|
71
|
+
await DataBaseProvider.load({
|
|
72
|
+
apis: ['object-layer'],
|
|
73
|
+
host,
|
|
74
|
+
path,
|
|
75
|
+
db,
|
|
76
|
+
});
|
|
77
|
+
|
|
78
|
+
const ObjectLayer = DataBaseProvider.instance[`${host}${path}`].mongoose.models.ObjectLayer;
|
|
79
|
+
return { ObjectLayer, host, path };
|
|
80
|
+
}
|
|
81
|
+
|
|
43
82
|
/** @type {Function} */
|
|
44
83
|
const logger = loggerFactory(import.meta);
|
|
45
84
|
|
|
@@ -129,7 +168,7 @@ try {
|
|
|
129
168
|
const path = process.env.DEFAULT_DEPLOY_PATH;
|
|
130
169
|
|
|
131
170
|
const confServerPath = `./engine-private/conf/${deployId}/conf.server.json`;
|
|
132
|
-
const confServer =
|
|
171
|
+
const confServer = loadConfServerJson(confServerPath, { resolve: true });
|
|
133
172
|
const { db } = confServer[host][path];
|
|
134
173
|
|
|
135
174
|
db.host = options.mongoHost ? options.mongoHost : db.host.replace('127.0.0.1', 'mongodb-0.mongodb-service');
|
|
@@ -222,7 +261,7 @@ try {
|
|
|
222
261
|
const shouldGenerateAtlas = !isImportAll;
|
|
223
262
|
|
|
224
263
|
if (shouldGenerateAtlas) {
|
|
225
|
-
// Use the
|
|
264
|
+
// Use the createObjectLayerDocuments which handles atlas generation
|
|
226
265
|
// Since we're in CLI context without a full Express req/res, we build a minimal
|
|
227
266
|
// atlas generation flow using AtlasSpriteSheetGenerator directly after creation.
|
|
228
267
|
const { objectLayer } = await ObjectLayerEngine.createObjectLayerDocuments({
|
|
@@ -256,22 +295,60 @@ try {
|
|
|
256
295
|
md5: crypto.createHash('md5').update(buffer).digest('hex'),
|
|
257
296
|
}).save();
|
|
258
297
|
|
|
298
|
+
// Pin atlas PNG to IPFS
|
|
299
|
+
let importAtlasCid = '';
|
|
300
|
+
let importAtlasMetadataCid = '';
|
|
301
|
+
try {
|
|
302
|
+
const ipfsResult = await IpfsClient.addBufferToIpfs(
|
|
303
|
+
buffer,
|
|
304
|
+
`${itemKey}_atlas_sprite_sheet.png`,
|
|
305
|
+
`/object-layer/${itemKey}/${itemKey}_atlas_sprite_sheet.png`,
|
|
306
|
+
);
|
|
307
|
+
if (ipfsResult) {
|
|
308
|
+
importAtlasCid = ipfsResult.cid;
|
|
309
|
+
logger.info(`Atlas sprite sheet pinned to IPFS – CID: ${importAtlasCid}`);
|
|
310
|
+
}
|
|
311
|
+
} catch (ipfsError) {
|
|
312
|
+
logger.warn('Failed to add atlas sprite sheet to IPFS:', ipfsError.message);
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
// Pin atlas metadata JSON to IPFS (fast-json-stable-stringify)
|
|
316
|
+
try {
|
|
317
|
+
const metadataIpfsResult = await IpfsClient.addJsonToIpfs(
|
|
318
|
+
metadata,
|
|
319
|
+
`${itemKey}_atlas_sprite_sheet_metadata.json`,
|
|
320
|
+
`/object-layer/${itemKey}/${itemKey}_atlas_sprite_sheet_metadata.json`,
|
|
321
|
+
);
|
|
322
|
+
if (metadataIpfsResult) {
|
|
323
|
+
importAtlasMetadataCid = metadataIpfsResult.cid;
|
|
324
|
+
logger.info(`Atlas metadata pinned to IPFS – CID: ${importAtlasMetadataCid}`);
|
|
325
|
+
}
|
|
326
|
+
} catch (ipfsError) {
|
|
327
|
+
logger.warn('Failed to add atlas metadata to IPFS:', ipfsError.message);
|
|
328
|
+
}
|
|
329
|
+
|
|
259
330
|
let atlasDoc = await AtlasSpriteSheet.findOne({ 'metadata.itemKey': itemKey });
|
|
260
331
|
|
|
261
332
|
if (atlasDoc) {
|
|
262
333
|
atlasDoc.fileId = fileDoc._id;
|
|
334
|
+
atlasDoc.cid = importAtlasCid;
|
|
263
335
|
atlasDoc.metadata = metadata;
|
|
264
336
|
await atlasDoc.save();
|
|
265
337
|
logger.info(`Updated existing AtlasSpriteSheet document: ${atlasDoc._id}`);
|
|
266
338
|
} else {
|
|
267
339
|
atlasDoc = await new AtlasSpriteSheet({
|
|
268
340
|
fileId: fileDoc._id,
|
|
341
|
+
cid: importAtlasCid,
|
|
269
342
|
metadata,
|
|
270
343
|
}).save();
|
|
271
344
|
logger.info(`Created new AtlasSpriteSheet document: ${atlasDoc._id}`);
|
|
272
345
|
}
|
|
273
346
|
|
|
274
347
|
populatedObjectLayer.atlasSpriteSheetId = atlasDoc._id;
|
|
348
|
+
if (!populatedObjectLayer.data.render) populatedObjectLayer.data.render = {};
|
|
349
|
+
populatedObjectLayer.data.render.cid = importAtlasCid;
|
|
350
|
+
populatedObjectLayer.data.render.metadataCid = importAtlasMetadataCid;
|
|
351
|
+
populatedObjectLayer.markModified('data.render');
|
|
275
352
|
await populatedObjectLayer.save();
|
|
276
353
|
|
|
277
354
|
logger.info(`Atlas sprite sheet completed for item: ${itemKey}`);
|
|
@@ -346,7 +423,9 @@ try {
|
|
|
346
423
|
|
|
347
424
|
if (frameIndexNum >= frames.length) {
|
|
348
425
|
logger.error(
|
|
349
|
-
`Frame index ${frameIndexNum} out of range. Available frames: 0-${
|
|
426
|
+
`Frame index ${frameIndexNum} out of range. Available frames: 0-${
|
|
427
|
+
frames.length - 1
|
|
428
|
+
} for direction ${objectLayerFrameDirection}`,
|
|
350
429
|
);
|
|
351
430
|
process.exit(1);
|
|
352
431
|
}
|
|
@@ -436,12 +515,45 @@ try {
|
|
|
436
515
|
|
|
437
516
|
logger.info(`File saved with ID: ${fileDoc._id}`);
|
|
438
517
|
|
|
518
|
+
// Pin atlas PNG to IPFS
|
|
519
|
+
let toAtlasCid = '';
|
|
520
|
+
let toAtlasMetadataCid = '';
|
|
521
|
+
try {
|
|
522
|
+
const ipfsResult = await IpfsClient.addBufferToIpfs(
|
|
523
|
+
buffer,
|
|
524
|
+
`${itemKey}_atlas_sprite_sheet.png`,
|
|
525
|
+
`/object-layer/${itemKey}/${itemKey}_atlas_sprite_sheet.png`,
|
|
526
|
+
);
|
|
527
|
+
if (ipfsResult) {
|
|
528
|
+
toAtlasCid = ipfsResult.cid;
|
|
529
|
+
logger.info(`Atlas sprite sheet pinned to IPFS – CID: ${toAtlasCid}`);
|
|
530
|
+
}
|
|
531
|
+
} catch (ipfsError) {
|
|
532
|
+
logger.warn('Failed to add atlas sprite sheet to IPFS:', ipfsError.message);
|
|
533
|
+
}
|
|
534
|
+
|
|
535
|
+
// Pin atlas metadata JSON to IPFS (fast-json-stable-stringify)
|
|
536
|
+
try {
|
|
537
|
+
const metadataIpfsResult = await IpfsClient.addJsonToIpfs(
|
|
538
|
+
metadata,
|
|
539
|
+
`${itemKey}_atlas_sprite_sheet_metadata.json`,
|
|
540
|
+
`/object-layer/${itemKey}/${itemKey}_atlas_sprite_sheet_metadata.json`,
|
|
541
|
+
);
|
|
542
|
+
if (metadataIpfsResult) {
|
|
543
|
+
toAtlasMetadataCid = metadataIpfsResult.cid;
|
|
544
|
+
logger.info(`Atlas metadata pinned to IPFS – CID: ${toAtlasMetadataCid}`);
|
|
545
|
+
}
|
|
546
|
+
} catch (ipfsError) {
|
|
547
|
+
logger.warn('Failed to add atlas metadata to IPFS:', ipfsError.message);
|
|
548
|
+
}
|
|
549
|
+
|
|
439
550
|
// Check if atlas sprite sheet already exists
|
|
440
551
|
let atlasDoc = await AtlasSpriteSheet.findOne({ 'metadata.itemKey': itemKey });
|
|
441
552
|
|
|
442
553
|
if (atlasDoc) {
|
|
443
554
|
// Update existing
|
|
444
555
|
atlasDoc.fileId = fileDoc._id;
|
|
556
|
+
atlasDoc.cid = toAtlasCid;
|
|
445
557
|
atlasDoc.metadata = metadata;
|
|
446
558
|
await atlasDoc.save();
|
|
447
559
|
logger.info(`Updated existing AtlasSpriteSheet document: ${atlasDoc._id}`);
|
|
@@ -449,13 +561,18 @@ try {
|
|
|
449
561
|
// Create new
|
|
450
562
|
atlasDoc = await new AtlasSpriteSheet({
|
|
451
563
|
fileId: fileDoc._id,
|
|
564
|
+
cid: toAtlasCid,
|
|
452
565
|
metadata,
|
|
453
566
|
}).save();
|
|
454
567
|
logger.info(`Created new AtlasSpriteSheet document: ${atlasDoc._id}`);
|
|
455
568
|
}
|
|
456
569
|
|
|
457
|
-
// Update ObjectLayer with reference to atlas sprite sheet
|
|
570
|
+
// Update ObjectLayer with reference to atlas sprite sheet and render CIDs
|
|
458
571
|
objectLayer.atlasSpriteSheetId = atlasDoc._id;
|
|
572
|
+
if (!objectLayer.data.render) objectLayer.data.render = {};
|
|
573
|
+
objectLayer.data.render.cid = toAtlasCid;
|
|
574
|
+
objectLayer.data.render.metadataCid = toAtlasMetadataCid;
|
|
575
|
+
objectLayer.markModified('data.render');
|
|
459
576
|
await objectLayer.save();
|
|
460
577
|
|
|
461
578
|
logger.info(`Atlas sprite sheet completed for item: ${itemKey}`);
|
|
@@ -618,6 +735,7 @@ try {
|
|
|
618
735
|
}).save();
|
|
619
736
|
|
|
620
737
|
// Pin atlas PNG to IPFS + copy into MFS
|
|
738
|
+
let atlasMetadataCid = '';
|
|
621
739
|
try {
|
|
622
740
|
const ipfsResult = await IpfsClient.addBufferToIpfs(
|
|
623
741
|
buffer,
|
|
@@ -632,6 +750,21 @@ try {
|
|
|
632
750
|
logger.warn('Failed to add atlas sprite sheet to IPFS:', ipfsError.message);
|
|
633
751
|
}
|
|
634
752
|
|
|
753
|
+
// Pin atlas metadata JSON to IPFS (fast-json-stable-stringify)
|
|
754
|
+
try {
|
|
755
|
+
const metadataIpfsResult = await IpfsClient.addJsonToIpfs(
|
|
756
|
+
metadata,
|
|
757
|
+
`${atlasItemKey}_atlas_sprite_sheet_metadata.json`,
|
|
758
|
+
`/object-layer/${atlasItemKey}/${atlasItemKey}_atlas_sprite_sheet_metadata.json`,
|
|
759
|
+
);
|
|
760
|
+
if (metadataIpfsResult) {
|
|
761
|
+
atlasMetadataCid = metadataIpfsResult.cid;
|
|
762
|
+
logger.info(`Atlas metadata pinned to IPFS – CID: ${atlasMetadataCid}`);
|
|
763
|
+
}
|
|
764
|
+
} catch (ipfsError) {
|
|
765
|
+
logger.warn('Failed to add atlas metadata to IPFS:', ipfsError.message);
|
|
766
|
+
}
|
|
767
|
+
|
|
635
768
|
// Upsert AtlasSpriteSheet document (with CID)
|
|
636
769
|
let atlasDoc = await AtlasSpriteSheet.findOne({ 'metadata.itemKey': atlasItemKey });
|
|
637
770
|
if (atlasDoc) {
|
|
@@ -649,10 +782,12 @@ try {
|
|
|
649
782
|
logger.info(`Created new AtlasSpriteSheet document: ${atlasDoc._id}`);
|
|
650
783
|
}
|
|
651
784
|
|
|
652
|
-
// Link atlas to ObjectLayer and set data.
|
|
785
|
+
// Link atlas to ObjectLayer and set data.render.cid + data.render.metadataCid
|
|
653
786
|
populatedObjectLayer.atlasSpriteSheetId = atlasDoc._id;
|
|
654
|
-
populatedObjectLayer.data.
|
|
655
|
-
populatedObjectLayer.
|
|
787
|
+
if (!populatedObjectLayer.data.render) populatedObjectLayer.data.render = {};
|
|
788
|
+
populatedObjectLayer.data.render.cid = atlasCid;
|
|
789
|
+
populatedObjectLayer.data.render.metadataCid = atlasMetadataCid;
|
|
790
|
+
populatedObjectLayer.markModified('data.render');
|
|
656
791
|
await populatedObjectLayer.save();
|
|
657
792
|
|
|
658
793
|
// Also write atlas PNG to both static asset directories
|
|
@@ -703,7 +838,823 @@ try {
|
|
|
703
838
|
)
|
|
704
839
|
.description('Object layer management');
|
|
705
840
|
|
|
706
|
-
|
|
841
|
+
// ── chain: Hyperledger Besu / ERC-1155 lifecycle commands ────────────────
|
|
842
|
+
const chain = program.command('chain').description('Hyperledger Besu chain & ERC-1155 ObjectLayerToken lifecycle');
|
|
843
|
+
|
|
844
|
+
chain
|
|
845
|
+
.command('deploy')
|
|
846
|
+
.description(
|
|
847
|
+
'Deploy Besu IBFT2 network to kubeadm Kubernetes cluster.\n' +
|
|
848
|
+
'Dynamically generates fresh validator keys, genesis, extraData, enode URLs,\n' +
|
|
849
|
+
'and all K8s manifests in manifests/besu/ before applying via kustomize.\n' +
|
|
850
|
+
'Each invocation creates a unique chain identity (new keys, new extraData).',
|
|
851
|
+
)
|
|
852
|
+
.option('--pull-image', 'Pull Besu container images into containerd before deployment')
|
|
853
|
+
.option('--validators <count>', 'Number of IBFT2 validators (default: 4)', '4')
|
|
854
|
+
.option('--chain-id <chainId>', 'Chain ID for the network (default: 777771)', '777771')
|
|
855
|
+
.option('--block-period <seconds>', 'IBFT2 block period in seconds (default: 5)', '5')
|
|
856
|
+
.option('--epoch-length <length>', 'IBFT2 epoch length (default: 30000)', '30000')
|
|
857
|
+
.option('--coinbase-address <address>', 'Coinbase deployer address (auto-detected from engine-private if omitted)')
|
|
858
|
+
.option('--besu-image <image>', 'Besu container image', 'hyperledger/besu:24.12.1')
|
|
859
|
+
.option('--curl-image <image>', 'Curl init container image', 'curlimages/curl:8.11.1')
|
|
860
|
+
.option('--node-port-rpc <port>', 'NodePort for external JSON-RPC access', '30545')
|
|
861
|
+
.option('--node-port-ws <port>', 'NodePort for external WebSocket access', '30546')
|
|
862
|
+
.option('--namespace <ns>', 'Kubernetes namespace for Besu resources', 'besu')
|
|
863
|
+
.option('--skip-generate', 'Skip manifest generation and use existing manifests/besu/ as-is')
|
|
864
|
+
.option('--skip-wait', 'Skip waiting for validators to reach Running state')
|
|
865
|
+
.action(async (options) => {
|
|
866
|
+
const result = await deployBesu({
|
|
867
|
+
pullImage: !!options.pullImage,
|
|
868
|
+
validators: parseInt(options.validators, 10),
|
|
869
|
+
chainId: parseInt(options.chainId, 10),
|
|
870
|
+
blockPeriodSeconds: parseInt(options.blockPeriod, 10),
|
|
871
|
+
epochLength: parseInt(options.epochLength, 10),
|
|
872
|
+
coinbaseAddress: options.coinbaseAddress || '',
|
|
873
|
+
besuImage: options.besuImage,
|
|
874
|
+
curlImage: options.curlImage,
|
|
875
|
+
nodePortRpc: parseInt(options.nodePortRpc, 10),
|
|
876
|
+
nodePortWs: parseInt(options.nodePortWs, 10),
|
|
877
|
+
namespace: options.namespace,
|
|
878
|
+
skipGenerate: !!options.skipGenerate,
|
|
879
|
+
skipWait: !!options.skipWait,
|
|
880
|
+
manifestsPath: './manifests/besu',
|
|
881
|
+
networkConfigDir: './hardhat/networks',
|
|
882
|
+
privateKeysDir: './engine-private/eth-networks/besu/validators',
|
|
883
|
+
});
|
|
884
|
+
if (!result && !options.skipGenerate) {
|
|
885
|
+
process.exit(1);
|
|
886
|
+
}
|
|
887
|
+
});
|
|
888
|
+
|
|
889
|
+
chain
|
|
890
|
+
.command('remove')
|
|
891
|
+
.description('Remove Besu IBFT2 network from kubeadm Kubernetes cluster')
|
|
892
|
+
.option('--namespace <ns>', 'Kubernetes namespace for Besu resources', 'besu')
|
|
893
|
+
.option('--clean-keys', 'Also remove generated validator keys from engine-private/')
|
|
894
|
+
.option('--clean-manifests', 'Also remove the generated manifests/besu/ directory')
|
|
895
|
+
.action(async (options) => {
|
|
896
|
+
removeBesu({
|
|
897
|
+
namespace: options.namespace,
|
|
898
|
+
cleanKeys: !!options.cleanKeys,
|
|
899
|
+
cleanManifests: !!options.cleanManifests,
|
|
900
|
+
manifestsPath: './manifests/besu',
|
|
901
|
+
privateKeysDir: './engine-private/eth-networks/besu/validators',
|
|
902
|
+
});
|
|
903
|
+
});
|
|
904
|
+
|
|
905
|
+
chain
|
|
906
|
+
.command('generate-manifests')
|
|
907
|
+
.description(
|
|
908
|
+
'Generate fresh Besu IBFT2 K8s manifests without deploying.\n' +
|
|
909
|
+
'Creates new validator keys, genesis, extraData, and all manifest files\n' +
|
|
910
|
+
'in manifests/besu/. Use "cyberia chain deploy --skip-generate" to apply them later.',
|
|
911
|
+
)
|
|
912
|
+
.option('--validators <count>', 'Number of IBFT2 validators (default: 4)', '4')
|
|
913
|
+
.option('--chain-id <chainId>', 'Chain ID for the network (default: 777771)', '777771')
|
|
914
|
+
.option('--block-period <seconds>', 'IBFT2 block period in seconds (default: 5)', '5')
|
|
915
|
+
.option('--epoch-length <length>', 'IBFT2 epoch length (default: 30000)', '30000')
|
|
916
|
+
.option('--coinbase-address <address>', 'Coinbase deployer address (auto-detected from engine-private if omitted)')
|
|
917
|
+
.option('--besu-image <image>', 'Besu container image', 'hyperledger/besu:24.12.1')
|
|
918
|
+
.option('--curl-image <image>', 'Curl init container image', 'curlimages/curl:8.11.1')
|
|
919
|
+
.option('--node-port-rpc <port>', 'NodePort for external JSON-RPC access', '30545')
|
|
920
|
+
.option('--node-port-ws <port>', 'NodePort for external WebSocket access', '30546')
|
|
921
|
+
.option('--namespace <ns>', 'Kubernetes namespace for Besu resources', 'besu')
|
|
922
|
+
.option('--output-dir <dir>', 'Output directory for manifests', './manifests/besu')
|
|
923
|
+
.action(async (options) => {
|
|
924
|
+
try {
|
|
925
|
+
const result = await generateBesuManifests({
|
|
926
|
+
outputDir: options.outputDir,
|
|
927
|
+
networkConfigDir: './hardhat/networks',
|
|
928
|
+
validatorCount: parseInt(options.validators, 10),
|
|
929
|
+
namespace: options.namespace,
|
|
930
|
+
chainId: parseInt(options.chainId, 10),
|
|
931
|
+
blockPeriodSeconds: parseInt(options.blockPeriod, 10),
|
|
932
|
+
epochLength: parseInt(options.epochLength, 10),
|
|
933
|
+
requestTimeoutSeconds: 10,
|
|
934
|
+
coinbaseAddress: options.coinbaseAddress || '',
|
|
935
|
+
besuImage: options.besuImage,
|
|
936
|
+
curlImage: options.curlImage,
|
|
937
|
+
nodePortRpc: parseInt(options.nodePortRpc, 10),
|
|
938
|
+
nodePortWs: parseInt(options.nodePortWs, 10),
|
|
939
|
+
savePrivateKeys: true,
|
|
940
|
+
privateKeysDir: './engine-private/eth-networks/besu/validators',
|
|
941
|
+
});
|
|
942
|
+
logger.info('');
|
|
943
|
+
logger.info('Manifests generated successfully. To deploy:');
|
|
944
|
+
logger.info(' cyberia chain deploy --skip-generate');
|
|
945
|
+
logger.info('');
|
|
946
|
+
logger.info('Validator summary:');
|
|
947
|
+
for (const v of result.validators) {
|
|
948
|
+
logger.info(` Validator ${v.index}: address=${v.address} pubkey=${v.publicKey.slice(0, 16)}...`);
|
|
949
|
+
}
|
|
950
|
+
} catch (err) {
|
|
951
|
+
logger.error(`Manifest generation failed: ${err.message}`);
|
|
952
|
+
process.exit(1);
|
|
953
|
+
}
|
|
954
|
+
});
|
|
955
|
+
|
|
956
|
+
chain
|
|
957
|
+
.command('deploy-contract')
|
|
958
|
+
.description('Deploy ObjectLayerToken (ERC-1155) contract to a Besu network via Hardhat')
|
|
959
|
+
.option('--network <network>', 'Hardhat network name (besu-k8s for kubeadm cluster)', 'besu-k8s')
|
|
960
|
+
.action(async (options) => {
|
|
961
|
+
const network = options.network || 'besu-k8s';
|
|
962
|
+
logger.info(`Deploying ObjectLayerToken to network: ${network}`);
|
|
963
|
+
shellExec(`cd hardhat && npx hardhat run scripts/deployObjectLayerToken.js --network ${network}`);
|
|
964
|
+
logger.info('Contract deployment complete. Check hardhat/deployments/ for the artifact.');
|
|
965
|
+
});
|
|
966
|
+
|
|
967
|
+
chain
|
|
968
|
+
.command('compile')
|
|
969
|
+
.description('Compile Solidity contracts via Hardhat')
|
|
970
|
+
.action(async () => {
|
|
971
|
+
logger.info('Compiling contracts...');
|
|
972
|
+
shellExec('cd hardhat && npx hardhat compile');
|
|
973
|
+
logger.info('Compilation complete.');
|
|
974
|
+
});
|
|
975
|
+
|
|
976
|
+
chain
|
|
977
|
+
.command('test')
|
|
978
|
+
.description('Run Hardhat tests for ObjectLayerToken')
|
|
979
|
+
.action(async () => {
|
|
980
|
+
logger.info('Running ObjectLayerToken tests...');
|
|
981
|
+
shellExec('cd hardhat && npx hardhat test test/ObjectLayerToken.js');
|
|
982
|
+
});
|
|
983
|
+
|
|
984
|
+
chain
|
|
985
|
+
.command('register')
|
|
986
|
+
.description(
|
|
987
|
+
'Register an Object Layer item on-chain via the deployed ObjectLayerToken contract.\n' +
|
|
988
|
+
'When --from-db is set the canonical CID is resolved from MongoDB (fast-json-stable-stringify of objectLayer.data).\n' +
|
|
989
|
+
'This guarantees the on-chain metadataCid always matches the content-addressed IPFS payload.',
|
|
990
|
+
)
|
|
991
|
+
.requiredOption('--item-id <itemId>', 'Human-readable item identifier (e.g. "hatchet")')
|
|
992
|
+
.option('--metadata-cid <cid>', 'IPFS metadata CID for the item (ignored when --from-db is set)', '')
|
|
993
|
+
.option('--from-db', 'Resolve the canonical CID from the ObjectLayer MongoDB document (recommended)')
|
|
994
|
+
.option('--supply <supply>', 'Initial token supply (1 = non-fungible, >1 = semi-fungible)', '1')
|
|
995
|
+
.option('--network <network>', 'Hardhat network name', 'besu-k8s')
|
|
996
|
+
.option('--env-path <envPath>', 'Env path', './.env')
|
|
997
|
+
.option('--mongo-host <mongoHost>', 'MongoDB host override (used with --from-db)')
|
|
998
|
+
.action(async (options) => {
|
|
999
|
+
if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });
|
|
1000
|
+
|
|
1001
|
+
const deploymentsDir = './hardhat/deployments';
|
|
1002
|
+
const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;
|
|
1003
|
+
if (!fs.existsSync(artifactPath)) {
|
|
1004
|
+
logger.error(`Deployment artifact not found: ${artifactPath}. Run "cyberia chain deploy-contract" first.`);
|
|
1005
|
+
process.exit(1);
|
|
1006
|
+
}
|
|
1007
|
+
const deployment = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));
|
|
1008
|
+
const contractAddress = deployment.address;
|
|
1009
|
+
|
|
1010
|
+
// ── Resolve canonical CID ───────────────────────────────────────
|
|
1011
|
+
let canonicalCid = options.metadataCid || '';
|
|
1012
|
+
|
|
1013
|
+
if (options.fromDb) {
|
|
1014
|
+
try {
|
|
1015
|
+
const { ObjectLayer, host, path } = await connectDbForChain({
|
|
1016
|
+
envPath: options.envPath,
|
|
1017
|
+
mongoHost: options.mongoHost,
|
|
1018
|
+
});
|
|
1019
|
+
const resolved = await resolveCanonicalCid({
|
|
1020
|
+
itemId: options.itemId,
|
|
1021
|
+
ObjectLayer,
|
|
1022
|
+
ipfsClient: IpfsClient,
|
|
1023
|
+
options: { host, path },
|
|
1024
|
+
});
|
|
1025
|
+
|
|
1026
|
+
if (options.metadataCid && options.metadataCid !== resolved.cid) {
|
|
1027
|
+
logger.warn(
|
|
1028
|
+
`Provided --metadata-cid "${options.metadataCid}" differs from canonical CID "${resolved.cid}" (source: ${resolved.source}).`,
|
|
1029
|
+
);
|
|
1030
|
+
logger.warn('Using the canonical CID to ensure on-chain integrity.');
|
|
1031
|
+
}
|
|
1032
|
+
|
|
1033
|
+
canonicalCid = resolved.cid;
|
|
1034
|
+
logger.info(`Canonical CID resolved (${resolved.source}): ${canonicalCid}`);
|
|
1035
|
+
logger.info(` SHA-256: ${resolved.sha256}`);
|
|
1036
|
+
|
|
1037
|
+
// Close the DB connection after resolving
|
|
1038
|
+
await DataBaseProvider.instance[`${host}${path}`].mongoose.close();
|
|
1039
|
+
} catch (dbErr) {
|
|
1040
|
+
logger.error(`Failed to resolve canonical CID from database: ${dbErr.message}`);
|
|
1041
|
+
process.exit(1);
|
|
1042
|
+
}
|
|
1043
|
+
} else if (!canonicalCid) {
|
|
1044
|
+
logger.warn(
|
|
1045
|
+
'No --metadata-cid provided and --from-db not set. The on-chain metadataCid will be empty.\n' +
|
|
1046
|
+
'Consider using --from-db to automatically resolve the canonical CID from the database.',
|
|
1047
|
+
);
|
|
1048
|
+
}
|
|
1049
|
+
|
|
1050
|
+
logger.info(`Registering Object Layer item "${options.itemId}" on contract ${contractAddress}`);
|
|
1051
|
+
logger.info(` Metadata CID: ${canonicalCid || '(none)'}`);
|
|
1052
|
+
logger.info(` Supply: ${options.supply}`);
|
|
1053
|
+
|
|
1054
|
+
// Use a Hardhat script via inline JS to call registerObjectLayer
|
|
1055
|
+
const registerScript = `
|
|
1056
|
+
import hre from 'hardhat';
|
|
1057
|
+
const { ethers } = hre;
|
|
1058
|
+
async function main() {
|
|
1059
|
+
const [deployer] = await ethers.getSigners();
|
|
1060
|
+
const token = await ethers.getContractAt('ObjectLayerToken', '${contractAddress}');
|
|
1061
|
+
const tx = await token.registerObjectLayer(
|
|
1062
|
+
deployer.address,
|
|
1063
|
+
'${options.itemId}',
|
|
1064
|
+
'${canonicalCid}',
|
|
1065
|
+
${options.supply},
|
|
1066
|
+
'0x'
|
|
1067
|
+
);
|
|
1068
|
+
const receipt = await tx.wait();
|
|
1069
|
+
const tokenId = await token.computeTokenId('${options.itemId}');
|
|
1070
|
+
console.log('Registered tokenId:', tokenId.toString());
|
|
1071
|
+
console.log('Tx hash:', receipt.hash);
|
|
1072
|
+
}
|
|
1073
|
+
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
|
|
1074
|
+
`;
|
|
1075
|
+
const tmpScript = './hardhat/scripts/_cli_register_tmp.js';
|
|
1076
|
+
fs.writeFileSync(tmpScript, registerScript, 'utf8');
|
|
1077
|
+
try {
|
|
1078
|
+
shellExec(`cd hardhat && npx hardhat run scripts/_cli_register_tmp.js --network ${options.network}`);
|
|
1079
|
+
} finally {
|
|
1080
|
+
fs.removeSync(tmpScript);
|
|
1081
|
+
}
|
|
1082
|
+
});
|
|
1083
|
+
|
|
1084
|
+
// ── mint: Mint additional units of an already-registered ERC-1155 token ──
// Reads the per-network deployment artifact for the contract address, then
// generates and runs a one-shot Hardhat script that calls token.mint(...).
chain
  .command('mint')
  .description('Mint additional tokens for an existing token ID')
  .requiredOption('--token-id <tokenId>', 'ERC-1155 token ID (uint256)')
  .requiredOption('--to <address>', 'Recipient address')
  .requiredOption('--amount <amount>', 'Amount to mint')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .option('--env-path <envPath>', 'Env path', './.env')
  .action(async (options) => {
    if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });

    // --network is interpolated into a shell command and a file path below;
    // restrict it to a conservative charset to prevent shell/path injection.
    if (!/^[A-Za-z0-9_-]+$/.test(options.network)) {
      logger.error(`Invalid --network value: ${options.network}`);
      process.exit(1);
    }

    const deploymentsDir = './hardhat/deployments';
    const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}. Run "cyberia chain deploy-contract" first.`);
      process.exit(1);
    }
    const deployment = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));
    const contractAddress = deployment.address;

    logger.info(`Minting ${options.amount} of token ID ${options.tokenId} to ${options.to}`);

    // Every CLI-provided value spliced into the generated script is escaped
    // with JSON.stringify so quotes/backticks cannot break out of string
    // context (code-injection hardening). ethers v6 accepts decimal strings
    // wherever a uint256 (BigNumberish) is expected, so passing token-id and
    // amount as strings is behavior-compatible.
    const mintScript = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const token = await ethers.getContractAt('ObjectLayerToken', ${JSON.stringify(contractAddress)});
  const tx = await token.mint(${JSON.stringify(options.to)}, ${JSON.stringify(options.tokenId)}, ${JSON.stringify(
    options.amount,
  )}, '0x');
  const receipt = await tx.wait();
  console.log('Mint tx hash:', receipt.hash);
  const balance = await token.balanceOf(${JSON.stringify(options.to)}, ${JSON.stringify(options.tokenId)});
  console.log('New balance:', balance.toString());
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_mint_tmp.js';
    fs.writeFileSync(tmpScript, mintScript, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_mint_tmp.js --network ${options.network}`);
    } finally {
      // Always remove the temporary script, even when the hardhat run fails.
      fs.removeSync(tmpScript);
    }
  });
|
|
1127
|
+
|
|
1128
|
+
// ── status: Report Besu node connectivity and (optionally) contract state ──
chain
  .command('status')
  .description('Query Besu chain and ObjectLayerToken contract status')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .option('--env-path <envPath>', 'Env path', './.env')
  .action(async (options) => {
    // Load env overrides (RPC endpoints, credentials) before touching the chain.
    if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });

    // The deployment artifact is optional for this command: node status is
    // reported even when the contract has not been deployed on this network.
    const deploymentsDir = './hardhat/deployments';
    const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;

    logger.info('── Besu Chain Status ──');

    // Check node connectivity
    // Generated one-shot Hardhat script. It always prints network / block /
    // deployer info; the contract section is only spliced in when the
    // artifact exists. The artifact path is resolved to an absolute path
    // because the generated script runs with ./hardhat as its working dir.
    const statusScript = `
import hre from 'hardhat';
import { readFileSync } from 'fs';
const { ethers } = hre;
async function main() {
  const provider = ethers.provider;
  const network = await provider.getNetwork();
  const blockNumber = await provider.getBlockNumber();
  const [deployer] = await ethers.getSigners();
  const balance = await provider.getBalance(deployer.address);
  console.log('Network:', JSON.stringify({
    name: network.name,
    chainId: network.chainId.toString(),
    blockNumber,
    deployerAddress: deployer.address,
    deployerBalance: ethers.formatEther(balance) + ' ETH'
  }, null, 2));

  ${
    fs.existsSync(artifactPath)
      ? `
  const deployment = JSON.parse(readFileSync('${nodePath.resolve(artifactPath)}', 'utf8'));
  try {
    const token = await ethers.getContractAt('ObjectLayerToken', deployment.address);
    const cryptokoynSupply = await token['totalSupply(uint256)'](0);
    const deployerCKY = await token.balanceOf(deployer.address, 0);
    const isPaused = false; // pausable check would need try-catch
    console.log('Contract:', JSON.stringify({
      address: deployment.address,
      cryptokoynTotalSupply: ethers.formatEther(cryptokoynSupply) + ' CKY',
      deployerCryptokoynBalance: ethers.formatEther(deployerCKY) + ' CKY',
    }, null, 2));
  } catch (e) {
    console.log('Contract not accessible:', e.message);
  }
`
      : `console.log('No deployment artifact found for network ${options.network}.');`
  }
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_status_tmp.js';
    fs.writeFileSync(tmpScript, statusScript, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_status_tmp.js --network ${options.network}`);
    } finally {
      // Remove the temporary script even when the hardhat run fails.
      fs.removeSync(tmpScript);
    }
  });
|
|
1191
|
+
|
|
1192
|
+
// ── pause: Emergency-freeze all token transfers on ObjectLayerToken ──────
chain
  .command('pause')
  .description('Pause all token transfers on the ObjectLayerToken contract (emergency governance)')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .action(async (options) => {
    // Resolve the contract address from the per-network deployment artifact
    // written by "cyberia chain deploy-contract".
    const artifactPath = `./hardhat/deployments/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}`);
      process.exit(1);
    }
    const { address } = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));

    // One-shot Hardhat script that calls pause() as the configured signer.
    const scriptSource = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const token = await ethers.getContractAt('ObjectLayerToken', '${address}');
  const tx = await token.pause();
  await tx.wait();
  console.log('Contract PAUSED. All transfers are frozen.');
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_pause_tmp.js';
    fs.writeFileSync(tmpScript, scriptSource, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_pause_tmp.js --network ${options.network}`);
    } finally {
      // Clean up the temporary script regardless of run outcome.
      fs.removeSync(tmpScript);
    }
  });
|
|
1224
|
+
|
|
1225
|
+
// ── unpause: Resume token transfers after an emergency pause ─────────────
chain
  .command('unpause')
  .description('Unpause token transfers on the ObjectLayerToken contract')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .action(async (options) => {
    // The per-network deployment artifact carries the contract address.
    const artifactPath = `./hardhat/deployments/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}`);
      process.exit(1);
    }
    const { address: contractAddress } = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));

    // One-shot Hardhat script that invokes unpause() as the configured signer.
    const scriptBody = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const token = await ethers.getContractAt('ObjectLayerToken', '${contractAddress}');
  const tx = await token.unpause();
  await tx.wait();
  console.log('Contract UNPAUSED. Transfers resumed.');
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_unpause_tmp.js';
    fs.writeFileSync(tmpScript, scriptBody, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_unpause_tmp.js --network ${options.network}`);
    } finally {
      // Clean up the temporary script regardless of run outcome.
      fs.removeSync(tmpScript);
    }
  });
|
|
1257
|
+
|
|
1258
|
+
// ── key-gen: Generate Ethereum secp256k1 key pair ───────────────────────
chain
  .command('key-gen')
  .description('Generate a new Ethereum secp256k1 key pair for player identity or deployer accounts')
  .option(
    '--save',
    'Persist key files to default paths (private → ./engine-private/, public → ./hardhat/deployments/)',
  )
  .option('--private-path <path>', 'Custom path for the private key JSON file (overrides default)')
  .option('--public-path <path>', 'Custom path for the public key JSON file (overrides default)')
  .action(async (options) => {
    // ethers is imported lazily so this command only pays its load cost
    // when actually invoked.
    const { ethers } = await import('ethers');
    const freshWallet = ethers.Wallet.createRandom();
    const lowerCaseAddress = freshWallet.address.toLowerCase();

    // Private record: everything required to reconstruct/control the account.
    const privateRecord = {
      address: freshWallet.address,
      privateKey: freshWallet.privateKey,
      mnemonic: freshWallet.mnemonic ? freshWallet.mnemonic.phrase : null,
    };

    // Public record: safe-to-share identity data only.
    const publicRecord = {
      address: freshWallet.address,
      publicKey: freshWallet.publicKey,
    };

    logger.info('── New Ethereum Key Pair ──');
    logger.info(` Address : ${freshWallet.address}`);
    logger.info(` Private Key: ${freshWallet.privateKey}`);
    logger.info(` Public Key : ${freshWallet.publicKey}`);
    if (privateRecord.mnemonic) {
      logger.info(` Mnemonic : ${privateRecord.mnemonic}`);
    }

    // Persist only when requested: --save, or any explicit custom path.
    if (!(options.save || options.privatePath || options.publicPath)) return;

    const privateTarget = options.privatePath || `./engine-private/eth-networks/besu/${lowerCaseAddress}.key.json`;
    fs.ensureDirSync(nodePath.dirname(privateTarget));
    fs.writeJsonSync(privateTarget, privateRecord, { spaces: 2 });
    logger.info(` Private key saved to: ${privateTarget}`);
    logger.warn(' ⚠ Keep this file secure! Anyone with the private key controls this address.');

    const publicTarget = options.publicPath || `./hardhat/deployments/${lowerCaseAddress}.pub.json`;
    fs.ensureDirSync(nodePath.dirname(publicTarget));
    fs.writeJsonSync(publicTarget, publicRecord, { spaces: 2 });
    logger.info(` Public key saved to : ${publicTarget}`);
  });
|
|
1309
|
+
|
|
1310
|
+
// ── set-coinbase: Set the Besu deployer (coinbase) private key ──────────
chain
  .command('set-coinbase')
  .description(
    'Set the coinbase deployer private key used by hardhat.config.js for Besu network deployments.\n' +
      'Accepts either a raw hex private key via --private-key, or a .key.json file generated by "cyberia chain key-gen --save" via --from-file.',
  )
  .option('--private-key <hex>', 'Raw hex private key (with or without 0x prefix)')
  .option(
    '--from-file <path>',
    'Path to a .key.json file (e.g. ./engine-private/eth-networks/besu/<address>.key.json)',
  )
  .option(
    '--coinbase-path <path>',
    'Custom output path for the coinbase file',
    './engine-private/eth-networks/besu/coinbase',
  )
  .action(async (options) => {
    // The key can come from a key-gen JSON file (--from-file) or directly
    // from the command line (--private-key); --from-file takes precedence.
    let privateKey;

    if (options.fromFile) {
      if (!fs.existsSync(options.fromFile)) {
        logger.error(`Key file not found: ${options.fromFile}`);
        process.exit(1);
      }
      try {
        const keyData = fs.readJsonSync(options.fromFile);
        if (!keyData.privateKey) {
          logger.error(`Key file does not contain a "privateKey" field: ${options.fromFile}`);
          process.exit(1);
        }
        privateKey = keyData.privateKey;
        logger.info(`Read private key for address ${keyData.address || '(unknown)'} from ${options.fromFile}`);
      } catch (e) {
        // Covers unreadable or malformed JSON key files.
        logger.error(`Failed to parse key file: ${e.message}`);
        process.exit(1);
      }
    } else if (options.privateKey) {
      privateKey = options.privateKey;
    } else {
      // Neither source provided: nothing to do.
      logger.error('Provide either --private-key <hex> or --from-file <path>.');
      process.exit(1);
    }

    // Normalise: ensure 0x prefix
    privateKey = privateKey.trim();
    if (!privateKey.startsWith('0x')) privateKey = `0x${privateKey}`;

    // Validate the key by deriving the address
    // (an invalid hex key makes the ethers.Wallet constructor throw).
    try {
      const { ethers } = await import('ethers');
      const wallet = new ethers.Wallet(privateKey);
      logger.info(` Derived address: ${wallet.address}`);
    } catch (e) {
      logger.error(`Invalid private key: ${e.message}`);
      process.exit(1);
    }

    // Write the coinbase file
    // (plain-text private key; hardhat.config.js reads it for Besu deploys).
    const coinbasePath = options.coinbasePath;
    fs.ensureDirSync(nodePath.dirname(coinbasePath));
    fs.writeFileSync(coinbasePath, privateKey, 'utf8');
    logger.info(`Coinbase private key written to: ${coinbasePath}`);
    logger.warn('⚠ Keep this file secure! Anyone with the private key controls the deployer address.');
    logger.info('hardhat.config.js will read this file automatically for Besu network deployments.');
  });
|
|
1376
|
+
|
|
1377
|
+
// ── balance: Query token balance for an address ─────────────────────────
// Reads the per-network deployment artifact for the contract address, then
// runs a generated one-shot Hardhat script that prints balance, item id,
// metadata CID and total supply for the requested token ID.
chain
  .command('balance')
  .description('Query ERC-1155 token balance for an address (CKY fungible, semi-fungible, or non-fungible)')
  .requiredOption('--address <address>', 'Ethereum address to query')
  .option('--token-id <tokenId>', 'ERC-1155 token ID (default: 0 = CKY)', '0')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .option('--env-path <envPath>', 'Env path', './.env')
  .action(async (options) => {
    if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });

    // --network reaches a shell command and a file path below; reject
    // anything outside a conservative charset (injection hardening).
    if (!/^[A-Za-z0-9_-]+$/.test(options.network)) {
      logger.error(`Invalid --network value: ${options.network}`);
      process.exit(1);
    }

    const deploymentsDir = './hardhat/deployments';
    const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}. Run "cyberia chain deploy-contract" first.`);
      process.exit(1);
    }
    const deployment = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));
    const contractAddress = deployment.address;

    // Decide CKY formatting here, on the CLI side, instead of emitting the
    // original fragile mixed comparison (`<tokenId> === '0' || <tokenId> === 0`)
    // into the generated script.
    const isCky = String(options.tokenId).trim() === '0';

    // CLI-provided values are JSON.stringify-escaped before being spliced
    // into the generated script so they cannot break out of string context.
    // ethers v6 accepts decimal strings for uint256 arguments.
    const balanceScript = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const token = await ethers.getContractAt('ObjectLayerToken', ${JSON.stringify(contractAddress)});
  const balance = await token.balanceOf(${JSON.stringify(options.address)}, ${JSON.stringify(options.tokenId)});
  const itemId = await token.getItemId(${JSON.stringify(options.tokenId)});
  const metadataCid = await token.getMetadataCID(${JSON.stringify(options.tokenId)});
  let totalSupply;
  try { totalSupply = await token['totalSupply(uint256)'](${JSON.stringify(
    options.tokenId,
  )}); } catch (_) { totalSupply = 'N/A'; }
  console.log(JSON.stringify({
    address: ${JSON.stringify(options.address)},
    tokenId: ${JSON.stringify(String(options.tokenId))},
    itemId: itemId || '(unregistered)',
    balance: balance.toString(),
    formattedBalance: ${isCky} ? ethers.formatEther(balance) + ' CKY' : balance.toString() + ' units',
    totalSupply: totalSupply.toString(),
    metadataCid: metadataCid || '(none)',
  }, null, 2));
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_balance_tmp.js';
    fs.writeFileSync(tmpScript, balanceScript, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_balance_tmp.js --network ${options.network}`);
    } finally {
      // Always remove the temporary script, even when the hardhat run fails.
      fs.removeSync(tmpScript);
    }
  });
|
|
1427
|
+
|
|
1428
|
+
// ── transfer: Transfer ERC-1155 tokens between addresses ────────────────
// The transfer is relayed by the default Hardhat signer (the deployer), so
// --from must be an address the signer is authorised to move tokens for.
chain
  .command('transfer')
  .description('Transfer ERC-1155 tokens (CKY, semi-fungible resources, or non-fungible items)')
  .requiredOption('--from <address>', 'Sender address (must be the deployer/owner for relayed transfers)')
  .requiredOption('--to <address>', 'Recipient address')
  .requiredOption('--token-id <tokenId>', 'ERC-1155 token ID (0 = CKY)')
  .requiredOption('--amount <amount>', 'Amount to transfer')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .option('--env-path <envPath>', 'Env path', './.env')
  .action(async (options) => {
    if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });

    // --network is interpolated into a shell command and a file path;
    // restrict it to safe characters to prevent shell/path injection.
    if (!/^[A-Za-z0-9_-]+$/.test(options.network)) {
      logger.error(`Invalid --network value: ${options.network}`);
      process.exit(1);
    }

    const deploymentsDir = './hardhat/deployments';
    const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}. Run "cyberia chain deploy-contract" first.`);
      process.exit(1);
    }
    const deployment = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));
    const contractAddress = deployment.address;

    logger.info(
      `Transferring ${options.amount} of token ID ${options.tokenId} from ${options.from} to ${options.to}`,
    );

    // CLI-provided values are JSON.stringify-escaped before being embedded
    // in the generated script (code-injection hardening). The unused
    // `getSigners()` destructuring from the original script was dropped:
    // the contract call already signs with the default Hardhat signer.
    const transferScript = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const token = await ethers.getContractAt('ObjectLayerToken', ${JSON.stringify(contractAddress)});
  const tx = await token.safeTransferFrom(
    ${JSON.stringify(options.from)},
    ${JSON.stringify(options.to)},
    ${JSON.stringify(options.tokenId)},
    ${JSON.stringify(options.amount)},
    '0x'
  );
  const receipt = await tx.wait();
  console.log('Transfer tx hash:', receipt.hash);
  const senderBal = await token.balanceOf(${JSON.stringify(options.from)}, ${JSON.stringify(options.tokenId)});
  const recipientBal = await token.balanceOf(${JSON.stringify(options.to)}, ${JSON.stringify(options.tokenId)});
  console.log('Sender balance:', senderBal.toString());
  console.log('Recipient balance:', recipientBal.toString());
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_transfer_tmp.js';
    fs.writeFileSync(tmpScript, transferScript, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_transfer_tmp.js --network ${options.network}`);
    } finally {
      // Always remove the temporary script, even when the hardhat run fails.
      fs.removeSync(tmpScript);
    }
  });
|
|
1484
|
+
|
|
1485
|
+
// ── burn: Burn ERC-1155 tokens ──────────────────────────────────────────
// Destroys tokens held by --address (CKY supply reduction, crafting cost,
// or NFT destruction) via a generated one-shot Hardhat script.
chain
  .command('burn')
  .description(
    'Burn ERC-1155 tokens (CKY to reduce supply, semi-fungible for crafting cost, non-fungible to destroy)',
  )
  .requiredOption('--address <address>', 'Address holding the tokens to burn')
  .requiredOption('--token-id <tokenId>', 'ERC-1155 token ID (0 = CKY)')
  .requiredOption('--amount <amount>', 'Amount to burn')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .option('--env-path <envPath>', 'Env path', './.env')
  .action(async (options) => {
    if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });

    // --network is interpolated into a shell command and a file path;
    // restrict it to safe characters to prevent shell/path injection.
    if (!/^[A-Za-z0-9_-]+$/.test(options.network)) {
      logger.error(`Invalid --network value: ${options.network}`);
      process.exit(1);
    }

    const deploymentsDir = './hardhat/deployments';
    const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}. Run "cyberia chain deploy-contract" first.`);
      process.exit(1);
    }
    const deployment = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));
    const contractAddress = deployment.address;

    logger.info(`Burning ${options.amount} of token ID ${options.tokenId} from ${options.address}`);

    // CLI-provided values are JSON.stringify-escaped before being spliced
    // into the generated script so quotes cannot break out of string context
    // (code-injection hardening). ethers v6 accepts decimal strings for
    // uint256 arguments.
    const burnScript = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const token = await ethers.getContractAt('ObjectLayerToken', ${JSON.stringify(contractAddress)});
  const tx = await token.burn(${JSON.stringify(options.address)}, ${JSON.stringify(options.tokenId)}, ${JSON.stringify(
    options.amount,
  )});
  const receipt = await tx.wait();
  console.log('Burn tx hash:', receipt.hash);
  const remaining = await token.balanceOf(${JSON.stringify(options.address)}, ${JSON.stringify(options.tokenId)});
  console.log('Remaining balance:', remaining.toString());
  let totalSupply;
  try { totalSupply = await token['totalSupply(uint256)'](${JSON.stringify(
    options.tokenId,
  )}); } catch (_) { totalSupply = 'N/A'; }
  console.log('Total supply after burn:', totalSupply.toString());
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_burn_tmp.js';
    fs.writeFileSync(tmpScript, burnScript, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_burn_tmp.js --network ${options.network}`);
    } finally {
      // Always remove the temporary script, even when the hardhat run fails.
      fs.removeSync(tmpScript);
    }
  });
|
|
1534
|
+
|
|
1535
|
+
// ── batch-register: Register multiple Object Layer items in one tx ──────
chain
  .command('batch-register')
  .description(
    'Batch-register multiple Object Layer items on-chain in a single transaction.\n' +
      'When --from-db is set, the canonical CID for every item is resolved from MongoDB\n' +
      '(fast-json-stable-stringify of objectLayer.data), overriding any "cid" values in the JSON input.',
  )
  .requiredOption('--items <json>', 'JSON array of items: [{"itemId":"wood","cid":"bafk...","supply":500000}, ...]')
  .option('--from-db', 'Resolve canonical CIDs from the ObjectLayer MongoDB documents (recommended)')
  .option('--network <network>', 'Hardhat network name', 'besu-k8s')
  .option('--env-path <envPath>', 'Env path', './.env')
  .option('--mongo-host <mongoHost>', 'MongoDB host override (used with --from-db)')
  .action(async (options) => {
    // Load env overrides (RPC endpoints, DB credentials) first.
    if (fs.existsSync(options.envPath)) dotenv.config({ path: options.envPath, override: true });

    // Parse and validate the --items payload up front so we fail fast
    // before touching the database or the chain.
    let items;
    try {
      items = JSON.parse(options.items);
      if (!Array.isArray(items) || items.length === 0) throw new Error('Must be a non-empty array');
    } catch (e) {
      logger.error(`Invalid --items JSON: ${e.message}`);
      process.exit(1);
    }

    // The deployment artifact (written by "cyberia chain deploy-contract")
    // provides the per-network contract address.
    const deploymentsDir = './hardhat/deployments';
    const artifactPath = `${deploymentsDir}/${options.network}-ObjectLayerToken.json`;
    if (!fs.existsSync(artifactPath)) {
      logger.error(`Deployment artifact not found: ${artifactPath}. Run "cyberia chain deploy-contract" first.`);
      process.exit(1);
    }
    const deployment = JSON.parse(fs.readFileSync(artifactPath, 'utf8'));
    const contractAddress = deployment.address;

    // ── Resolve canonical CIDs when --from-db is set ────────────────
    if (options.fromDb) {
      let ObjectLayer, host, path;
      try {
        // One shared DB connection is reused for every item in the batch.
        ({ ObjectLayer, host, path } = await connectDbForChain({
          envPath: options.envPath,
          mongoHost: options.mongoHost,
        }));
      } catch (dbErr) {
        logger.error(`Failed to connect to database: ${dbErr.message}`);
        process.exit(1);
      }

      // Each item's cid is overwritten with the canonical CID from MongoDB;
      // a mismatch with a user-supplied cid is logged but not fatal.
      for (const item of items) {
        try {
          const resolved = await resolveCanonicalCid({
            itemId: item.itemId,
            ObjectLayer,
            ipfsClient: IpfsClient,
            options: { host, path },
          });

          if (item.cid && item.cid !== resolved.cid) {
            logger.warn(
              `Item "${item.itemId}": provided cid "${item.cid}" differs from canonical "${resolved.cid}" (${resolved.source}). Using canonical.`,
            );
          }

          item.cid = resolved.cid;
          logger.info(` "${item.itemId}" canonical CID (${resolved.source}): ${resolved.cid}`);
        } catch (resolveErr) {
          // A single unresolvable item aborts the whole batch: the on-chain
          // batch transaction is all-or-nothing.
          logger.error(`Failed to resolve canonical CID for "${item.itemId}": ${resolveErr.message}`);
          process.exit(1);
        }
      }

      try {
        await DataBaseProvider.instance[`${host}${path}`].mongoose.close();
      } catch (_) {
        /* ignore close errors */
      }
    }

    // Column-wise arrays in the shape batchRegisterObjectLayers expects.
    // NOTE(review): `i.supply || 1` coerces a supply of 0 to 1 — presumably
    // intentional (zero supply is meaningless), but confirm against the
    // contract's semantics.
    const itemIds = items.map((i) => i.itemId);
    const cids = items.map((i) => i.cid || '');
    const supplies = items.map((i) => i.supply || 1);

    logger.info(`Batch-registering ${items.length} items on contract ${contractAddress}`);
    for (const item of items) {
      logger.info(` - ${item.itemId} (supply: ${item.supply || 1}, cid: ${item.cid || '(none)'})`);
    }

    // Generated one-shot Hardhat script; the arrays are embedded via
    // JSON.stringify, which also makes them safe JS literals.
    const batchScript = `
import hre from 'hardhat';
const { ethers } = hre;
async function main() {
  const [deployer] = await ethers.getSigners();
  const token = await ethers.getContractAt('ObjectLayerToken', '${contractAddress}');
  const itemIds = ${JSON.stringify(itemIds)};
  const cids = ${JSON.stringify(cids)};
  const supplies = ${JSON.stringify(supplies)};
  const tx = await token.batchRegisterObjectLayers(
    deployer.address,
    itemIds,
    cids,
    supplies,
    '0x'
  );
  const receipt = await tx.wait();
  console.log('Batch register tx hash:', receipt.hash);
  for (const id of itemIds) {
    const tokenId = await token.computeTokenId(id);
    const balance = await token.balanceOf(deployer.address, tokenId);
    console.log(' ' + id + ' -> tokenId:', tokenId.toString(), ' balance:', balance.toString());
  }
}
main().then(() => process.exit(0)).catch(e => { console.error(e); process.exit(1); });
`;
    const tmpScript = './hardhat/scripts/_cli_batch_register_tmp.js';
    fs.writeFileSync(tmpScript, batchScript, 'utf8');
    try {
      shellExec(`cd hardhat && npx hardhat run scripts/_cli_batch_register_tmp.js --network ${options.network}`);
    } finally {
      // Always remove the temporary script, even when the hardhat run fails.
      fs.removeSync(tmpScript);
    }
  });
|
|
1655
|
+
|
|
1656
|
+
if (underpostProgram.commands.find((c) => c._name == process.argv[2]))
|
|
1657
|
+
throw new Error('Trigger underpost passthrough');
|
|
707
1658
|
|
|
708
1659
|
program.parse();
|
|
709
1660
|
} catch (error) {
|