@subql/node-ethereum 1.10.1-0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +7 -0
- package/LICENSE +201 -0
- package/README.md +76 -0
- package/bin/run +4 -0
- package/bin/run.cmd +3 -0
- package/dist/.tsbuildinfo +1 -0
- package/dist/app.module.d.ts +2 -0
- package/dist/app.module.js +35 -0
- package/dist/app.module.js.map +1 -0
- package/dist/configure/SubqueryProject.d.ts +29 -0
- package/dist/configure/SubqueryProject.js +82 -0
- package/dist/configure/SubqueryProject.js.map +1 -0
- package/dist/configure/configure.module.d.ts +7 -0
- package/dist/configure/configure.module.js +172 -0
- package/dist/configure/configure.module.js.map +1 -0
- package/dist/configure/configure.module.spec.d.ts +1 -0
- package/dist/configure/configure.module.spec.js +26 -0
- package/dist/configure/configure.module.spec.js.map +1 -0
- package/dist/ethereum/api.ethereum.d.ts +21 -0
- package/dist/ethereum/api.ethereum.js +190 -0
- package/dist/ethereum/api.ethereum.js.map +1 -0
- package/dist/ethereum/api.service.ethereum.d.ts +8 -0
- package/dist/ethereum/api.service.ethereum.js +57 -0
- package/dist/ethereum/api.service.ethereum.js.map +1 -0
- package/dist/ethereum/block.ethereum.d.ts +15 -0
- package/dist/ethereum/block.ethereum.js +87 -0
- package/dist/ethereum/block.ethereum.js.map +1 -0
- package/dist/ethereum/index.d.ts +2 -0
- package/dist/ethereum/index.js +21 -0
- package/dist/ethereum/index.js.map +1 -0
- package/dist/ethereum/utils.ethereum.d.ts +6 -0
- package/dist/ethereum/utils.ethereum.js +131 -0
- package/dist/ethereum/utils.ethereum.js.map +1 -0
- package/dist/indexer/dictionary.service.d.ts +7 -0
- package/dist/indexer/dictionary.service.js +29 -0
- package/dist/indexer/dictionary.service.js.map +1 -0
- package/dist/indexer/ds-processor.service.d.ts +26 -0
- package/dist/indexer/ds-processor.service.js +133 -0
- package/dist/indexer/ds-processor.service.js.map +1 -0
- package/dist/indexer/dynamic-ds.service.d.ts +23 -0
- package/dist/indexer/dynamic-ds.service.js +105 -0
- package/dist/indexer/dynamic-ds.service.js.map +1 -0
- package/dist/indexer/fetch.module.d.ts +2 -0
- package/dist/indexer/fetch.module.js +68 -0
- package/dist/indexer/fetch.module.js.map +1 -0
- package/dist/indexer/fetch.service.d.ts +43 -0
- package/dist/indexer/fetch.service.js +359 -0
- package/dist/indexer/fetch.service.js.map +1 -0
- package/dist/indexer/indexer.manager.d.ts +36 -0
- package/dist/indexer/indexer.manager.js +256 -0
- package/dist/indexer/indexer.manager.js.map +1 -0
- package/dist/indexer/indexer.module.d.ts +2 -0
- package/dist/indexer/indexer.module.js +52 -0
- package/dist/indexer/indexer.module.js.map +1 -0
- package/dist/indexer/project.service.d.ts +39 -0
- package/dist/indexer/project.service.js +255 -0
- package/dist/indexer/project.service.js.map +1 -0
- package/dist/indexer/sandbox.service.d.ts +12 -0
- package/dist/indexer/sandbox.service.js +58 -0
- package/dist/indexer/sandbox.service.js.map +1 -0
- package/dist/indexer/types.d.ts +10 -0
- package/dist/indexer/types.js +11 -0
- package/dist/indexer/types.js.map +1 -0
- package/dist/indexer/worker/block-dispatcher.service.d.ts +69 -0
- package/dist/indexer/worker/block-dispatcher.service.js +356 -0
- package/dist/indexer/worker/block-dispatcher.service.js.map +1 -0
- package/dist/indexer/worker/worker.d.ts +14 -0
- package/dist/indexer/worker/worker.js +85 -0
- package/dist/indexer/worker/worker.js.map +1 -0
- package/dist/indexer/worker/worker.module.d.ts +2 -0
- package/dist/indexer/worker/worker.module.js +33 -0
- package/dist/indexer/worker/worker.module.js.map +1 -0
- package/dist/indexer/worker/worker.service.d.ts +28 -0
- package/dist/indexer/worker/worker.service.js +79 -0
- package/dist/indexer/worker/worker.service.js.map +1 -0
- package/dist/init.d.ts +1 -0
- package/dist/init.js +54 -0
- package/dist/init.js.map +1 -0
- package/dist/main.d.ts +1 -0
- package/dist/main.js +14 -0
- package/dist/main.js.map +1 -0
- package/dist/meta/meta.controller.d.ts +23 -0
- package/dist/meta/meta.controller.js +36 -0
- package/dist/meta/meta.controller.js.map +1 -0
- package/dist/meta/meta.module.d.ts +2 -0
- package/dist/meta/meta.module.js +77 -0
- package/dist/meta/meta.module.js.map +1 -0
- package/dist/meta/meta.service.d.ts +42 -0
- package/dist/meta/meta.service.js +110 -0
- package/dist/meta/meta.service.js.map +1 -0
- package/dist/subcommands/forceClean.init.d.ts +1 -0
- package/dist/subcommands/forceClean.init.js +25 -0
- package/dist/subcommands/forceClean.init.js.map +1 -0
- package/dist/subcommands/forceClean.module.d.ts +4 -0
- package/dist/subcommands/forceClean.module.js +38 -0
- package/dist/subcommands/forceClean.module.js.map +1 -0
- package/dist/subcommands/forceClean.service.d.ts +8 -0
- package/dist/subcommands/forceClean.service.js +65 -0
- package/dist/subcommands/forceClean.service.js.map +1 -0
- package/dist/subcommands/reindex.init.d.ts +1 -0
- package/dist/subcommands/reindex.init.js +25 -0
- package/dist/subcommands/reindex.init.js.map +1 -0
- package/dist/subcommands/reindex.module.d.ts +4 -0
- package/dist/subcommands/reindex.module.js +39 -0
- package/dist/subcommands/reindex.module.js.map +1 -0
- package/dist/subcommands/reindex.service.d.ts +24 -0
- package/dist/subcommands/reindex.service.js +114 -0
- package/dist/subcommands/reindex.service.js.map +1 -0
- package/dist/utils/project.d.ts +13 -0
- package/dist/utils/project.js +191 -0
- package/dist/utils/project.js.map +1 -0
- package/dist/utils/string.d.ts +4 -0
- package/dist/utils/string.js +32 -0
- package/dist/utils/string.js.map +1 -0
- package/dist/yargs.d.ts +154 -0
- package/dist/yargs.js +193 -0
- package/dist/yargs.js.map +1 -0
- package/package.json +77 -0
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Copyright 2020-2022 OnFinality Limited authors & contributors
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
5
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
6
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
7
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
8
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
9
|
+
};
|
|
10
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
11
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
12
|
+
};
|
|
13
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
14
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
15
|
+
};
|
|
16
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
17
|
+
exports.ForceCleanService = void 0;
|
|
18
|
+
const fs_1 = __importDefault(require("fs"));
|
|
19
|
+
const common_1 = require("@nestjs/common");
|
|
20
|
+
const node_core_1 = require("@subql/node-core");
|
|
21
|
+
const sequelize_1 = require("sequelize");
|
|
22
|
+
const logger = (0, node_core_1.getLogger)('Force-clean');
|
|
23
|
+
let ForceCleanService = class ForceCleanService {
|
|
24
|
+
constructor(sequelize, nodeConfig) {
|
|
25
|
+
this.sequelize = sequelize;
|
|
26
|
+
this.nodeConfig = nodeConfig;
|
|
27
|
+
}
|
|
28
|
+
async forceClean() {
|
|
29
|
+
const schema = await (0, node_core_1.getExistingProjectSchema)(this.nodeConfig, this.sequelize);
|
|
30
|
+
if (!schema) {
|
|
31
|
+
logger.error('Unable to locate schema');
|
|
32
|
+
throw new Error('Schema does not exist.');
|
|
33
|
+
}
|
|
34
|
+
try {
|
|
35
|
+
// drop existing project schema and metadata table
|
|
36
|
+
await this.sequelize.dropSchema(`"${schema}"`, {
|
|
37
|
+
logging: false,
|
|
38
|
+
benchmark: false,
|
|
39
|
+
});
|
|
40
|
+
// remove schema from subquery table (might not exist)
|
|
41
|
+
await this.sequelize.query(` DELETE
|
|
42
|
+
FROM public.subqueries
|
|
43
|
+
WHERE name = :name`, {
|
|
44
|
+
replacements: { name: this.nodeConfig.subqueryName },
|
|
45
|
+
type: sequelize_1.QueryTypes.DELETE,
|
|
46
|
+
});
|
|
47
|
+
logger.info('force cleaned schema and tables');
|
|
48
|
+
if (fs_1.default.existsSync(this.nodeConfig.mmrPath)) {
|
|
49
|
+
await fs_1.default.promises.unlink(this.nodeConfig.mmrPath);
|
|
50
|
+
logger.info('force cleaned file based mmr');
|
|
51
|
+
}
|
|
52
|
+
}
|
|
53
|
+
catch (err) {
|
|
54
|
+
logger.error(err, 'failed to force clean');
|
|
55
|
+
throw err;
|
|
56
|
+
}
|
|
57
|
+
}
|
|
58
|
+
};
|
|
59
|
+
ForceCleanService = __decorate([
|
|
60
|
+
(0, common_1.Injectable)(),
|
|
61
|
+
__metadata("design:paramtypes", [sequelize_1.Sequelize,
|
|
62
|
+
node_core_1.NodeConfig])
|
|
63
|
+
], ForceCleanService);
|
|
64
|
+
exports.ForceCleanService = ForceCleanService;
|
|
65
|
+
//# sourceMappingURL=forceClean.service.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"forceClean.service.js","sourceRoot":"","sources":["../../src/subcommands/forceClean.service.ts"],"names":[],"mappings":";AAAA,gEAAgE;AAChE,sCAAsC;;;;;;;;;;;;;;;AAEtC,4CAAoB;AACpB,2CAA4C;AAC5C,gDAI0B;AAC1B,yCAAkD;AAElD,MAAM,MAAM,GAAG,IAAA,qBAAS,EAAC,aAAa,CAAC,CAAC;AAGjC,IAAM,iBAAiB,GAAvB,MAAM,iBAAiB;IAC5B,YACmB,SAAoB,EACpB,UAAsB;QADtB,cAAS,GAAT,SAAS,CAAW;QACpB,eAAU,GAAV,UAAU,CAAY;IACtC,CAAC;IAEJ,KAAK,CAAC,UAAU;QACd,MAAM,MAAM,GAAG,MAAM,IAAA,oCAAwB,EAC3C,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,SAAS,CACf,CAAC;QACF,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,CAAC,KAAK,CAAC,yBAAyB,CAAC,CAAC;YACxC,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;SAC3C;QAED,IAAI;YACF,kDAAkD;YAClD,MAAM,IAAI,CAAC,SAAS,CAAC,UAAU,CAAC,IAAI,MAAM,GAAG,EAAE;gBAC7C,OAAO,EAAE,KAAK;gBACd,SAAS,EAAE,KAAK;aACjB,CAAC,CAAC;YAEH,sDAAsD;YACtD,MAAM,IAAI,CAAC,SAAS,CAAC,KAAK,CACxB;;qCAE6B,EAC7B;gBACE,YAAY,EAAE,EAAE,IAAI,EAAE,IAAI,CAAC,UAAU,CAAC,YAAY,EAAE;gBACpD,IAAI,EAAE,sBAAU,CAAC,MAAM;aACxB,CACF,CAAC;YAEF,MAAM,CAAC,IAAI,CAAC,iCAAiC,CAAC,CAAC;YAE/C,IAAI,YAAE,CAAC,UAAU,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,EAAE;gBAC1C,MAAM,YAAE,CAAC,QAAQ,CAAC,MAAM,CAAC,IAAI,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC;gBAClD,MAAM,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC;aAC7C;SACF;QAAC,OAAO,GAAG,EAAE;YACZ,MAAM,CAAC,KAAK,CAAC,GAAG,EAAE,uBAAuB,CAAC,CAAC;YAC3C,MAAM,GAAG,CAAC;SACX;IACH,CAAC;CACF,CAAA;AA7CY,iBAAiB;IAD7B,IAAA,mBAAU,GAAE;qCAGmB,qBAAS;QACR,sBAAU;GAH9B,iBAAiB,CA6C7B;AA7CY,8CAAiB","sourcesContent":["// Copyright 2020-2022 OnFinality Limited authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\nimport fs from 'fs';\nimport { Injectable } from '@nestjs/common';\nimport {\n getLogger,\n NodeConfig,\n getExistingProjectSchema,\n} from '@subql/node-core';\nimport { QueryTypes, Sequelize } from 'sequelize';\n\nconst logger = getLogger('Force-clean');\n\n@Injectable()\nexport class ForceCleanService {\n constructor(\n private readonly sequelize: Sequelize,\n private readonly nodeConfig: NodeConfig,\n ) {}\n\n async forceClean(): Promise<void> {\n const 
schema = await getExistingProjectSchema(\n this.nodeConfig,\n this.sequelize,\n );\n if (!schema) {\n logger.error('Unable to locate schema');\n throw new Error('Schema does not exist.');\n }\n\n try {\n // drop existing project schema and metadata table\n await this.sequelize.dropSchema(`\"${schema}\"`, {\n logging: false,\n benchmark: false,\n });\n\n // remove schema from subquery table (might not exist)\n await this.sequelize.query(\n ` DELETE\n FROM public.subqueries\n WHERE name = :name`,\n {\n replacements: { name: this.nodeConfig.subqueryName },\n type: QueryTypes.DELETE,\n },\n );\n\n logger.info('force cleaned schema and tables');\n\n if (fs.existsSync(this.nodeConfig.mmrPath)) {\n await fs.promises.unlink(this.nodeConfig.mmrPath);\n logger.info('force cleaned file based mmr');\n }\n } catch (err) {\n logger.error(err, 'failed to force clean');\n throw err;\n }\n }\n}\n"]}
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
export declare function reindexInit(targetHeight: number): Promise<void>;
|
|
@@ -0,0 +1,25 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Copyright 2020-2022 OnFinality Limited authors & contributors
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
5
|
+
exports.reindexInit = void 0;
|
|
6
|
+
const core_1 = require("@nestjs/core");
|
|
7
|
+
const node_core_1 = require("@subql/node-core");
|
|
8
|
+
const reindex_module_1 = require("./reindex.module");
|
|
9
|
+
const reindex_service_1 = require("./reindex.service");
|
|
10
|
+
const logger = (0, node_core_1.getLogger)('CLI-Reindex');
|
|
11
|
+
async function reindexInit(targetHeight) {
|
|
12
|
+
try {
|
|
13
|
+
const app = await core_1.NestFactory.create(reindex_module_1.ReindexModule);
|
|
14
|
+
await app.init();
|
|
15
|
+
const reindexService = app.get(reindex_service_1.ReindexService);
|
|
16
|
+
await reindexService.reindex(targetHeight);
|
|
17
|
+
}
|
|
18
|
+
catch (e) {
|
|
19
|
+
logger.error(e, 'Reindex failed to execute');
|
|
20
|
+
process.exit(1);
|
|
21
|
+
}
|
|
22
|
+
process.exit(0);
|
|
23
|
+
}
|
|
24
|
+
exports.reindexInit = reindexInit;
|
|
25
|
+
//# sourceMappingURL=reindex.init.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"reindex.init.js","sourceRoot":"","sources":["../../src/subcommands/reindex.init.ts"],"names":[],"mappings":";AAAA,gEAAgE;AAChE,sCAAsC;;;AAEtC,uCAA2C;AAC3C,gDAA6C;AAC7C,qDAAiD;AACjD,uDAAmD;AAEnD,MAAM,MAAM,GAAG,IAAA,qBAAS,EAAC,aAAa,CAAC,CAAC;AACjC,KAAK,UAAU,WAAW,CAAC,YAAoB;IACpD,IAAI;QACF,MAAM,GAAG,GAAG,MAAM,kBAAW,CAAC,MAAM,CAAC,8BAAa,CAAC,CAAC;QAEpD,MAAM,GAAG,CAAC,IAAI,EAAE,CAAC;QACjB,MAAM,cAAc,GAAG,GAAG,CAAC,GAAG,CAAC,gCAAc,CAAC,CAAC;QAC/C,MAAM,cAAc,CAAC,OAAO,CAAC,YAAY,CAAC,CAAC;KAC5C;IAAC,OAAO,CAAC,EAAE;QACV,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,2BAA2B,CAAC,CAAC;QAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;KACjB;IACD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;AAClB,CAAC;AAZD,kCAYC","sourcesContent":["// Copyright 2020-2022 OnFinality Limited authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\nimport { NestFactory } from '@nestjs/core';\nimport { getLogger } from '@subql/node-core';\nimport { ReindexModule } from './reindex.module';\nimport { ReindexService } from './reindex.service';\n\nconst logger = getLogger('CLI-Reindex');\nexport async function reindexInit(targetHeight: number): Promise<void> {\n try {\n const app = await NestFactory.create(ReindexModule);\n\n await app.init();\n const reindexService = app.get(ReindexService);\n await reindexService.reindex(targetHeight);\n } catch (e) {\n logger.error(e, 'Reindex failed to execute');\n process.exit(1);\n }\n process.exit(0);\n}\n"]}
|
|
@@ -0,0 +1,39 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Copyright 2020-2022 OnFinality Limited authors & contributors
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
5
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
6
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
7
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
8
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
9
|
+
};
|
|
10
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
11
|
+
exports.ReindexModule = exports.ReindexFeatureModule = void 0;
|
|
12
|
+
const common_1 = require("@nestjs/common");
|
|
13
|
+
const node_core_1 = require("@subql/node-core");
|
|
14
|
+
const configure_module_1 = require("../configure/configure.module");
|
|
15
|
+
const forceClean_service_1 = require("./forceClean.service");
|
|
16
|
+
const reindex_service_1 = require("./reindex.service");
|
|
17
|
+
let ReindexFeatureModule = class ReindexFeatureModule {
|
|
18
|
+
};
|
|
19
|
+
ReindexFeatureModule = __decorate([
|
|
20
|
+
(0, common_1.Module)({
|
|
21
|
+
providers: [node_core_1.StoreService, reindex_service_1.ReindexService, node_core_1.MmrService, forceClean_service_1.ForceCleanService],
|
|
22
|
+
controllers: [],
|
|
23
|
+
})
|
|
24
|
+
], ReindexFeatureModule);
|
|
25
|
+
exports.ReindexFeatureModule = ReindexFeatureModule;
|
|
26
|
+
let ReindexModule = class ReindexModule {
|
|
27
|
+
};
|
|
28
|
+
ReindexModule = __decorate([
|
|
29
|
+
(0, common_1.Module)({
|
|
30
|
+
imports: [
|
|
31
|
+
node_core_1.DbModule.forRoot(),
|
|
32
|
+
configure_module_1.ConfigureModule.register(),
|
|
33
|
+
ReindexFeatureModule,
|
|
34
|
+
],
|
|
35
|
+
controllers: [],
|
|
36
|
+
})
|
|
37
|
+
], ReindexModule);
|
|
38
|
+
exports.ReindexModule = ReindexModule;
|
|
39
|
+
//# sourceMappingURL=reindex.module.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"reindex.module.js","sourceRoot":"","sources":["../../src/subcommands/reindex.module.ts"],"names":[],"mappings":";AAAA,gEAAgE;AAChE,sCAAsC;;;;;;;;;AAEtC,2CAAwC;AACxC,gDAAsE;AACtE,oEAAgE;AAChE,6DAAyD;AACzD,uDAAmD;AAM5C,IAAM,oBAAoB,GAA1B,MAAM,oBAAoB;CAAG,CAAA;AAAvB,oBAAoB;IAJhC,IAAA,eAAM,EAAC;QACN,SAAS,EAAE,CAAC,wBAAY,EAAE,gCAAc,EAAE,sBAAU,EAAE,sCAAiB,CAAC;QACxE,WAAW,EAAE,EAAE;KAChB,CAAC;GACW,oBAAoB,CAAG;AAAvB,oDAAoB;AAU1B,IAAM,aAAa,GAAnB,MAAM,aAAa;CAAG,CAAA;AAAhB,aAAa;IARzB,IAAA,eAAM,EAAC;QACN,OAAO,EAAE;YACP,oBAAQ,CAAC,OAAO,EAAE;YAClB,kCAAe,CAAC,QAAQ,EAAE;YAC1B,oBAAoB;SACrB;QACD,WAAW,EAAE,EAAE;KAChB,CAAC;GACW,aAAa,CAAG;AAAhB,sCAAa","sourcesContent":["// Copyright 2020-2022 OnFinality Limited authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\nimport { Module } from '@nestjs/common';\nimport { DbModule, MmrService, StoreService } from '@subql/node-core';\nimport { ConfigureModule } from '../configure/configure.module';\nimport { ForceCleanService } from './forceClean.service';\nimport { ReindexService } from './reindex.service';\n\n@Module({\n providers: [StoreService, ReindexService, MmrService, ForceCleanService],\n controllers: [],\n})\nexport class ReindexFeatureModule {}\n\n@Module({\n imports: [\n DbModule.forRoot(),\n ConfigureModule.register(),\n ReindexFeatureModule,\n ],\n controllers: [],\n})\nexport class ReindexModule {}\n"]}
|
|
@@ -0,0 +1,24 @@
|
|
|
1
|
+
import { MmrService, NodeConfig, StoreService } from '@subql/node-core';
|
|
2
|
+
import { Sequelize } from 'sequelize';
|
|
3
|
+
import { SubqueryProject } from '../configure/SubqueryProject';
|
|
4
|
+
import { ForceCleanService } from './forceClean.service';
|
|
5
|
+
export declare class ReindexService {
|
|
6
|
+
private readonly sequelize;
|
|
7
|
+
private readonly nodeConfig;
|
|
8
|
+
private readonly storeService;
|
|
9
|
+
private readonly mmrService;
|
|
10
|
+
private readonly project;
|
|
11
|
+
private readonly forceCleanService;
|
|
12
|
+
private schema;
|
|
13
|
+
private metadataRepo;
|
|
14
|
+
private specName;
|
|
15
|
+
private startHeight;
|
|
16
|
+
constructor(sequelize: Sequelize, nodeConfig: NodeConfig, storeService: StoreService, mmrService: MmrService, project: SubqueryProject, forceCleanService: ForceCleanService);
|
|
17
|
+
private getExistingProjectSchema;
|
|
18
|
+
private getLastProcessedHeight;
|
|
19
|
+
private getMetadataBlockOffset;
|
|
20
|
+
private getMetadataSpecName;
|
|
21
|
+
private initDbSchema;
|
|
22
|
+
private getStartBlockFromDataSources;
|
|
23
|
+
reindex(targetBlockHeight: number): Promise<void>;
|
|
24
|
+
}
|
|
@@ -0,0 +1,114 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Copyright 2020-2022 OnFinality Limited authors & contributors
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) {
|
|
5
|
+
var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
|
|
6
|
+
if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
|
|
7
|
+
else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
|
|
8
|
+
return c > 3 && r && Object.defineProperty(target, key, r), r;
|
|
9
|
+
};
|
|
10
|
+
var __metadata = (this && this.__metadata) || function (k, v) {
|
|
11
|
+
if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(k, v);
|
|
12
|
+
};
|
|
13
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
14
|
+
exports.ReindexService = void 0;
|
|
15
|
+
const common_1 = require("@nestjs/common");
|
|
16
|
+
const node_core_1 = require("@subql/node-core");
|
|
17
|
+
const sequelize_1 = require("sequelize");
|
|
18
|
+
const SubqueryProject_1 = require("../configure/SubqueryProject");
|
|
19
|
+
const project_1 = require("../utils/project");
|
|
20
|
+
const forceClean_service_1 = require("./forceClean.service");
|
|
21
|
+
const logger = (0, node_core_1.getLogger)('Reindex');
|
|
22
|
+
let ReindexService = class ReindexService {
|
|
23
|
+
constructor(sequelize, nodeConfig, storeService, mmrService, project, forceCleanService) {
|
|
24
|
+
this.sequelize = sequelize;
|
|
25
|
+
this.nodeConfig = nodeConfig;
|
|
26
|
+
this.storeService = storeService;
|
|
27
|
+
this.mmrService = mmrService;
|
|
28
|
+
this.project = project;
|
|
29
|
+
this.forceCleanService = forceCleanService;
|
|
30
|
+
}
|
|
31
|
+
async getExistingProjectSchema() {
|
|
32
|
+
return (0, node_core_1.getExistingProjectSchema)(this.nodeConfig, this.sequelize);
|
|
33
|
+
}
|
|
34
|
+
async getLastProcessedHeight() {
|
|
35
|
+
return (0, node_core_1.getMetaDataInfo)(this.metadataRepo, 'lastProcessedHeight');
|
|
36
|
+
}
|
|
37
|
+
async getMetadataBlockOffset() {
|
|
38
|
+
return (0, node_core_1.getMetaDataInfo)(this.metadataRepo, 'blockOffset');
|
|
39
|
+
}
|
|
40
|
+
async getMetadataSpecName() {
|
|
41
|
+
const res = await this.metadataRepo.findOne({
|
|
42
|
+
where: { key: 'specName' },
|
|
43
|
+
});
|
|
44
|
+
return res === null || res === void 0 ? void 0 : res.value;
|
|
45
|
+
}
|
|
46
|
+
async initDbSchema() {
|
|
47
|
+
await (0, project_1.initDbSchema)(this.project, this.schema, this.storeService);
|
|
48
|
+
}
|
|
49
|
+
// eslint-disable-next-line @typescript-eslint/require-await
|
|
50
|
+
async getStartBlockFromDataSources() {
|
|
51
|
+
const datasources = this.project.dataSources;
|
|
52
|
+
const startBlocksList = datasources.map((item) => { var _a; return (_a = item.startBlock) !== null && _a !== void 0 ? _a : 1; });
|
|
53
|
+
if (startBlocksList.length === 0) {
|
|
54
|
+
logger.error(`Failed to find a valid datasource, Please check your endpoint if specName filter is used.`);
|
|
55
|
+
process.exit(1);
|
|
56
|
+
}
|
|
57
|
+
else {
|
|
58
|
+
return Math.min(...startBlocksList);
|
|
59
|
+
}
|
|
60
|
+
}
|
|
61
|
+
async reindex(targetBlockHeight) {
|
|
62
|
+
this.schema = await this.getExistingProjectSchema();
|
|
63
|
+
if (!this.schema) {
|
|
64
|
+
logger.error('Unable to locate schema');
|
|
65
|
+
throw new Error('Schema does not exist.');
|
|
66
|
+
}
|
|
67
|
+
await this.initDbSchema();
|
|
68
|
+
this.metadataRepo = (0, node_core_1.MetadataFactory)(this.sequelize, this.schema);
|
|
69
|
+
this.startHeight = await this.getStartBlockFromDataSources();
|
|
70
|
+
const lastProcessedHeight = await this.getLastProcessedHeight();
|
|
71
|
+
if (!this.storeService.historical) {
|
|
72
|
+
logger.warn('Unable to reindex, historical state not enabled');
|
|
73
|
+
return;
|
|
74
|
+
}
|
|
75
|
+
if (!lastProcessedHeight || lastProcessedHeight < targetBlockHeight) {
|
|
76
|
+
logger.warn(`Skipping reindexing to block ${targetBlockHeight}: current indexing height ${lastProcessedHeight} is behind requested block`);
|
|
77
|
+
return;
|
|
78
|
+
}
|
|
79
|
+
// if startHeight is greater than the targetHeight, just force clean
|
|
80
|
+
if (targetBlockHeight < this.startHeight) {
|
|
81
|
+
logger.info(`targetHeight: ${targetBlockHeight} is less than startHeight: ${this.startHeight}. Hence executing force-clean`);
|
|
82
|
+
await this.forceCleanService.forceClean();
|
|
83
|
+
}
|
|
84
|
+
else {
|
|
85
|
+
logger.info(`Reindexing to block: ${targetBlockHeight}`);
|
|
86
|
+
const transaction = await this.sequelize.transaction();
|
|
87
|
+
try {
|
|
88
|
+
await this.storeService.rewind(targetBlockHeight, transaction);
|
|
89
|
+
const blockOffset = await this.getMetadataBlockOffset();
|
|
90
|
+
if (blockOffset) {
|
|
91
|
+
await this.mmrService.deleteMmrNode(targetBlockHeight + 1, blockOffset);
|
|
92
|
+
}
|
|
93
|
+
await transaction.commit();
|
|
94
|
+
logger.info('Reindex Success');
|
|
95
|
+
}
|
|
96
|
+
catch (err) {
|
|
97
|
+
logger.error(err, 'Reindexing failed');
|
|
98
|
+
await transaction.rollback();
|
|
99
|
+
throw err;
|
|
100
|
+
}
|
|
101
|
+
}
|
|
102
|
+
}
|
|
103
|
+
};
|
|
104
|
+
ReindexService = __decorate([
|
|
105
|
+
(0, common_1.Injectable)(),
|
|
106
|
+
__metadata("design:paramtypes", [sequelize_1.Sequelize,
|
|
107
|
+
node_core_1.NodeConfig,
|
|
108
|
+
node_core_1.StoreService,
|
|
109
|
+
node_core_1.MmrService,
|
|
110
|
+
SubqueryProject_1.SubqueryProject,
|
|
111
|
+
forceClean_service_1.ForceCleanService])
|
|
112
|
+
], ReindexService);
|
|
113
|
+
exports.ReindexService = ReindexService;
|
|
114
|
+
//# sourceMappingURL=reindex.service.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"reindex.service.js","sourceRoot":"","sources":["../../src/subcommands/reindex.service.ts"],"names":[],"mappings":";AAAA,gEAAgE;AAChE,sCAAsC;;;;;;;;;;;;AAEtC,2CAA4C;AAC5C,gDAS0B;AAC1B,yCAAsC;AACtC,kEAA+E;AAC/E,8CAAgD;AAEhD,6DAAyD;AAEzD,MAAM,MAAM,GAAG,IAAA,qBAAS,EAAC,SAAS,CAAC,CAAC;AAG7B,IAAM,cAAc,GAApB,MAAM,cAAc;IAKzB,YACmB,SAAoB,EACpB,UAAsB,EACtB,YAA0B,EAC1B,UAAsB,EACtB,OAAwB,EACxB,iBAAoC;QALpC,cAAS,GAAT,SAAS,CAAW;QACpB,eAAU,GAAV,UAAU,CAAY;QACtB,iBAAY,GAAZ,YAAY,CAAc;QAC1B,eAAU,GAAV,UAAU,CAAY;QACtB,YAAO,GAAP,OAAO,CAAiB;QACxB,sBAAiB,GAAjB,iBAAiB,CAAmB;IACpD,CAAC;IAEI,KAAK,CAAC,wBAAwB;QACpC,OAAO,IAAA,oCAAwB,EAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,SAAS,CAAC,CAAC;IACnE,CAAC;IAEO,KAAK,CAAC,sBAAsB;QAClC,OAAO,IAAA,2BAAe,EAAC,IAAI,CAAC,YAAY,EAAE,qBAAqB,CAAC,CAAC;IACnE,CAAC;IAEO,KAAK,CAAC,sBAAsB;QAClC,OAAO,IAAA,2BAAe,EAAC,IAAI,CAAC,YAAY,EAAE,aAAa,CAAC,CAAC;IAC3D,CAAC;IAEO,KAAK,CAAC,mBAAmB;QAC/B,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC;YAC1C,KAAK,EAAE,EAAE,GAAG,EAAE,UAAU,EAAE;SAC3B,CAAC,CAAC;QACH,OAAO,GAAG,aAAH,GAAG,uBAAH,GAAG,CAAE,KAA2B,CAAC;IAC1C,CAAC;IAEO,KAAK,CAAC,YAAY;QACxB,MAAM,IAAA,sBAAY,EAAC,IAAI,CAAC,OAAO,EAAE,IAAI,CAAC,MAAM,EAAE,IAAI,CAAC,YAAY,CAAC,CAAC;IACnE,CAAC;IAED,4DAA4D;IACpD,KAAK,CAAC,4BAA4B;QACxC,MAAM,WAAW,GAAG,IAAI,CAAC,OAAO,CAAC,WAAW,CAAC;QAE7C,MAAM,eAAe,GAAG,WAAW,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,EAAE,WAAC,OAAA,MAAA,IAAI,CAAC,UAAU,mCAAI,CAAC,CAAA,EAAA,CAAC,CAAC;QACxE,IAAI,eAAe,CAAC,MAAM,KAAK,CAAC,EAAE;YAChC,MAAM,CAAC,KAAK,CACV,2FAA2F,CAC5F,CAAC;YACF,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACjB;aAAM;YACL,OAAO,IAAI,CAAC,GAAG,CAAC,GAAG,eAAe,CAAC,CAAC;SACrC;IACH,CAAC;IAED,KAAK,CAAC,OAAO,CAAC,iBAAyB;QACrC,IAAI,CAAC,MAAM,GAAG,MAAM,IAAI,CAAC,wBAAwB,EAAE,CAAC;QAEpD,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,CAAC,KAAK,CAAC,yBAAyB,CAAC,CAAC;YACxC,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAC;SAC3C;QACD,MAAM,IAAI,CAAC,YAAY,EAAE,CAAC;QAE1B,IAAI,CAAC,YAAY,GAAG,IAAA,2BAAe,EAAC,IAAI,CAAC,SAAS,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC;QAEjE,IAAI,CAAC,WAAW,GAAG,MAAM,IAAI,CAAC,4B
AA4B,EAAE,CAAC;QAE7D,MAAM,mBAAmB,GAAG,MAAM,IAAI,CAAC,sBAAsB,EAAE,CAAC;QAEhE,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,UAAU,EAAE;YACjC,MAAM,CAAC,IAAI,CAAC,iDAAiD,CAAC,CAAC;YAC/D,OAAO;SACR;QACD,IAAI,CAAC,mBAAmB,IAAI,mBAAmB,GAAG,iBAAiB,EAAE;YACnE,MAAM,CAAC,IAAI,CACT,gCAAgC,iBAAiB,6BAA6B,mBAAmB,4BAA4B,CAC9H,CAAC;YACF,OAAO;SACR;QAED,oEAAoE;QACpE,IAAI,iBAAiB,GAAG,IAAI,CAAC,WAAW,EAAE;YACxC,MAAM,CAAC,IAAI,CACT,iBAAiB,iBAAiB,8BAA8B,IAAI,CAAC,WAAW,+BAA+B,CAChH,CAAC;YACF,MAAM,IAAI,CAAC,iBAAiB,CAAC,UAAU,EAAE,CAAC;SAC3C;aAAM;YACL,MAAM,CAAC,IAAI,CAAC,wBAAwB,iBAAiB,EAAE,CAAC,CAAC;YACzD,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,SAAS,CAAC,WAAW,EAAE,CAAC;YACvD,IAAI;gBACF,MAAM,IAAI,CAAC,YAAY,CAAC,MAAM,CAAC,iBAAiB,EAAE,WAAW,CAAC,CAAC;gBAE/D,MAAM,WAAW,GAAG,MAAM,IAAI,CAAC,sBAAsB,EAAE,CAAC;gBACxD,IAAI,WAAW,EAAE;oBACf,MAAM,IAAI,CAAC,UAAU,CAAC,aAAa,CACjC,iBAAiB,GAAG,CAAC,EACrB,WAAW,CACZ,CAAC;iBACH;gBACD,MAAM,WAAW,CAAC,MAAM,EAAE,CAAC;gBAC3B,MAAM,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;aAChC;YAAC,OAAO,GAAG,EAAE;gBACZ,MAAM,CAAC,KAAK,CAAC,GAAG,EAAE,mBAAmB,CAAC,CAAC;gBACvC,MAAM,WAAW,CAAC,QAAQ,EAAE,CAAC;gBAC7B,MAAM,GAAG,CAAC;aACX;SACF;IACH,CAAC;CACF,CAAA;AA1GY,cAAc;IAD1B,IAAA,mBAAU,GAAE;qCAOmB,qBAAS;QACR,sBAAU;QACR,wBAAY;QACd,sBAAU;QACb,iCAAe;QACL,sCAAiB;GAX5C,cAAc,CA0G1B;AA1GY,wCAAc","sourcesContent":["// Copyright 2020-2022 OnFinality Limited authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\nimport { Injectable } from '@nestjs/common';\nimport {\n getLogger,\n MetadataFactory,\n MetadataRepo,\n MmrService,\n NodeConfig,\n StoreService,\n getExistingProjectSchema,\n getMetaDataInfo,\n} from '@subql/node-core';\nimport { Sequelize } from 'sequelize';\nimport { SubqlProjectDs, SubqueryProject } from '../configure/SubqueryProject';\nimport { initDbSchema } from '../utils/project';\n\nimport { ForceCleanService } from './forceClean.service';\n\nconst logger = getLogger('Reindex');\n\n@Injectable()\nexport class ReindexService {\n private schema: string;\n private metadataRepo: MetadataRepo;\n private specName: 
string;\n private startHeight: number;\n constructor(\n private readonly sequelize: Sequelize,\n private readonly nodeConfig: NodeConfig,\n private readonly storeService: StoreService,\n private readonly mmrService: MmrService,\n private readonly project: SubqueryProject,\n private readonly forceCleanService: ForceCleanService,\n ) {}\n\n private async getExistingProjectSchema(): Promise<string> {\n return getExistingProjectSchema(this.nodeConfig, this.sequelize);\n }\n\n private async getLastProcessedHeight(): Promise<number | undefined> {\n return getMetaDataInfo(this.metadataRepo, 'lastProcessedHeight');\n }\n\n private async getMetadataBlockOffset(): Promise<number | undefined> {\n return getMetaDataInfo(this.metadataRepo, 'blockOffset');\n }\n\n private async getMetadataSpecName(): Promise<string | undefined> {\n const res = await this.metadataRepo.findOne({\n where: { key: 'specName' },\n });\n return res?.value as string | undefined;\n }\n\n private async initDbSchema(): Promise<void> {\n await initDbSchema(this.project, this.schema, this.storeService);\n }\n\n // eslint-disable-next-line @typescript-eslint/require-await\n private async getStartBlockFromDataSources() {\n const datasources = this.project.dataSources;\n\n const startBlocksList = datasources.map((item) => item.startBlock ?? 
1);\n if (startBlocksList.length === 0) {\n logger.error(\n `Failed to find a valid datasource, Please check your endpoint if specName filter is used.`,\n );\n process.exit(1);\n } else {\n return Math.min(...startBlocksList);\n }\n }\n\n async reindex(targetBlockHeight: number): Promise<void> {\n this.schema = await this.getExistingProjectSchema();\n\n if (!this.schema) {\n logger.error('Unable to locate schema');\n throw new Error('Schema does not exist.');\n }\n await this.initDbSchema();\n\n this.metadataRepo = MetadataFactory(this.sequelize, this.schema);\n\n this.startHeight = await this.getStartBlockFromDataSources();\n\n const lastProcessedHeight = await this.getLastProcessedHeight();\n\n if (!this.storeService.historical) {\n logger.warn('Unable to reindex, historical state not enabled');\n return;\n }\n if (!lastProcessedHeight || lastProcessedHeight < targetBlockHeight) {\n logger.warn(\n `Skipping reindexing to block ${targetBlockHeight}: current indexing height ${lastProcessedHeight} is behind requested block`,\n );\n return;\n }\n\n // if startHeight is greater than the targetHeight, just force clean\n if (targetBlockHeight < this.startHeight) {\n logger.info(\n `targetHeight: ${targetBlockHeight} is less than startHeight: ${this.startHeight}. Hence executing force-clean`,\n );\n await this.forceCleanService.forceClean();\n } else {\n logger.info(`Reindexing to block: ${targetBlockHeight}`);\n const transaction = await this.sequelize.transaction();\n try {\n await this.storeService.rewind(targetBlockHeight, transaction);\n\n const blockOffset = await this.getMetadataBlockOffset();\n if (blockOffset) {\n await this.mmrService.deleteMmrNode(\n targetBlockHeight + 1,\n blockOffset,\n );\n }\n await transaction.commit();\n logger.info('Reindex Success');\n } catch (err) {\n logger.error(err, 'Reindexing failed');\n await transaction.rollback();\n throw err;\n }\n }\n }\n}\n"]}
|
|
@@ -0,0 +1,13 @@
|
|
|
1
|
+
import { Reader } from '@subql/common';
|
|
2
|
+
import { ChainTypes, CustomDatasourceV0_2_0, RuntimeDataSourceV0_2_0, SubqlRuntimeHandler, SubqlCustomHandler, SubqlHandler } from '@subql/common-ethereum';
|
|
3
|
+
import { StoreService } from '@subql/node-core';
|
|
4
|
+
import { SubqlProjectDs, SubqueryProject } from '../configure/SubqueryProject';
|
|
5
|
+
export declare function prepareProjectDir(projectPath: string): Promise<string>;
|
|
6
|
+
export declare function getProjectEntry(root: string): string;
|
|
7
|
+
export declare function isBaseHandler(handler: SubqlHandler): handler is SubqlRuntimeHandler;
|
|
8
|
+
export declare function isCustomHandler(handler: SubqlHandler): handler is SubqlCustomHandler;
|
|
9
|
+
export declare function updateDataSourcesV0_2_0(_dataSources: (RuntimeDataSourceV0_2_0 | CustomDatasourceV0_2_0)[], reader: Reader, root: string): Promise<SubqlProjectDs[]>;
|
|
10
|
+
export declare function getChainTypes(reader: Reader, root: string, file: string): Promise<ChainTypes>;
|
|
11
|
+
export declare function loadDataSourceScript(reader: Reader, file?: string): Promise<string>;
|
|
12
|
+
export declare function getProjectRoot(reader: Reader): Promise<string>;
|
|
13
|
+
export declare function initDbSchema(project: SubqueryProject, schema: string, storeService: StoreService): Promise<void>;
|
|
@@ -0,0 +1,191 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Copyright 2020-2022 OnFinality Limited authors & contributors
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
5
|
+
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
6
|
+
};
|
|
7
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
8
|
+
exports.initDbSchema = exports.getProjectRoot = exports.loadDataSourceScript = exports.getChainTypes = exports.updateDataSourcesV0_2_0 = exports.isCustomHandler = exports.isBaseHandler = exports.getProjectEntry = exports.prepareProjectDir = void 0;
|
|
9
|
+
const fs_1 = __importDefault(require("fs"));
|
|
10
|
+
const os_1 = __importDefault(require("os"));
|
|
11
|
+
const path_1 = __importDefault(require("path"));
|
|
12
|
+
const common_1 = require("@subql/common");
|
|
13
|
+
const common_ethereum_1 = require("@subql/common-ethereum");
|
|
14
|
+
const utils_1 = require("@subql/utils");
|
|
15
|
+
const js_yaml_1 = __importDefault(require("js-yaml"));
|
|
16
|
+
const tar_1 = __importDefault(require("tar"));
|
|
17
|
+
// Resolve a project path to a usable project directory.
// A tarball file is extracted into a fresh temp dir and the conventional
// `package` sub-directory inside it is returned; a directory is used as-is.
// NOTE(review): any other file kind falls through and resolves to undefined.
async function prepareProjectDir(projectPath) {
    const stats = fs_1.default.statSync(projectPath);
    if (stats.isDirectory()) {
        return projectPath;
    }
    if (stats.isFile()) {
        const workDir = fs_1.default.mkdtempSync(`${os_1.default.tmpdir()}${path_1.default.sep}`);
        // Extraction will surface errors for an unexpected format/extension
        await tar_1.default.x({ file: projectPath, cwd: workDir });
        return `${workDir}/package`;
    }
}
exports.prepareProjectDir = prepareProjectDir;
|
|
32
|
+
// Cache of resolved entry points, keyed by package.json path, so repeated
// lookups for the same project avoid re-reading from the fs.
const projectEntryCache = {};
/**
 * Resolve a project's entry point from its package.json `main` field.
 * Falls back to './dist' when `main` is absent; results are normalised to a
 * './'-prefixed relative path and cached per package.json path.
 * Throws when package.json cannot be read or parsed.
 */
function getProjectEntry(root) {
    const pkgPath = path_1.default.join(root, 'package.json');
    try {
        if (!projectEntryCache[pkgPath]) {
            const content = fs_1.default.readFileSync(pkgPath).toString();
            const pkg = JSON.parse(content);
            if (!pkg.main) {
                // Fix: previously this fallback returned early without caching,
                // forcing a fs read + JSON.parse on every call for packages
                // without a `main` field.
                projectEntryCache[pkgPath] = './dist';
            }
            else {
                projectEntryCache[pkgPath] = pkg.main.startsWith('./')
                    ? pkg.main
                    : `./${pkg.main}`;
            }
        }
        return projectEntryCache[pkgPath];
    }
    catch (err) {
        throw new Error(`can not find package.json within directory ${root}`);
    }
}
exports.getProjectEntry = getProjectEntry;
|
|
54
|
+
// A handler is a base (runtime) handler when its kind is one of the
// built-in EthereumHandlerKind values.
function isBaseHandler(handler) {
    const baseKinds = Object.values(common_ethereum_1.EthereumHandlerKind);
    return baseKinds.includes(handler.kind);
}
exports.isBaseHandler = isBaseHandler;
|
|
58
|
+
// Custom handlers are exactly the complement of base handlers.
function isCustomHandler(handler) {
    const base = isBaseHandler(handler);
    return !base;
}
exports.isCustomHandler = isCustomHandler;
|
|
62
|
+
// Localise a single datasource asset reference in place: local readers just
// resolve the path against the project root; remote readers download the
// content next to the root (stripping any 'ipfs://' prefix from the name).
async function localizeAssetFile(reader, root, asset) {
    if (reader instanceof common_1.LocalReader) {
        asset.file = path_1.default.resolve(root, asset.file);
    }
    else {
        const res = await reader.getFile(asset.file);
        const outputPath = path_1.default.resolve(root, asset.file.replace('ipfs://', ''));
        await fs_1.default.promises.writeFile(outputPath, res);
        asset.file = outputPath;
    }
}
/**
 * Force-convert v0.2.0 datasources to the internal project datasource shape:
 * loads each mapping's entry script, localises asset and processor files,
 * and returns datasources extended with `mapping.entryScript`/`mapping.file`.
 * Fix: the original duplicated the identical return object in both branches
 * of the isCustomDs check and repeated the asset-localisation body verbatim
 * in two loops; the return is hoisted and the loop body extracted above.
 */
async function updateDataSourcesV0_2_0(_dataSources, reader, root) {
    // force convert to updated ds
    return Promise.all(_dataSources.map(async (dataSource) => {
        const entryScript = await loadDataSourceScript(reader, dataSource.mapping.file);
        const file = await updateDataSourcesEntry(reader, dataSource.mapping.file, root, entryScript);
        if (dataSource.assets) {
            // assets here is a plain object keyed by asset name
            for (const [, asset] of Object.entries(dataSource.assets)) {
                await localizeAssetFile(reader, root, asset);
            }
        }
        if ((0, common_ethereum_1.isCustomDs)(dataSource)) {
            if (dataSource.processor) {
                dataSource.processor.file = await updateProcessor(reader, root, dataSource.processor.file);
            }
            if (dataSource.assets) {
                // custom-ds assets are iterated directly (Map-like entries)
                for (const [, asset] of dataSource.assets) {
                    await localizeAssetFile(reader, root, asset);
                }
            }
        }
        return Object.assign(Object.assign({}, dataSource), { mapping: Object.assign(Object.assign({}, dataSource.mapping), { entryScript, file }) });
    }));
}
exports.updateDataSourcesV0_2_0 = updateDataSourcesV0_2_0;
|
|
105
|
+
// Persist the datasource entry script for remote readers (written next to
// the project root as a .js file); local projects keep their original
// mapping file reference.
// NOTE(review): resolves to undefined for reader types other than
// Local/IPFS/Github — matches the original behaviour.
async function updateDataSourcesEntry(reader, file, root, script) {
    if (reader instanceof common_1.LocalReader) {
        return file;
    }
    if (reader instanceof common_1.IPFSReader || reader instanceof common_1.GithubReader) {
        const target = `${path_1.default.resolve(root, file.replace('ipfs://', ''))}.js`;
        await fs_1.default.promises.writeFile(target, script);
        return target;
    }
}
|
|
114
|
+
// Resolve a custom-ds processor file: local projects resolve in place,
// remote ones are downloaded next to the project root as a .js file.
async function updateProcessor(reader, root, file) {
    if (reader instanceof common_1.LocalReader) {
        return path_1.default.resolve(root, file);
    }
    const content = await reader.getFile(file);
    const target = `${path_1.default.resolve(root, file.replace('ipfs://', ''))}.js`;
    await fs_1.default.promises.writeFile(target, content);
    return target;
}
|
|
125
|
+
// Load and parse a project's chain types.
// Local projects load the file directly. Remote readers (e.g. ipfs) give no
// reliable file extension, so we first try to parse the content as yaml; if
// that fails we assume it is a js script, write it to disk and load it in a
// sandbox before parsing.
async function getChainTypes(reader, root, file) {
    if (reader instanceof common_1.LocalReader) {
        return (0, common_ethereum_1.loadChainTypes)(file, root);
    }
    const content = await reader.getFile(file);
    try {
        const parsed = js_yaml_1.default.load(content);
        return (0, common_ethereum_1.parseChainTypes)(parsed);
    }
    catch (e) {
        // Not yaml — persist as js and load it. Root is not required because
        // the script is packed as a single js file.
        const jsPath = `${path_1.default.resolve(root, file.replace('ipfs://', ''))}.js`;
        await fs_1.default.promises.writeFile(jsPath, content);
        const loaded = (0, common_ethereum_1.loadChainTypesFromJs)(jsPath);
        return (0, common_ethereum_1.parseChainTypes)(loaded);
    }
}
exports.getChainTypes = getChainTypes;
|
|
151
|
+
async function loadDataSourceScript(reader, file) {
|
|
152
|
+
let entry;
|
|
153
|
+
//For RuntimeDataSourceV0_0_1
|
|
154
|
+
if (!file) {
|
|
155
|
+
const pkg = await reader.getPkg();
|
|
156
|
+
if (pkg === undefined)
|
|
157
|
+
throw new Error('Project package.json is not found');
|
|
158
|
+
if (pkg.main) {
|
|
159
|
+
entry = pkg.main.startsWith('./') ? pkg.main : `./${pkg.main}`;
|
|
160
|
+
}
|
|
161
|
+
else {
|
|
162
|
+
entry = './dist';
|
|
163
|
+
}
|
|
164
|
+
}
|
|
165
|
+
//Else get file
|
|
166
|
+
const entryScript = await reader.getFile(file ? file : entry);
|
|
167
|
+
if (entryScript === undefined) {
|
|
168
|
+
throw new Error(`Entry file ${entry} for datasource not exist`);
|
|
169
|
+
}
|
|
170
|
+
return entryScript;
|
|
171
|
+
}
|
|
172
|
+
exports.loadDataSourceScript = loadDataSourceScript;
|
|
173
|
+
// Create (and return the path of) a unique temp directory, used as a
// download target for remote projects.
async function makeTempDir() {
    const prefix = `${os_1.default.tmpdir()}${path_1.default.sep}`;
    return fs_1.default.promises.mkdtemp(prefix);
}
|
|
178
|
+
// Determine the on-disk root for a project: local readers already have one,
// remote (ipfs/github) readers get a fresh temp directory to download into.
// NOTE(review): resolves to undefined for any other reader type.
async function getProjectRoot(reader) {
    if (reader instanceof common_1.LocalReader) {
        return reader.root;
    }
    if (reader instanceof common_1.IPFSReader || reader instanceof common_1.GithubReader) {
        return makeTempDir();
    }
}
exports.getProjectRoot = getProjectRoot;
|
|
186
|
+
// Initialise the store for `schema` using the entity/relation model derived
// from the project's graphql schema.
async function initDbSchema(project, schema, storeService) {
    await storeService.init((0, utils_1.getAllEntitiesRelations)(project.schema), schema);
}
exports.initDbSchema = initDbSchema;
|
|
191
|
+
//# sourceMappingURL=project.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"project.js","sourceRoot":"","sources":["../../src/utils/project.ts"],"names":[],"mappings":";AAAA,gEAAgE;AAChE,sCAAsC;;;;;;AAEtC,4CAAoB;AACpB,4CAAoB;AACpB,gDAAwB;AACxB,0CAA8E;AAC9E,4DAYgC;AAEhC,wCAAuD;AACvD,sDAA2B;AAC3B,8CAAsB;AAGf,KAAK,UAAU,iBAAiB,CAAC,WAAmB;IACzD,MAAM,KAAK,GAAG,YAAE,CAAC,QAAQ,CAAC,WAAW,CAAC,CAAC;IACvC,IAAI,KAAK,CAAC,MAAM,EAAE,EAAE;QAClB,MAAM,GAAG,GAAG,cAAI,CAAC,GAAG,CAAC;QACrB,MAAM,MAAM,GAAG,YAAE,CAAC,MAAM,EAAE,CAAC;QAC3B,MAAM,QAAQ,GAAG,YAAE,CAAC,WAAW,CAAC,GAAG,MAAM,GAAG,GAAG,EAAE,CAAC,CAAC;QACnD,oDAAoD;QACpD,MAAM,aAAG,CAAC,CAAC,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,GAAG,EAAE,QAAQ,EAAE,CAAC,CAAC;QAClD,OAAO,QAAQ,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC;KACpC;SAAM,IAAI,KAAK,CAAC,WAAW,EAAE,EAAE;QAC9B,OAAO,WAAW,CAAC;KACpB;AACH,CAAC;AAZD,8CAYC;AAED,gDAAgD;AAChD,MAAM,iBAAiB,GAA2B,EAAE,CAAC;AAErD,SAAgB,eAAe,CAAC,IAAY;IAC1C,MAAM,OAAO,GAAG,cAAI,CAAC,IAAI,CAAC,IAAI,EAAE,cAAc,CAAC,CAAC;IAChD,IAAI;QACF,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,EAAE;YAC/B,MAAM,OAAO,GAAG,YAAE,CAAC,YAAY,CAAC,OAAO,CAAC,CAAC,QAAQ,EAAE,CAAC;YACpD,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC;YAChC,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE;gBACb,OAAO,QAAQ,CAAC;aACjB;YACD,iBAAiB,CAAC,OAAO,CAAC,GAAG,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC;gBACpD,CAAC,CAAC,GAAG,CAAC,IAAI;gBACV,CAAC,CAAC,KAAK,GAAG,CAAC,IAAI,EAAE,CAAC;SACrB;QAED,OAAO,iBAAiB,CAAC,OAAO,CAAC,CAAC;KACnC;IAAC,OAAO,GAAG,EAAE;QACZ,MAAM,IAAI,KAAK,CAAC,8CAA8C,IAAI,EAAE,CAAC,CAAC;KACvE;AACH,CAAC;AAlBD,0CAkBC;AAED,SAAgB,aAAa,CAC3B,OAAqB;IAErB,OAAO,MAAM,CAAC,MAAM,CAAS,qCAAmB,CAAC,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAC3E,CAAC;AAJD,sCAIC;AAED,SAAgB,eAAe,CAC7B,OAAqB;IAErB,OAAO,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC;AACjC,CAAC;AAJD,0CAIC;AAEM,KAAK,UAAU,uBAAuB,CAC3C,YAAkE,EAClE,MAAc,EACd,IAAY;IAEZ,8BAA8B;IAC9B,OAAO,OAAO,CAAC,GAAG,CAChB,YAAY,CAAC,GAAG,CAAC,KAAK,EAAE,UAAU,EAAE,EAAE;QACpC,MAAM,WAAW,GAAG,MAAM,oBAAoB,CAC5C,MAAM,EACN,UAAU,CAAC,OAAO,CAAC,IAAI,CACxB,CAAC;QACF,MAAM,IAAI,GAAG,MAAM,sBAAsB,CACvC,MAAM,EACN,UAAU,CAAC,OAAO,CAAC,IAAI,EACvB,IAAI,EACJ,WAAW,C
ACZ,CAAC;QACF,IAAI,UAAU,CAAC,MAAM,EAAE;YACrB,KAAK,MAAM,CAAC,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE;gBACzD,IAAI,MAAM,YAAY,oBAAW,EAAE;oBACjC,KAAK,CAAC,IAAI,GAAG,cAAI,CAAC,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;iBAC7C;qBAAM;oBACL,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;oBAC7C,MAAM,UAAU,GAAG,cAAI,CAAC,OAAO,CAC7B,IAAI,EACJ,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAClC,CAAC;oBACF,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,EAAE,GAAa,CAAC,CAAC;oBACvD,KAAK,CAAC,IAAI,GAAG,UAAU,CAAC;iBACzB;aACF;SACF;QACD,IAAI,IAAA,4BAAU,EAAC,UAAU,CAAC,EAAE;YAC1B,IAAI,UAAU,CAAC,SAAS,EAAE;gBACxB,UAAU,CAAC,SAAS,CAAC,IAAI,GAAG,MAAM,eAAe,CAC/C,MAAM,EACN,IAAI,EACJ,UAAU,CAAC,SAAS,CAAC,IAAI,CAC1B,CAAC;aACH;YACD,IAAI,UAAU,CAAC,MAAM,EAAE;gBACrB,KAAK,MAAM,CAAC,EAAE,KAAK,CAAC,IAAI,UAAU,CAAC,MAAM,EAAE;oBACzC,IAAI,MAAM,YAAY,oBAAW,EAAE;wBACjC,KAAK,CAAC,IAAI,GAAG,cAAI,CAAC,OAAO,CAAC,IAAI,EAAE,KAAK,CAAC,IAAI,CAAC,CAAC;qBAC7C;yBAAM;wBACL,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC;wBAC7C,MAAM,UAAU,GAAG,cAAI,CAAC,OAAO,CAC7B,IAAI,EACJ,KAAK,CAAC,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAClC,CAAC;wBACF,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,EAAE,GAAa,CAAC,CAAC;wBACvD,KAAK,CAAC,IAAI,GAAG,UAAU,CAAC;qBACzB;iBACF;aACF;YACD,uCACK,UAAU,KACb,OAAO,kCAAO,UAAU,CAAC,OAAO,KAAE,WAAW,EAAE,IAAI,OACnD;SACH;aAAM;YACL,uCACK,UAAU,KACb,OAAO,kCAAO,UAAU,CAAC,OAAO,KAAE,WAAW,EAAE,IAAI,OACnD;SACH;IACH,CAAC,CAAC,CACH,CAAC;AACJ,CAAC;AApED,0DAoEC;AAED,KAAK,UAAU,sBAAsB,CACnC,MAAc,EACd,IAAY,EACZ,IAAY,EACZ,MAAc;IAEd,IAAI,MAAM,YAAY,oBAAW;QAAE,OAAO,IAAI,CAAC;SAC1C,IAAI,MAAM,YAAY,mBAAU,IAAI,MAAM,YAAY,qBAAY,EAAE;QACvE,MAAM,UAAU,GAAG,GAAG,cAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC;QAC3E,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,EAAE,MAAM,CAAC,CAAC;QAChD,OAAO,UAAU,CAAC;KACnB;AACH,CAAC;AAED,KAAK,UAAU,eAAe,CAC5B,MAAc,EACd,IAAY,EACZ,IAAY;IAEZ,IAAI,MAAM,YAAY,oBAAW,EAAE;QACjC,OAAO,cAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KACjC;S
AAM;QACL,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACvC,MAAM,UAAU,GAAG,GAAG,cAAI,CAAC,OAAO,CAAC,IAAI,EAAE,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC;QAC3E,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,UAAU,EAAE,GAAG,CAAC,CAAC;QAC7C,OAAO,UAAU,CAAC;KACnB;AACH,CAAC;AAEM,KAAK,UAAU,aAAa,CACjC,MAAc,EACd,IAAY,EACZ,IAAY;IAEZ,8DAA8D;IAC9D,IAAI,MAAM,YAAY,oBAAW,EAAE;QACjC,OAAO,IAAA,gCAAc,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC;KACnC;SAAM;QACL,oGAAoG;QACpG,0FAA0F;QAC1F,0FAA0F;QAC1F,6FAA6F;QAC7F,qEAAqE;QACrE,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;QACvC,IAAI,GAAY,CAAC;QACjB,IAAI;YACF,GAAG,GAAG,iBAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;YACrB,OAAO,IAAA,iCAAe,EAAC,GAAG,CAAC,CAAC;SAC7B;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,cAAc,GAAG,GAAG,cAAI,CAAC,OAAO,CACpC,IAAI,EACJ,IAAI,CAAC,OAAO,CAAC,SAAS,EAAE,EAAE,CAAC,CAC5B,KAAK,CAAC;YACP,MAAM,YAAE,CAAC,QAAQ,CAAC,SAAS,CAAC,cAAc,EAAE,GAAG,CAAC,CAAC;YACjD,GAAG,GAAG,IAAA,sCAAoB,EAAC,cAAc,CAAC,CAAC,CAAC,mDAAmD;YAC/F,OAAO,IAAA,iCAAe,EAAC,GAAG,CAAC,CAAC;SAC7B;KACF;AACH,CAAC;AA7BD,sCA6BC;AAEM,KAAK,UAAU,oBAAoB,CACxC,MAAc,EACd,IAAa;IAEb,IAAI,KAAa,CAAC;IAClB,6BAA6B;IAC7B,IAAI,CAAC,IAAI,EAAE;QACT,MAAM,GAAG,GAAG,MAAM,MAAM,CAAC,MAAM,EAAE,CAAC;QAClC,IAAI,GAAG,KAAK,SAAS;YAAE,MAAM,IAAI,KAAK,CAAC,mCAAmC,CAAC,CAAC;QAC5E,IAAI,GAAG,CAAC,IAAI,EAAE;YACZ,KAAK,GAAG,GAAG,CAAC,IAAI,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,GAAG,CAAC,IAAI,EAAE,CAAC;SAChE;aAAM;YACL,KAAK,GAAG,QAAQ,CAAC;SAClB;KACF;IACD,eAAe;IACf,MAAM,WAAW,GAAG,MAAM,MAAM,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC;IAC9D,IAAI,WAAW,KAAK,SAAS,EAAE;QAC7B,MAAM,IAAI,KAAK,CAAC,cAAc,KAAK,2BAA2B,CAAC,CAAC;KACjE;IACD,OAAO,WAAW,CAAC;AACrB,CAAC;AArBD,oDAqBC;AAED,KAAK,UAAU,WAAW;IACxB,MAAM,GAAG,GAAG,cAAI,CAAC,GAAG,CAAC;IACrB,MAAM,MAAM,GAAG,YAAE,CAAC,MAAM,EAAE,CAAC;IAC3B,OAAO,YAAE,CAAC,QAAQ,CAAC,OAAO,CAAC,GAAG,MAAM,GAAG,GAAG,EAAE,CAAC,CAAC;AAChD,CAAC;AAEM,KAAK,UAAU,cAAc,CAAC,MAAc;IACjD,IAAI,MAAM,YAAY,oBAAW;QAAE,OAAO,MAAM,CAAC,IAAI,CAAC;IACtD,IAAI,MAAM,YAAY,m
BAAU,IAAI,MAAM,YAAY,qBAAY,EAAE;QAClE,OAAO,WAAW,EAAE,CAAC;KACtB;AACH,CAAC;AALD,wCAKC;AAEM,KAAK,UAAU,YAAY,CAChC,OAAwB,EACxB,MAAc,EACd,YAA0B;IAE1B,MAAM,cAAc,GAAG,IAAA,+BAAuB,EAAC,OAAO,CAAC,MAAM,CAAC,CAAC;IAC/D,MAAM,YAAY,CAAC,IAAI,CAAC,cAAc,EAAE,MAAM,CAAC,CAAC;AAClD,CAAC;AAPD,oCAOC","sourcesContent":["// Copyright 2020-2022 OnFinality Limited authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\nimport fs from 'fs';\nimport os from 'os';\nimport path from 'path';\nimport { GithubReader, IPFSReader, LocalReader, Reader } from '@subql/common';\nimport {\n ChainTypes,\n CustomDatasourceV0_2_0,\n isCustomDs,\n loadChainTypes,\n loadChainTypesFromJs,\n parseChainTypes,\n RuntimeDataSourceV0_2_0,\n SubqlRuntimeHandler,\n SubqlCustomHandler,\n SubqlHandler,\n EthereumHandlerKind,\n} from '@subql/common-ethereum';\nimport { StoreService } from '@subql/node-core';\nimport { getAllEntitiesRelations } from '@subql/utils';\nimport yaml from 'js-yaml';\nimport tar from 'tar';\nimport { SubqlProjectDs, SubqueryProject } from '../configure/SubqueryProject';\n\nexport async function prepareProjectDir(projectPath: string): Promise<string> {\n const stats = fs.statSync(projectPath);\n if (stats.isFile()) {\n const sep = path.sep;\n const tmpDir = os.tmpdir();\n const tempPath = fs.mkdtempSync(`${tmpDir}${sep}`);\n // Will promote errors if incorrect format/extension\n await tar.x({ file: projectPath, cwd: tempPath });\n return tempPath.concat('/package');\n } else if (stats.isDirectory()) {\n return projectPath;\n }\n}\n\n// We cache this to avoid repeated reads from fs\nconst projectEntryCache: Record<string, string> = {};\n\nexport function getProjectEntry(root: string): string {\n const pkgPath = path.join(root, 'package.json');\n try {\n if (!projectEntryCache[pkgPath]) {\n const content = fs.readFileSync(pkgPath).toString();\n const pkg = JSON.parse(content);\n if (!pkg.main) {\n return './dist';\n }\n projectEntryCache[pkgPath] = pkg.main.startsWith('./')\n ? 
pkg.main\n : `./${pkg.main}`;\n }\n\n return projectEntryCache[pkgPath];\n } catch (err) {\n throw new Error(`can not find package.json within directory ${root}`);\n }\n}\n\nexport function isBaseHandler(\n handler: SubqlHandler,\n): handler is SubqlRuntimeHandler {\n return Object.values<string>(EthereumHandlerKind).includes(handler.kind);\n}\n\nexport function isCustomHandler(\n handler: SubqlHandler,\n): handler is SubqlCustomHandler {\n return !isBaseHandler(handler);\n}\n\nexport async function updateDataSourcesV0_2_0(\n _dataSources: (RuntimeDataSourceV0_2_0 | CustomDatasourceV0_2_0)[],\n reader: Reader,\n root: string,\n): Promise<SubqlProjectDs[]> {\n // force convert to updated ds\n return Promise.all(\n _dataSources.map(async (dataSource) => {\n const entryScript = await loadDataSourceScript(\n reader,\n dataSource.mapping.file,\n );\n const file = await updateDataSourcesEntry(\n reader,\n dataSource.mapping.file,\n root,\n entryScript,\n );\n if (dataSource.assets) {\n for (const [, asset] of Object.entries(dataSource.assets)) {\n if (reader instanceof LocalReader) {\n asset.file = path.resolve(root, asset.file);\n } else {\n const res = await reader.getFile(asset.file);\n const outputPath = path.resolve(\n root,\n asset.file.replace('ipfs://', ''),\n );\n await fs.promises.writeFile(outputPath, res as string);\n asset.file = outputPath;\n }\n }\n }\n if (isCustomDs(dataSource)) {\n if (dataSource.processor) {\n dataSource.processor.file = await updateProcessor(\n reader,\n root,\n dataSource.processor.file,\n );\n }\n if (dataSource.assets) {\n for (const [, asset] of dataSource.assets) {\n if (reader instanceof LocalReader) {\n asset.file = path.resolve(root, asset.file);\n } else {\n const res = await reader.getFile(asset.file);\n const outputPath = path.resolve(\n root,\n asset.file.replace('ipfs://', ''),\n );\n await fs.promises.writeFile(outputPath, res as string);\n asset.file = outputPath;\n }\n }\n }\n return {\n ...dataSource,\n mapping: { 
...dataSource.mapping, entryScript, file },\n };\n } else {\n return {\n ...dataSource,\n mapping: { ...dataSource.mapping, entryScript, file },\n };\n }\n }),\n );\n}\n\nasync function updateDataSourcesEntry(\n reader: Reader,\n file: string,\n root: string,\n script: string,\n): Promise<string> {\n if (reader instanceof LocalReader) return file;\n else if (reader instanceof IPFSReader || reader instanceof GithubReader) {\n const outputPath = `${path.resolve(root, file.replace('ipfs://', ''))}.js`;\n await fs.promises.writeFile(outputPath, script);\n return outputPath;\n }\n}\n\nasync function updateProcessor(\n reader: Reader,\n root: string,\n file: string,\n): Promise<string> {\n if (reader instanceof LocalReader) {\n return path.resolve(root, file);\n } else {\n const res = await reader.getFile(file);\n const outputPath = `${path.resolve(root, file.replace('ipfs://', ''))}.js`;\n await fs.promises.writeFile(outputPath, res);\n return outputPath;\n }\n}\n\nexport async function getChainTypes(\n reader: Reader,\n root: string,\n file: string,\n): Promise<ChainTypes> {\n // If the project is load from local, we will direct load them\n if (reader instanceof LocalReader) {\n return loadChainTypes(file, root);\n } else {\n // If it is stored in ipfs or other resources, we will use the corresponding reader to read the file\n // Because ipfs not provide extension of the file, it is difficult to determine its format\n // We will use yaml.load to try to load the script and parse them to supported chain types\n // if it failed, we will give it another another attempt, and assume the script written in js\n // we will download it to a temp folder, and load them within sandbox\n const res = await reader.getFile(file);\n let raw: unknown;\n try {\n raw = yaml.load(res);\n return parseChainTypes(raw);\n } catch (e) {\n const chainTypesPath = `${path.resolve(\n root,\n file.replace('ipfs://', ''),\n )}.js`;\n await fs.promises.writeFile(chainTypesPath, res);\n raw = 
loadChainTypesFromJs(chainTypesPath); //root not required, as it been packed in single js\n return parseChainTypes(raw);\n }\n }\n}\n\nexport async function loadDataSourceScript(\n reader: Reader,\n file?: string,\n): Promise<string> {\n let entry: string;\n //For RuntimeDataSourceV0_0_1\n if (!file) {\n const pkg = await reader.getPkg();\n if (pkg === undefined) throw new Error('Project package.json is not found');\n if (pkg.main) {\n entry = pkg.main.startsWith('./') ? pkg.main : `./${pkg.main}`;\n } else {\n entry = './dist';\n }\n }\n //Else get file\n const entryScript = await reader.getFile(file ? file : entry);\n if (entryScript === undefined) {\n throw new Error(`Entry file ${entry} for datasource not exist`);\n }\n return entryScript;\n}\n\nasync function makeTempDir(): Promise<string> {\n const sep = path.sep;\n const tmpDir = os.tmpdir();\n return fs.promises.mkdtemp(`${tmpDir}${sep}`);\n}\n\nexport async function getProjectRoot(reader: Reader): Promise<string> {\n if (reader instanceof LocalReader) return reader.root;\n if (reader instanceof IPFSReader || reader instanceof GithubReader) {\n return makeTempDir();\n }\n}\n\nexport async function initDbSchema(\n project: SubqueryProject,\n schema: string,\n storeService: StoreService,\n): Promise<void> {\n const modelsRelation = getAllEntitiesRelations(project.schema);\n await storeService.init(modelsRelation, schema);\n}\n"]}
|
|
@@ -0,0 +1,4 @@
|
|
|
1
|
+
export declare function stringNormalizedEq(a: string, b: string): boolean;
|
|
2
|
+
export declare function hexStringEq(a: string, b: string): boolean;
|
|
3
|
+
export declare function eventToTopic(input: string): string;
|
|
4
|
+
export declare function functionToSighash(input: string): string;
|
|
@@ -0,0 +1,32 @@
|
|
|
1
|
+
"use strict";
|
|
2
|
+
// Copyright 2020-2022 OnFinality Limited authors & contributors
|
|
3
|
+
// SPDX-License-Identifier: Apache-2.0
|
|
4
|
+
Object.defineProperty(exports, "__esModule", { value: true });
|
|
5
|
+
exports.functionToSighash = exports.eventToTopic = exports.hexStringEq = exports.stringNormalizedEq = void 0;
|
|
6
|
+
const abi_1 = require("@ethersproject/abi");
|
|
7
|
+
const bytes_1 = require("@ethersproject/bytes");
|
|
8
|
+
const hash_1 = require("@ethersproject/hash");
|
|
9
|
+
function stringNormalizedEq(a, b) {
|
|
10
|
+
return a.toLowerCase() === (b === null || b === void 0 ? void 0 : b.toLowerCase());
|
|
11
|
+
}
|
|
12
|
+
exports.stringNormalizedEq = stringNormalizedEq;
|
|
13
|
+
// Compare two hex strings for equality, ignoring case and leading zeros.
// Throws when either input is not a valid hex string.
function hexStringEq(a, b) {
    const bothHex = (0, bytes_1.isHexString)(a) && (0, bytes_1.isHexString)(b);
    if (!bothHex) {
        throw new Error('Inputs are not hex strings');
    }
    return stringNormalizedEq((0, bytes_1.hexStripZeros)(a), (0, bytes_1.hexStripZeros)(b));
}
exports.hexStringEq = hexStringEq;
|
|
20
|
+
// Convert an event signature (e.g. "Transfer(address,address,uint256)") to
// its hashed topic; inputs that are already hex are passed through as-is.
function eventToTopic(input) {
    if ((0, bytes_1.isHexString)(input)) {
        return input;
    }
    const canonical = abi_1.EventFragment.fromString(input).format();
    return (0, hash_1.id)(canonical);
}
exports.eventToTopic = eventToTopic;
|
|
26
|
+
// Convert a function signature to its 4-byte selector (sighash); hex inputs
// are assumed to already be selectors and are returned unchanged.
function functionToSighash(input) {
    if ((0, bytes_1.isHexString)(input)) {
        return input;
    }
    const canonical = abi_1.FunctionFragment.fromString(input).format();
    return (0, bytes_1.hexDataSlice)((0, hash_1.id)(canonical), 0, 4);
}
exports.functionToSighash = functionToSighash;
|
|
32
|
+
//# sourceMappingURL=string.js.map
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
{"version":3,"file":"string.js","sourceRoot":"","sources":["../../src/utils/string.ts"],"names":[],"mappings":";AAAA,gEAAgE;AAChE,sCAAsC;;;AAEtC,4CAAqE;AACrE,gDAAgF;AAChF,8CAAyC;AAEzC,SAAgB,kBAAkB,CAAC,CAAS,EAAE,CAAS;IACrD,OAAO,CAAC,CAAC,WAAW,EAAE,MAAK,CAAC,aAAD,CAAC,uBAAD,CAAC,CAAE,WAAW,EAAE,CAAA,CAAC;AAC9C,CAAC;AAFD,gDAEC;AAED,SAAgB,WAAW,CAAC,CAAS,EAAE,CAAS;IAC9C,IAAI,CAAC,IAAA,mBAAW,EAAC,CAAC,CAAC,IAAI,CAAC,IAAA,mBAAW,EAAC,CAAC,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,4BAA4B,CAAC,CAAC;KAC/C;IACD,OAAO,kBAAkB,CAAC,IAAA,qBAAa,EAAC,CAAC,CAAC,EAAE,IAAA,qBAAa,EAAC,CAAC,CAAC,CAAC,CAAC;AAChE,CAAC;AALD,kCAKC;AAED,SAAgB,YAAY,CAAC,KAAa;IACxC,IAAI,IAAA,mBAAW,EAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAErC,OAAO,IAAA,SAAE,EAAC,mBAAa,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE,CAAC,CAAC;AACtD,CAAC;AAJD,oCAIC;AAED,SAAgB,iBAAiB,CAAC,KAAa;IAC7C,IAAI,IAAA,mBAAW,EAAC,KAAK,CAAC;QAAE,OAAO,KAAK,CAAC;IAErC,OAAO,IAAA,oBAAY,EAAC,IAAA,SAAE,EAAC,sBAAgB,CAAC,UAAU,CAAC,KAAK,CAAC,CAAC,MAAM,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,CAAC,CAAC;AAC7E,CAAC;AAJD,8CAIC","sourcesContent":["// Copyright 2020-2022 OnFinality Limited authors & contributors\n// SPDX-License-Identifier: Apache-2.0\n\nimport { EventFragment, FunctionFragment } from '@ethersproject/abi';\nimport { isHexString, hexStripZeros, hexDataSlice } from '@ethersproject/bytes';\nimport { id } from '@ethersproject/hash';\n\nexport function stringNormalizedEq(a: string, b: string): boolean {\n return a.toLowerCase() === b?.toLowerCase();\n}\n\nexport function hexStringEq(a: string, b: string): boolean {\n if (!isHexString(a) || !isHexString(b)) {\n throw new Error('Inputs are not hex strings');\n }\n return stringNormalizedEq(hexStripZeros(a), hexStripZeros(b));\n}\n\nexport function eventToTopic(input: string): string {\n if (isHexString(input)) return input;\n\n return id(EventFragment.fromString(input).format());\n}\n\nexport function functionToSighash(input: string): string {\n if (isHexString(input)) return input;\n\n return 
hexDataSlice(id(FunctionFragment.fromString(input).format()), 0, 4);\n}\n"]}
|