@socket.tech/dl-common 1.0.16 → 1.0.17
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/constants/confirmations.d.ts +5 -0
- package/dist/constants/confirmations.js +17 -0
- package/dist/constants/enums.d.ts +44 -0
- package/dist/constants/enums.js +52 -0
- package/dist/constants/index.d.ts +3 -0
- package/dist/constants/index.js +19 -0
- package/dist/constants/types.d.ts +136 -0
- package/dist/constants/types.js +53 -0
- package/dist/constants/waitTime.d.ts +8 -0
- package/dist/constants/waitTime.js +45 -0
- package/dist/dl-common/constants/index.d.ts +1 -0
- package/dist/dl-common/constants/index.js +1 -0
- package/dist/dl-common/constants/index.js.map +1 -1
- package/dist/dl-common/constants/relay-types.d.ts +70 -0
- package/dist/dl-common/constants/relay-types.js +23 -0
- package/dist/dl-common/constants/relay-types.js.map +1 -0
- package/dist/dl-common/utils/index.d.ts +1 -0
- package/dist/dl-common/utils/index.js +1 -0
- package/dist/dl-common/utils/index.js.map +1 -1
- package/dist/dl-common/utils/relaySigner.d.ts +2 -0
- package/dist/dl-common/utils/relaySigner.js +16 -0
- package/dist/dl-common/utils/relaySigner.js.map +1 -1
- package/dist/dl-common/utils/relayUtils.d.ts +2 -0
- package/dist/dl-common/utils/relayUtils.js +21 -0
- package/dist/dl-common/utils/relayUtils.js.map +1 -0
- package/dist/dl-common/utils/signer/adapter.js.map +1 -1
- package/dist/dl-common/utils/signer/kms-ethers-signer.js.map +1 -1
- package/dist/dl-common/utils/signer/kms-signer.js +1 -1
- package/dist/index.d.ts +4 -0
- package/dist/index.js +36 -0
- package/dist/models/attestSignature.d.ts +35 -0
- package/dist/models/attestSignature.js +53 -0
- package/dist/models/attestation.d.ts +70 -0
- package/dist/models/attestation.js +86 -0
- package/dist/models/index.d.ts +7 -0
- package/dist/models/index.js +23 -0
- package/dist/models/lastBlock.d.ts +28 -0
- package/dist/models/lastBlock.js +27 -0
- package/dist/models/message.d.ts +171 -0
- package/dist/models/message.js +193 -0
- package/dist/models/packet.d.ts +240 -0
- package/dist/models/packet.js +249 -0
- package/dist/models/proposal.d.ts +127 -0
- package/dist/models/proposal.js +141 -0
- package/dist/models/switchboard.d.ts +69 -0
- package/dist/models/switchboard.js +92 -0
- package/dist/models/transaction.d.ts +0 -0
- package/dist/models/transaction.js +280 -0
- package/dist/services/cacheService.d.ts +14 -0
- package/dist/services/cacheService.js +77 -0
- package/dist/services/eventBridgeService.d.ts +8 -0
- package/dist/services/eventBridgeService.js +40 -0
- package/dist/services/index.d.ts +3 -0
- package/dist/services/index.js +20 -0
- package/dist/services/queueService.d.ts +10 -0
- package/dist/services/queueService.js +62 -0
- package/dist/src/constants/gasEstimation.d.ts +8 -0
- package/dist/src/constants/gasEstimation.js +61 -0
- package/dist/src/relayers/propose/main.js.map +1 -1
- package/dist/src/relayers/proposeRelayer.d.ts +7 -0
- package/dist/src/relayers/proposeRelayer.js +587 -0
- package/dist/src/services/batcherService.d.ts +9 -0
- package/dist/src/services/batcherService.js +197 -0
- package/dist/src/services/executeService.d.ts +16 -0
- package/dist/src/services/executeService.js +209 -0
- package/dist/utils/address.d.ts +2 -0
- package/dist/utils/address.js +8 -0
- package/dist/utils/axios.d.ts +2 -0
- package/dist/utils/axios.js +54 -0
- package/dist/utils/dataStructHelper.d.ts +2 -0
- package/dist/utils/dataStructHelper.js +10 -0
- package/dist/utils/discord.d.ts +2 -0
- package/dist/utils/discord.js +43 -0
- package/dist/utils/ethersAwsKmsSigner.d.ts +2 -0
- package/dist/utils/ethersAwsKmsSigner.js +26 -0
- package/dist/utils/eventGetter.d.ts +4 -0
- package/dist/utils/eventGetter.js +50 -0
- package/dist/utils/extraUtils.d.ts +32 -0
- package/dist/utils/extraUtils.js +103 -0
- package/dist/utils/idUtils.d.ts +14 -0
- package/dist/utils/idUtils.js +50 -0
- package/dist/utils/index.d.ts +13 -0
- package/dist/utils/index.js +29 -0
- package/dist/utils/relaySigner.d.ts +21 -0
- package/dist/utils/relaySigner.js +68 -0
- package/dist/utils/s3Service.d.ts +11 -0
- package/dist/utils/s3Service.js +70 -0
- package/dist/utils/secretManagerService.d.ts +2 -0
- package/dist/utils/secretManagerService.js +33 -0
- package/dist/utils/signer/adapter.d.ts +18 -0
- package/dist/utils/signer/adapter.js +71 -0
- package/dist/utils/signer/address.d.ts +9 -0
- package/dist/utils/signer/address.js +42 -0
- package/dist/utils/signer/asn1-parser.d.ts +10 -0
- package/dist/utils/signer/asn1-parser.js +79 -0
- package/dist/utils/signer/crypto.d.ts +5 -0
- package/dist/utils/signer/crypto.js +33 -0
- package/dist/utils/signer/index.d.ts +5 -0
- package/dist/utils/signer/index.js +21 -0
- package/dist/utils/signer/kms-ethers-signer.d.ts +19 -0
- package/dist/utils/signer/kms-ethers-signer.js +32 -0
- package/dist/utils/signer/kms-signer.d.ts +13 -0
- package/dist/utils/signer/kms-signer.js +46 -0
- package/dist/utils/signer/signature.d.ts +16 -0
- package/dist/utils/signer/signature.js +65 -0
- package/dist/utils/signer/signer.d.ts +6 -0
- package/dist/utils/signer/signer.js +2 -0
- package/dist/utils/signer/socketSigner.d.ts +39 -0
- package/dist/utils/signer/socketSigner.js +153 -0
- package/dist/utils/time.d.ts +2 -0
- package/dist/utils/time.js +7 -0
- package/package.json +4 -4
package/dist/src/services/batcherService.js
@@ -0,0 +1,197 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getBatchDataArgs = exports.checkGasLimitAndAddToBatch = exports.calculateBatchGasLimit = void 0;
+const dl_core_1 = require("@socket.tech/dl-core");
+const constants_1 = require("../constants");
+const ethers_1 = require("ethers");
+const gasEstimation_1 = require("../constants/gasEstimation");
+const gasEstimationService_1 = require("./gasEstimationService");
+const common_1 = require("../relayers/common");
+const utils_1 = require("../utils");
+const calculateBatchGasLimit = async (dstChainSlug, currentBatch) => {
+    try {
+        const start = Date.now();
+        let totalGasLimit = 0;
+        const socketBatcher = await (0, common_1.getSocketBatcher)(dstChainSlug, {}, false);
+        const socketAddress = (0, common_1.getSocketAddress)(dstChainSlug);
+        (0, utils_1.logInfo)({
+            info: "ESTIMATING_GAS_PROPOSE_RELAY",
+            packetIds: currentBatch.proposeBatchDataArgs.map((a) => a[0]),
+            messageIds: [],
+        });
+        const proposeAttestGasEstimate = await socketBatcher.estimateGas.sendBatch(socketAddress,
+        // switchboardAddress, // this may change if we have multiple switchboards. this works only for
+        // fast switchboard case
+        [], // for sealBatch
+        currentBatch.proposeBatchDataArgs, currentBatch.attestBatchDataArgs, [], {
+            value: currentBatch.totalMsgValue.toHexString(),
+        });
+        totalGasLimit += proposeAttestGasEstimate.toNumber();
+        for (const [executionDetails, messageDetails,] of currentBatch.executeBatchDataArgs) {
+            totalGasLimit += executionDetails[2].toNumber();
+        }
+        totalGasLimit = Math.floor(1.1 * totalGasLimit + 100000); // for safety
+        currentBatch.totalGasLimit = totalGasLimit;
+        (0, utils_1.logInfo)({
+            info: "FINAL_GAS_LIMIT",
+            totalGasLimit,
+            packetIds: currentBatch.proposeBatchDataArgs.map((a) => a[0]),
+            timeTaken: Date.now() - start,
+            messageIds: [],
+        });
+    }
+    catch (error) {
+        console.log("Error in calculateBatchGasLimit", error);
+    }
+    return currentBatch;
+};
+exports.calculateBatchGasLimit = calculateBatchGasLimit;
+const checkGasLimitAndAddToBatch = async (dstChainSlug, currentGasLimit, currentBatch, batchArgsArray) => {
+    const maxGasLimit = (0, gasEstimation_1.getMaxGasLimit)(dstChainSlug);
+    // console.log("maxGasLimit", maxGasLimit);
+    if (currentGasLimit > maxGasLimit) {
+        currentBatch = await (0, exports.calculateBatchGasLimit)(dstChainSlug, currentBatch);
+        // console.log(currentBatch.proposeRowIds, currentBatch.attestRowIds, currentBatch.messageRowIds);
+        batchArgsArray.push(currentBatch);
+        currentBatch = {
+            proposeBatchDataArgs: [],
+            attestBatchDataArgs: [],
+            executeBatchDataArgs: [],
+            totalMsgValue: ethers_1.BigNumber.from(0),
+            totalGasLimit: 0,
+            proposeRowIds: [],
+            attestRowIds: [],
+            messageRowIds: [],
+        };
+        currentGasLimit = 0;
+    }
+    return { currentGasLimit, batchArgsArray, currentBatch };
+};
+exports.checkGasLimitAndAddToBatch = checkGasLimitAndAddToBatch;
+const getBatchDataArgs = async (packets, packetAttestSignatures, packetProposalCounts) => {
+    let currentGasLimit = 0;
+    let packetGasLimit = 0;
+    let skip = false;
+    let batchArgsArray = [];
+    let currentBatch = {
+        proposeBatchDataArgs: [],
+        attestBatchDataArgs: [],
+        executeBatchDataArgs: [],
+        totalMsgValue: ethers_1.BigNumber.from(0),
+        totalGasLimit: 0,
+        proposeRowIds: [],
+        attestRowIds: [],
+        messageRowIds: [],
+    };
+    for (const packet of packets) {
+        // before start of every iteration, check if currentGasLimit is greater than
+        // maxGasLimit and reset packetGasLimit. Ignored on first iteration.
+        currentGasLimit += packetGasLimit;
+        ({ currentGasLimit, currentBatch, batchArgsArray } =
+            await (0, exports.checkGasLimitAndAddToBatch)(packet.dstChainSlug, currentGasLimit, currentBatch, batchArgsArray));
+        packetGasLimit = 0;
+        skip = false;
+        ({ currentBatch, packetGasLimit, skip } = await addProposeArgs(packet, currentBatch, packetGasLimit));
+        if (skip)
+            continue;
+        ({ currentBatch, packetGasLimit, skip } = await addAttestArgs(packet, packetAttestSignatures, packetProposalCounts, currentBatch, packetGasLimit));
+        if (skip)
+            continue;
+        if (!constants_1.executeRelayMode)
+            continue;
+        ({ currentBatch, packetGasLimit, skip } = await addExecuteArgs(packet, currentBatch, packetGasLimit, packetProposalCounts));
+        if (skip)
+            continue;
+    }
+    if (currentBatch.proposeBatchDataArgs.length > 0 ||
+        currentBatch.attestBatchDataArgs.length > 0 ||
+        currentBatch.executeBatchDataArgs.length > 0) {
+        currentBatch = await (0, exports.calculateBatchGasLimit)(packets[0].dstChainSlug, currentBatch);
+        batchArgsArray.push(currentBatch);
+    }
+    return batchArgsArray;
+};
+exports.getBatchDataArgs = getBatchDataArgs;
+const addProposeArgs = async (packet, currentBatch, packetGasLimit) => {
+    // If not proposed, and proposeRelayMode is also false, no point in continuing.
+    // If proposeRelayMode is false, but isProposed is true, we can still proceed if
+    // attest and execute is pending (and their relay mode is true)
+    if (!constants_1.proposeRelayMode && !packet.isProposed) {
+        return { currentBatch, packetGasLimit, skip: true };
+    }
+    if (!packet.isProposed) {
+        currentBatch.proposeBatchDataArgs.push([
+            packet.packetId,
+            packet.root,
+            packet.dstSwitchboard,
+            packet.sealSignature,
+        ]);
+        currentBatch.proposeRowIds.push(packet.id);
+        const proposeGasLimit = await (0, gasEstimationService_1.getProposeGasLimit)(packet.dstChainSlug);
+        packetGasLimit += proposeGasLimit;
+    }
+    return { currentBatch, packetGasLimit, skip: false };
+};
+const addAttestArgs = async (packet, packetAttestSignatures, packetProposalCounts, currentBatch, packetGasLimit) => {
+    // if fast integration type and not attested, and got enough signatures, add to attest batch.
+    // if not attested and didnt get enough signatures, continue from here.
+    // attestRelayMode check inside attestService. we get empty packetAttestSignatures
+    // and therefore no attestations, and no message execution(if attestation pending).
+    var _a;
+    if ([dl_core_1.IntegrationTypes.fast, dl_core_1.IntegrationTypes.fast2].includes(packet.integrationType) &&
+        !packet.isAttested) {
+        if ((_a = packetAttestSignatures[packet.packetId]) === null || _a === void 0 ? void 0 : _a.length) {
+            const attestGasLimit = await (0, gasEstimationService_1.getAttestGasLimit)(packet.dstChainSlug);
+            packetAttestSignatures[packet.packetId].map(async (signature) => {
+                currentBatch.attestBatchDataArgs.push([
+                    packet.dstSwitchboard,
+                    packet.packetId,
+                    packetProposalCounts[packet.packetId].count,
+                    packet.root,
+                    signature,
+                ]);
+                packetGasLimit += attestGasLimit;
+            });
+            currentBatch.attestRowIds.push(packet.id);
+        }
+        else {
+            // If not already attested, and didnt get enough signatures now as well, no point
+            // batching message execution. continue from here.
+            return { currentBatch, packetGasLimit, skip: true };
+        }
+    }
+    return { currentBatch, packetGasLimit, skip: false };
+};
+const addExecuteArgs = async (packet, currentBatch, packetGasLimit, packetProposalCounts) => {
+    // while getting isProposed false packets, we might get native packets
+    // with isVerified false, but valid message. skip their execution.
+    // wont appear in query again as soon as proposed.
+    if ([dl_core_1.IntegrationTypes.native, dl_core_1.IntegrationTypes.optimistic].includes(packet.integrationType) &&
+        !packet.isVerified)
+        return { currentBatch, packetGasLimit, skip: true };
+    const messages = packet.Messages;
+    if (messages === null || messages === void 0 ? void 0 : messages.length)
+        for (const message of messages) {
+            const { retry, messageId, executeMsgGasLimit, executeSignature, inboundSuccess, packetId, decapacitorProof, executionFee, minMsgGasLimit, executionParams, payload, id, } = message;
+            currentBatch.messageRowIds.push(id);
+            if (executionParams.slice(0, 4) !== "0x00") {
+                const msgValue = ethers_1.BigNumber.from("0x" + executionParams.slice(4));
+                currentBatch.totalMsgValue = currentBatch.totalMsgValue.add(msgValue);
+            }
+            currentBatch.executeBatchDataArgs.push([
+                // executionDetails,
+                [
+                    packetId,
+                    packetProposalCounts[packetId].count,
+                    ethers_1.BigNumber.from(executeMsgGasLimit),
+                    decapacitorProof,
+                    executeSignature,
+                ],
+                // msgDetails
+                [messageId, executionFee, minMsgGasLimit, executionParams, payload],
+            ]);
+            const executeGasLimit = ethers_1.BigNumber.from(executeMsgGasLimit).toNumber();
+            packetGasLimit += executeGasLimit;
+        }
+    return { currentBatch, packetGasLimit, skip: false };
+};
package/dist/src/services/executeService.d.ts
@@ -0,0 +1,16 @@
+import { type ChainSlug } from "@socket.tech/dl-core";
+import { type ExecuteSigResponse } from "../../dl-common/constants";
+export interface ExecuteSigRequest {
+    messageId: string;
+    destPlug: string;
+    srcPlug: string;
+    dstChainSlug: ChainSlug;
+    srcChainSlug: ChainSlug;
+    payload: string;
+    minMsgGasLimit: string;
+    executionParams: string;
+    packedMessage: string;
+}
+export declare const sendRetrySimulationRequest: () => Promise<void>;
+export declare const sendPendingSimulationRequest: () => Promise<void>;
+export declare const saveMessageSimulationResponse: (executeResponseData: ExecuteSigResponse[]) => Promise<void>;
package/dist/src/services/executeService.js
@@ -0,0 +1,209 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.saveMessageSimulationResponse = exports.sendPendingSimulationRequest = exports.sendRetrySimulationRequest = void 0;
+const utils_1 = require("../../dl-common/utils");
+const utils_2 = require("../utils");
+const constants_1 = require("../constants");
+const models_1 = require("../../dl-common/models");
+const sequelize_1 = require("sequelize");
+const prometheus_config_1 = require("../constants/prometheus.config");
+const constants_2 = require("../../dl-common/constants");
+const ethers_1 = require("ethers");
+const sendRequest = async (messages) => {
+    const executeRequestData = [];
+    const start = Date.now();
+    for (const message of messages) {
+        executeRequestData.push({
+            messageId: message.messageId,
+            destPlug: message.destPlug,
+            srcPlug: message.srcPlug,
+            dstChainSlug: message.dstChainSlug,
+            srcChainSlug: message.srcChainSlug,
+            payload: message.payload,
+            minMsgGasLimit: message.minMsgGasLimit,
+            executionParams: message.executionParams,
+            packedMessage: message.packedMessage,
+        });
+    }
+    (0, utils_2.logInfo)({
+        info: "EXECUTOR_POST_API_CALLING",
+        messageIds: messages.map((m) => m.messageId),
+        start,
+        packetIds: [],
+    });
+    if (constants_1.EXECUTOR_URL) {
+        const response = await (0, utils_1.axiosPost)(constants_1.EXECUTOR_URL, { messageData: executeRequestData }, { headers: { "x-auth-token": constants_1.EXECUTOR_AUTH_TOKEN } });
+        (0, utils_2.logInfo)({
+            info: "EXECUTOR_POST_API_CALLED",
+            messageIds: messages.map((m) => m.messageId),
+            packetIds: [],
+            end: Date.now() - start,
+            response,
+        });
+    }
+    else {
+        // @review throw here?
+        console.log("Call to executor failed");
+    }
+};
+const sendRetrySimulationRequest = async () => {
+    try {
+        const messages = await getRetrySimulationMessages();
+        if (messages.length == 0)
+            return;
+        (0, utils_2.logInfo)({
+            info: "RETRY_SIMULATION_REQUEST",
+            msgCount: messages.length,
+            messageIds: messages.map((m) => m.messageId),
+            packetIds: [],
+        });
+        const messagesAsChunks = [...(0, utils_1.chunks)(messages, constants_1.executeBatchRelayLimit)];
+        for (const chunk of messagesAsChunks)
+            await sendRequest(chunk);
+    }
+    catch (error) {
+        console.log("error while sending retry simulation request: ", error);
+    }
+};
+exports.sendRetrySimulationRequest = sendRetrySimulationRequest;
+const sendPendingSimulationRequest = async () => {
+    try {
+        const messages = await getPendingSimulationMessages();
+        if (messages.length > 0) {
+            (0, utils_2.logInfo)({
+                info: "PENDING_SIMULATION_REQUEST",
+                msgCount: messages.length,
+                messageIds: messages.map((m) => m.messageId),
+                packetIds: [],
+            });
+            const messagesAsChunks = [...(0, utils_1.chunks)(messages, constants_1.executeBatchRelayLimit)];
+            for (const chunk of messagesAsChunks)
+                await sendRequest(chunk);
+            (0, utils_2.logInfo)({
+                info: "PENDING_SIMULATION_REQUEST_SENT",
+                messageIds: messages.map((m) => m.messageId),
+                packetIds: [],
+            });
+        }
+    }
+    catch (error) {
+        console.log("error while sendMessageSimulationRequest request: ", error);
+    }
+};
+exports.sendPendingSimulationRequest = sendPendingSimulationRequest;
+const saveMessageSimulationResponse = async (executeResponseData) => {
+    if (!(executeResponseData === null || executeResponseData === void 0 ? void 0 : executeResponseData.length))
+        return;
+    const messageIds = [];
+    executeResponseData.map((data) => {
+        messageIds.push(data.messageId);
+    });
+    const messages = await models_1.Message.findAll({
+        where: {
+            messageId: { [sequelize_1.Op.in]: messageIds },
+        },
+    });
+    const messageMap = new Map();
+    messages.forEach((message) => {
+        messageMap.set(message.messageId, message);
+    });
+    if (executeResponseData === null || executeResponseData === void 0 ? void 0 : executeResponseData.length) {
+        executeResponseData = executeResponseData.map((data) => {
+            var _a;
+            const firstSimulationTimestamp = Math.floor(Date.now() / 1000);
+            let executeRelayStatus = models_1.ExecutionRelayStatus.NO;
+            const MaxMsgGasLimit = (0, constants_1.getMaxMsgGasLimit)((_a = messageMap.get(data.messageId)) === null || _a === void 0 ? void 0 : _a.dstChainSlug);
+            // mark ignored if msgGasLimit mentioned is too big
+            if (ethers_1.BigNumber.from(data.executeMsgGasLimit).gt(MaxMsgGasLimit)) {
+                executeRelayStatus = models_1.ExecutionRelayStatus.IGNORED;
+            }
+            if (data.inboundRevertString === constants_2.ErrorReasons.ZERO_AMOUNT) {
+                executeRelayStatus = models_1.ExecutionRelayStatus.IGNORED;
+            }
+            return {
+                ...data,
+                firstSimulationTimestamp,
+                executeRelayStatus,
+            };
+        });
+        (0, utils_2.logInfo)({
+            info: "UPDATING_SIMULATION_STATES",
+            msgCount: executeResponseData.length,
+            messageIds: executeResponseData.map((m) => m.messageId),
+            packetIds: [],
+        });
+        await saveMessageUpdates(executeResponseData);
+        (0, utils_2.logInfo)({
+            info: "UPDATED_SIMULATION_STATES",
+            messageIds: executeResponseData.map((m) => m.messageId),
+            packetIds: [],
+        });
+    }
+    messages.forEach((message) => {
+        (0, prometheus_config_1.observeHistogram)(prometheus_config_1.relayMetrics.executeRelay.signHistogram, {
+            labels: {
+                [prometheus_config_1.Labels.SRC_CHAIN_SLUG]: message.srcChainSlug,
+                [prometheus_config_1.Labels.DST_CHAIN_SLUG]: message.dstChainSlug,
+            },
+            timeDiff: (0, utils_1.currentTimestampInSeconds)() - message.outboundTime,
+        });
+    });
+};
+exports.saveMessageSimulationResponse = saveMessageSimulationResponse;
+const saveMessageUpdates = async (dbUpdates) => {
+    try {
+        // TODO: fix this
+        // @ts-expect-error
+        await models_1.Message.bulkCreate(dbUpdates, {
+            updateOnDuplicate: [
+                "retry",
+                "inboundSuccess",
+                "inboundRevertString",
+                "firstSimulationTimestamp",
+                "executeSignature",
+                "executeMsgGasLimit",
+                "executeRelayStatus",
+            ],
+        });
+    }
+    catch (error) {
+        utils_2.logger.error("Error while saving execute signature updates", error);
+        throw new Error(`Error while saving execute signature updates ${error === null || error === void 0 ? void 0 : error.message}`);
+    }
+};
+const getPendingSimulationMessages = async () => {
+    return await models_1.Message.findAll({
+        limit: constants_1.executeBatchQueryLimit,
+        where: {
+            isExecuted: false,
+            executeRelayStatus: {
+                [sequelize_1.Op.not]: models_1.ExecutionRelayStatus.IGNORED,
+            },
+            firstSimulationTimestamp: 0,
+        },
+        order: [["updatedAt", "ASC"]],
+    });
+};
+const getRetrySimulationMessages = async () => {
+    return await models_1.Message.findAll({
+        limit: constants_1.executeBatchQueryLimit,
+        where: {
+            isExecuted: false,
+            // already checked before, and within retry interval period
+            firstSimulationTimestamp: {
+                [sequelize_1.Op.gt]: Math.floor(Date.now() / 1000) - constants_1.retryInterval,
+            },
+            [sequelize_1.Op.or]: [
+                {
+                    inboundSuccess: false,
+                },
+                {
+                    retry: false,
+                },
+            ],
+            // ignore already executed, failed, executing, and ignored messages
+            executeRelayStatus: models_1.ExecutionRelayStatus.NO,
+        },
+        order: [["updatedAt", "ASC"]],
+    });
+};
package/dist/utils/address.js
@@ -0,0 +1,8 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.toLowerCase = exports.checksum = void 0;
+const ethers_1 = require("ethers");
+const checksum = (address) => ethers_1.utils.getAddress(address);
+exports.checksum = checksum;
+const toLowerCase = (address) => address.toLowerCase();
+exports.toLowerCase = toLowerCase;
package/dist/utils/axios.js
@@ -0,0 +1,54 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.axiosPost = exports.axiosGet = void 0;
+const axios_1 = __importDefault(require("axios"));
+const __1 = require("..");
+const axiosGet = async (url, config = {}, errorLogging = false) => {
+    var _a, _b, _c, _d;
+    if (!url)
+        throw (0, __1.InternalServerError)("GET request url not provided");
+    try {
+        const response = await axios_1.default.get(url, config);
+        return { success: true, ...response === null || response === void 0 ? void 0 : response.data };
+    }
+    catch (error) {
+        if (errorLogging) {
+            console.log("status : ", (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.status);
+            console.log("error occurred, url : ", url, "\n error : ", error === null || error === void 0 ? void 0 : error.message, (_b = error === null || error === void 0 ? void 0 : error.response) === null || _b === void 0 ? void 0 : _b.data);
+        }
+        return {
+            success: false,
+            status: (_c = error === null || error === void 0 ? void 0 : error.response) === null || _c === void 0 ? void 0 : _c.status,
+            message: error === null || error === void 0 ? void 0 : error.message,
+            ...(_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data,
+        };
+    }
+};
+exports.axiosGet = axiosGet;
+const axiosPost = async (url, data, config = {}, errorLogging = false) => {
+    var _a, _b, _c, _d;
+    if (!url)
+        throw (0, __1.InternalServerError)("POST request url not provided");
+    if (!data)
+        throw (0, __1.InternalServerError)("POST request data not provided");
+    try {
+        const response = await axios_1.default.post(url, data, config);
+        return { success: true, ...response === null || response === void 0 ? void 0 : response.data };
+    }
+    catch (error) {
+        if (errorLogging) {
+            console.log("status : ", (_a = error === null || error === void 0 ? void 0 : error.response) === null || _a === void 0 ? void 0 : _a.status);
+            console.log("error occurred, url : ", url, "data : ", data, "\n error : ", error === null || error === void 0 ? void 0 : error.message, (_b = error === null || error === void 0 ? void 0 : error.response) === null || _b === void 0 ? void 0 : _b.data);
+        }
+        return {
+            success: false,
+            message: error === null || error === void 0 ? void 0 : error.message,
+            status: (_c = error === null || error === void 0 ? void 0 : error.response) === null || _c === void 0 ? void 0 : _c.status,
+            ...(_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data,
+        };
+    }
+};
+exports.axiosPost = axiosPost;
package/dist/utils/dataStructHelper.js
@@ -0,0 +1,10 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.MAX_PAYLOAD_SIZE = void 0;
+exports.chunks = chunks;
+exports.MAX_PAYLOAD_SIZE = 10 * 1024; // max is 24.5 kb, left some buffer for function and batch encoding
+function* chunks(arr, n) {
+    for (let i = 0; i < arr.length; i += n) {
+        yield arr.slice(i, i + n);
+    }
+}
package/dist/utils/discord.js
@@ -0,0 +1,43 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.sendToDiscordWebhook = exports.initDiscord = void 0;
+const __1 = require("..");
+const axios_1 = require("./axios");
+let discordWebhookUrl;
+let discordDebugWebhookUrl;
+let serviceName;
+const initDiscord = (webhookUrl_, serviceName_, discordDebugWebhookUrl_ = undefined) => {
+    discordWebhookUrl = webhookUrl_;
+    discordDebugWebhookUrl = discordDebugWebhookUrl_;
+    serviceName = serviceName_;
+};
+exports.initDiscord = initDiscord;
+const sendToDiscordWebhook = async (title, data, optionalConfig = {}, isDebug = false) => {
+    try {
+        if (!discordWebhookUrl) {
+            throw (0, __1.InternalServerError)("DISCORD_WEBHOOK_URL not provided");
+        }
+        if (isDebug && !discordDebugWebhookUrl) {
+            console.log("Debug mode enabled but no debug webhook provided");
+        }
+        let content = formatMsg(title, data);
+        const urlToUse = isDebug && discordDebugWebhookUrl
+            ? discordDebugWebhookUrl
+            : discordWebhookUrl;
+        await (0, axios_1.axiosPost)(urlToUse, { content, ...optionalConfig });
+    }
+    catch (error) {
+        throw error;
+    }
+};
+exports.sendToDiscordWebhook = sendToDiscordWebhook;
+const formatMsg = (title, data) => {
+    let message = `**${title}**\n`;
+    // Iterate through the object's key-value pairs
+    for (const key in data) {
+        if (data.hasOwnProperty(key)) {
+            message += `${key}: ${data[key]}\n`;
+        }
+    }
+    return message;
+};
package/dist/utils/ethersAwsKmsSigner.js
@@ -0,0 +1,26 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getAwsKmsSigner = void 0;
+const kms_ethers_signer_1 = require("./signer/kms-ethers-signer");
+const getAwsKmsSigner = async (keyId, region = "us-east-1") => {
+    try {
+        const kmsCredentials = {
+            kmsClientConfig: {
+                signingRegion: region,
+                region,
+            },
+            keyId,
+        };
+        let signer = new kms_ethers_signer_1.KmsEthersSigner(kmsCredentials);
+        let signerAddress = await signer.getAddress();
+        // logger.info(` keyid ${keyId} ${signerAddress}`);
+        if (!signerAddress) {
+            throw new Error(`Invalid kms signer, keyId:${keyId}`);
+        }
+        return signer;
+    }
+    catch (error) {
+        throw error;
+    }
+};
+exports.getAwsKmsSigner = getAwsKmsSigner;
package/dist/utils/eventGetter.d.ts
@@ -0,0 +1,4 @@
+import { Contract, Event, EventFilter, providers } from "ethers";
+import { StaticJsonRpcProvider } from "@ethersproject/providers";
+export declare const getEvents: (contract: Contract, filter: EventFilter | string, fromBlock: number, toBlock: number, eventBlockRange?: number) => Promise<Event[]>;
+export declare const getLogs: (provider: StaticJsonRpcProvider, address: string[], topics: string[], fromBlock: number, toBlock: number, eventBlockRange?: number) => Promise<providers.Log[]>;
package/dist/utils/eventGetter.js
@@ -0,0 +1,50 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.getLogs = exports.getEvents = void 0;
+const getEvents = async (contract, filter, fromBlock, toBlock, eventBlockRange = 5000) => {
+    const allEvents = [];
+    try {
+        let from = fromBlock;
+        let to = from + eventBlockRange - 1;
+        to = to < toBlock ? to : toBlock;
+        while (from <= toBlock) {
+            const events = await contract.queryFilter(filter, from, to);
+            allEvents.push(...events);
+            from = to + 1;
+            to = from + eventBlockRange - 1;
+            to = to < toBlock ? to : toBlock;
+        }
+    }
+    catch (error) {
+        throw error;
+    }
+    return allEvents;
+};
+exports.getEvents = getEvents;
+const getLogs = async (provider, address, topics, fromBlock, toBlock, eventBlockRange = 5000) => {
+    const allLogs = [];
+    try {
+        let from = fromBlock;
+        let to = from + eventBlockRange - 1;
+        to = to < toBlock ? to : toBlock;
+        while (from <= toBlock) {
+            const logs = await provider.send("eth_getLogs", [
+                {
+                    address,
+                    topics,
+                    fromBlock: `0x${from.toString(16)}`,
+                    toBlock: `0x${to.toString(16)}`,
+                },
+            ]);
+            allLogs.push(...logs);
+            from = to + 1;
+            to = from + eventBlockRange - 1;
+            to = to < toBlock ? to : toBlock;
+        }
+    }
+    catch (error) {
+        throw error;
+    }
+    return allLogs;
+};
+exports.getLogs = getLogs;