@aztec/p2p 0.76.4 → 0.77.0-testnet-ignition.21

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (241) hide show
  1. package/dest/bootstrap/bootstrap.d.ts +2 -2
  2. package/dest/bootstrap/bootstrap.d.ts.map +1 -1
  3. package/dest/bootstrap/bootstrap.js +55 -41
  4. package/dest/client/factory.d.ts +8 -6
  5. package/dest/client/factory.d.ts.map +1 -1
  6. package/dest/client/factory.js +8 -10
  7. package/dest/client/index.js +0 -1
  8. package/dest/client/p2p_client.d.ts +7 -4
  9. package/dest/client/p2p_client.d.ts.map +1 -1
  10. package/dest/client/p2p_client.js +492 -514
  11. package/dest/config.d.ts +8 -10
  12. package/dest/config.d.ts.map +1 -1
  13. package/dest/config.js +54 -47
  14. package/dest/enr/generate-enr.d.ts +9 -0
  15. package/dest/enr/generate-enr.d.ts.map +1 -0
  16. package/dest/enr/generate-enr.js +30 -0
  17. package/dest/enr/index.d.ts +2 -0
  18. package/dest/enr/index.d.ts.map +1 -0
  19. package/dest/enr/index.js +1 -0
  20. package/dest/errors/reqresp.error.js +6 -10
  21. package/dest/index.js +0 -1
  22. package/dest/mem_pools/attestation_pool/attestation_pool.d.ts +1 -1
  23. package/dest/mem_pools/attestation_pool/attestation_pool.d.ts.map +1 -1
  24. package/dest/mem_pools/attestation_pool/attestation_pool.js +6 -2
  25. package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.d.ts +1 -1
  26. package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.d.ts.map +1 -1
  27. package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.js +65 -33
  28. package/dest/mem_pools/attestation_pool/index.js +0 -1
  29. package/dest/mem_pools/attestation_pool/kv_attestation_pool.d.ts +3 -3
  30. package/dest/mem_pools/attestation_pool/kv_attestation_pool.d.ts.map +1 -1
  31. package/dest/mem_pools/attestation_pool/kv_attestation_pool.js +23 -20
  32. package/dest/mem_pools/attestation_pool/memory_attestation_pool.d.ts +2 -2
  33. package/dest/mem_pools/attestation_pool/memory_attestation_pool.d.ts.map +1 -1
  34. package/dest/mem_pools/attestation_pool/memory_attestation_pool.js +22 -26
  35. package/dest/mem_pools/attestation_pool/mocks.d.ts +3 -2
  36. package/dest/mem_pools/attestation_pool/mocks.d.ts.map +1 -1
  37. package/dest/mem_pools/attestation_pool/mocks.js +12 -7
  38. package/dest/mem_pools/index.d.ts +2 -2
  39. package/dest/mem_pools/index.d.ts.map +1 -1
  40. package/dest/mem_pools/index.js +1 -2
  41. package/dest/mem_pools/instrumentation.d.ts +1 -1
  42. package/dest/mem_pools/instrumentation.d.ts.map +1 -1
  43. package/dest/mem_pools/instrumentation.js +35 -39
  44. package/dest/mem_pools/interface.d.ts +3 -3
  45. package/dest/mem_pools/interface.d.ts.map +1 -1
  46. package/dest/mem_pools/interface.js +3 -2
  47. package/dest/mem_pools/tx_pool/aztec_kv_tx_pool.d.ts +2 -2
  48. package/dest/mem_pools/tx_pool/aztec_kv_tx_pool.d.ts.map +1 -1
  49. package/dest/mem_pools/tx_pool/aztec_kv_tx_pool.js +129 -136
  50. package/dest/mem_pools/tx_pool/index.js +0 -1
  51. package/dest/mem_pools/tx_pool/memory_tx_pool.d.ts +2 -2
  52. package/dest/mem_pools/tx_pool/memory_tx_pool.d.ts.map +1 -1
  53. package/dest/mem_pools/tx_pool/memory_tx_pool.js +46 -44
  54. package/dest/mem_pools/tx_pool/priority.d.ts +1 -1
  55. package/dest/mem_pools/tx_pool/priority.d.ts.map +1 -1
  56. package/dest/mem_pools/tx_pool/priority.js +1 -3
  57. package/dest/mem_pools/tx_pool/tx_pool.d.ts +1 -1
  58. package/dest/mem_pools/tx_pool/tx_pool.d.ts.map +1 -1
  59. package/dest/mem_pools/tx_pool/tx_pool.js +3 -2
  60. package/dest/mem_pools/tx_pool/tx_pool_test_suite.d.ts +1 -1
  61. package/dest/mem_pools/tx_pool/tx_pool_test_suite.d.ts.map +1 -1
  62. package/dest/mem_pools/tx_pool/tx_pool_test_suite.js +109 -39
  63. package/dest/msg_validators/attestation_validator/attestation_validator.d.ts +2 -2
  64. package/dest/msg_validators/attestation_validator/attestation_validator.d.ts.map +1 -1
  65. package/dest/msg_validators/attestation_validator/attestation_validator.js +4 -4
  66. package/dest/msg_validators/attestation_validator/index.js +0 -1
  67. package/dest/msg_validators/block_proposal_validator/block_proposal_validator.d.ts +2 -2
  68. package/dest/msg_validators/block_proposal_validator/block_proposal_validator.d.ts.map +1 -1
  69. package/dest/msg_validators/block_proposal_validator/block_proposal_validator.js +3 -3
  70. package/dest/msg_validators/block_proposal_validator/index.js +0 -1
  71. package/dest/msg_validators/index.js +0 -1
  72. package/dest/msg_validators/tx_validator/aggregate_tx_validator.d.ts +1 -1
  73. package/dest/msg_validators/tx_validator/aggregate_tx_validator.d.ts.map +1 -1
  74. package/dest/msg_validators/tx_validator/aggregate_tx_validator.js +9 -11
  75. package/dest/msg_validators/tx_validator/block_header_validator.d.ts +2 -2
  76. package/dest/msg_validators/tx_validator/block_header_validator.d.ts.map +1 -1
  77. package/dest/msg_validators/tx_validator/block_header_validator.js +18 -13
  78. package/dest/msg_validators/tx_validator/data_validator.d.ts +1 -1
  79. package/dest/msg_validators/tx_validator/data_validator.d.ts.map +1 -1
  80. package/dest/msg_validators/tx_validator/data_validator.js +102 -33
  81. package/dest/msg_validators/tx_validator/double_spend_validator.d.ts +1 -1
  82. package/dest/msg_validators/tx_validator/double_spend_validator.d.ts.map +1 -1
  83. package/dest/msg_validators/tx_validator/double_spend_validator.js +34 -20
  84. package/dest/msg_validators/tx_validator/index.js +0 -1
  85. package/dest/msg_validators/tx_validator/metadata_validator.d.ts +2 -2
  86. package/dest/msg_validators/tx_validator/metadata_validator.d.ts.map +1 -1
  87. package/dest/msg_validators/tx_validator/metadata_validator.js +30 -27
  88. package/dest/msg_validators/tx_validator/tx_proof_validator.d.ts +2 -1
  89. package/dest/msg_validators/tx_validator/tx_proof_validator.d.ts.map +1 -1
  90. package/dest/msg_validators/tx_validator/tx_proof_validator.js +17 -12
  91. package/dest/services/data_store.js +57 -57
  92. package/dest/services/discv5/discV5_service.d.ts +2 -0
  93. package/dest/services/discv5/discV5_service.d.ts.map +1 -1
  94. package/dest/services/discv5/discV5_service.js +64 -36
  95. package/dest/services/dummy_service.d.ts +4 -2
  96. package/dest/services/dummy_service.d.ts.map +1 -1
  97. package/dest/services/dummy_service.js +41 -59
  98. package/dest/services/encoding.d.ts +3 -3
  99. package/dest/services/encoding.d.ts.map +1 -1
  100. package/dest/services/encoding.js +10 -9
  101. package/dest/services/gossipsub/scoring.d.ts +7 -0
  102. package/dest/services/gossipsub/scoring.d.ts.map +1 -0
  103. package/dest/services/gossipsub/scoring.js +10 -0
  104. package/dest/services/index.js +0 -1
  105. package/dest/services/libp2p/libp2p_service.d.ts +10 -33
  106. package/dest/services/libp2p/libp2p_service.d.ts.map +1 -1
  107. package/dest/services/libp2p/libp2p_service.js +682 -673
  108. package/dest/services/peer-manager/metrics.js +14 -7
  109. package/dest/services/peer-manager/peer_manager.d.ts +24 -6
  110. package/dest/services/peer-manager/peer_manager.d.ts.map +1 -1
  111. package/dest/services/peer-manager/peer_manager.js +390 -340
  112. package/dest/services/peer-manager/peer_scoring.d.ts +3 -3
  113. package/dest/services/peer-manager/peer_scoring.d.ts.map +1 -1
  114. package/dest/services/peer-manager/peer_scoring.js +21 -19
  115. package/dest/services/reqresp/config.js +4 -5
  116. package/dest/services/reqresp/connection-sampler/batch_connection_sampler.d.ts +2 -2
  117. package/dest/services/reqresp/connection-sampler/batch_connection_sampler.d.ts.map +1 -1
  118. package/dest/services/reqresp/connection-sampler/batch_connection_sampler.js +35 -28
  119. package/dest/services/reqresp/connection-sampler/connection_sampler.d.ts +1 -1
  120. package/dest/services/reqresp/connection-sampler/connection_sampler.d.ts.map +1 -1
  121. package/dest/services/reqresp/connection-sampler/connection_sampler.js +67 -61
  122. package/dest/services/reqresp/index.js +1 -3
  123. package/dest/services/reqresp/interface.d.ts +2 -2
  124. package/dest/services/reqresp/interface.d.ts.map +1 -1
  125. package/dest/services/reqresp/interface.js +27 -31
  126. package/dest/services/reqresp/metrics.d.ts +1 -1
  127. package/dest/services/reqresp/metrics.d.ts.map +1 -1
  128. package/dest/services/reqresp/metrics.js +23 -10
  129. package/dest/services/reqresp/protocols/block.d.ts +2 -2
  130. package/dest/services/reqresp/protocols/block.d.ts.map +1 -1
  131. package/dest/services/reqresp/protocols/block.js +1 -2
  132. package/dest/services/reqresp/protocols/goodbye.d.ts +5 -5
  133. package/dest/services/reqresp/protocols/goodbye.d.ts.map +1 -1
  134. package/dest/services/reqresp/protocols/goodbye.js +36 -41
  135. package/dest/services/reqresp/protocols/index.js +1 -3
  136. package/dest/services/reqresp/protocols/ping.js +1 -3
  137. package/dest/services/reqresp/protocols/status.js +1 -3
  138. package/dest/services/reqresp/protocols/tx.d.ts +3 -3
  139. package/dest/services/reqresp/protocols/tx.d.ts.map +1 -1
  140. package/dest/services/reqresp/protocols/tx.js +6 -9
  141. package/dest/services/reqresp/rate-limiter/index.js +0 -1
  142. package/dest/services/reqresp/rate-limiter/rate_limiter.d.ts +9 -9
  143. package/dest/services/reqresp/rate-limiter/rate_limiter.d.ts.map +1 -1
  144. package/dest/services/reqresp/rate-limiter/rate_limiter.js +53 -46
  145. package/dest/services/reqresp/rate-limiter/rate_limits.js +16 -17
  146. package/dest/services/reqresp/reqresp.d.ts +4 -4
  147. package/dest/services/reqresp/reqresp.d.ts.map +1 -1
  148. package/dest/services/reqresp/reqresp.js +467 -464
  149. package/dest/services/reqresp/status.js +16 -17
  150. package/dest/services/service.d.ts +3 -2
  151. package/dest/services/service.d.ts.map +1 -1
  152. package/dest/services/service.js +3 -4
  153. package/dest/test-helpers/generate-peer-id-private-keys.js +2 -4
  154. package/dest/test-helpers/get-ports.js +3 -3
  155. package/dest/test-helpers/index.js +0 -1
  156. package/dest/test-helpers/make-enrs.d.ts +1 -1
  157. package/dest/test-helpers/make-enrs.d.ts.map +1 -1
  158. package/dest/test-helpers/make-enrs.js +3 -6
  159. package/dest/test-helpers/make-test-p2p-clients.d.ts +7 -6
  160. package/dest/test-helpers/make-test-p2p-clients.d.ts.map +1 -1
  161. package/dest/test-helpers/make-test-p2p-clients.js +10 -12
  162. package/dest/test-helpers/reqresp-nodes.d.ts +18 -7
  163. package/dest/test-helpers/reqresp-nodes.d.ts.map +1 -1
  164. package/dest/test-helpers/reqresp-nodes.js +64 -40
  165. package/dest/testbench/p2p_client_testbench_worker.js +61 -45
  166. package/dest/testbench/parse_log_file.d.ts +2 -0
  167. package/dest/testbench/parse_log_file.d.ts.map +1 -0
  168. package/dest/testbench/parse_log_file.js +131 -0
  169. package/dest/testbench/testbench.d.ts +2 -0
  170. package/dest/testbench/testbench.d.ts.map +1 -0
  171. package/dest/testbench/testbench.js +141 -0
  172. package/dest/{services/types.d.ts → types/index.d.ts} +1 -1
  173. package/dest/types/index.d.ts.map +1 -0
  174. package/dest/types/index.js +28 -0
  175. package/dest/util.d.ts +5 -5
  176. package/dest/util.d.ts.map +1 -1
  177. package/dest/util.js +23 -34
  178. package/dest/versioning.d.ts +3 -3
  179. package/dest/versioning.d.ts.map +1 -1
  180. package/dest/versioning.js +7 -12
  181. package/package.json +15 -13
  182. package/src/bootstrap/bootstrap.ts +30 -17
  183. package/src/client/factory.ts +9 -12
  184. package/src/client/p2p_client.ts +13 -24
  185. package/src/config.ts +14 -15
  186. package/src/enr/generate-enr.ts +39 -0
  187. package/src/enr/index.ts +1 -0
  188. package/src/mem_pools/attestation_pool/attestation_pool.ts +1 -1
  189. package/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts +4 -3
  190. package/src/mem_pools/attestation_pool/kv_attestation_pool.ts +3 -3
  191. package/src/mem_pools/attestation_pool/memory_attestation_pool.ts +2 -2
  192. package/src/mem_pools/attestation_pool/mocks.ts +5 -5
  193. package/src/mem_pools/index.ts +2 -2
  194. package/src/mem_pools/instrumentation.ts +4 -3
  195. package/src/mem_pools/interface.ts +3 -3
  196. package/src/mem_pools/tx_pool/aztec_kv_tx_pool.ts +4 -4
  197. package/src/mem_pools/tx_pool/memory_tx_pool.ts +3 -3
  198. package/src/mem_pools/tx_pool/priority.ts +1 -1
  199. package/src/mem_pools/tx_pool/tx_pool.ts +1 -1
  200. package/src/mem_pools/tx_pool/tx_pool_test_suite.ts +4 -3
  201. package/src/msg_validators/attestation_validator/attestation_validator.ts +2 -2
  202. package/src/msg_validators/block_proposal_validator/block_proposal_validator.ts +2 -2
  203. package/src/msg_validators/tx_validator/aggregate_tx_validator.ts +1 -1
  204. package/src/msg_validators/tx_validator/block_header_validator.ts +2 -2
  205. package/src/msg_validators/tx_validator/data_validator.ts +57 -4
  206. package/src/msg_validators/tx_validator/double_spend_validator.ts +17 -12
  207. package/src/msg_validators/tx_validator/metadata_validator.ts +2 -2
  208. package/src/msg_validators/tx_validator/tx_proof_validator.ts +2 -6
  209. package/src/services/discv5/discV5_service.ts +33 -8
  210. package/src/services/dummy_service.ts +4 -2
  211. package/src/services/encoding.ts +3 -3
  212. package/src/services/gossipsub/scoring.ts +13 -0
  213. package/src/services/libp2p/libp2p_service.ts +124 -146
  214. package/src/services/peer-manager/peer_manager.ts +71 -13
  215. package/src/services/peer-manager/peer_scoring.ts +3 -3
  216. package/src/services/reqresp/connection-sampler/batch_connection_sampler.ts +2 -2
  217. package/src/services/reqresp/connection-sampler/connection_sampler.ts +9 -3
  218. package/src/services/reqresp/interface.ts +4 -3
  219. package/src/services/reqresp/metrics.ts +1 -1
  220. package/src/services/reqresp/protocols/block.ts +3 -3
  221. package/src/services/reqresp/protocols/goodbye.ts +7 -7
  222. package/src/services/reqresp/protocols/tx.ts +5 -5
  223. package/src/services/reqresp/rate-limiter/rate_limiter.ts +22 -18
  224. package/src/services/reqresp/reqresp.ts +18 -11
  225. package/src/services/service.ts +3 -2
  226. package/src/test-helpers/make-enrs.ts +1 -1
  227. package/src/test-helpers/make-test-p2p-clients.ts +9 -7
  228. package/src/test-helpers/reqresp-nodes.ts +32 -18
  229. package/src/testbench/p2p_client_testbench_worker.ts +16 -9
  230. package/src/testbench/parse_log_file.ts +175 -0
  231. package/src/testbench/testbench.ts +157 -0
  232. package/src/util.ts +5 -5
  233. package/src/versioning.ts +7 -7
  234. package/dest/services/libp2p/libp2p_logger.d.ts +0 -7
  235. package/dest/services/libp2p/libp2p_logger.d.ts.map +0 -1
  236. package/dest/services/libp2p/libp2p_logger.js +0 -67
  237. package/dest/services/types.d.ts.map +0 -1
  238. package/dest/services/types.js +0 -35
  239. package/src/services/libp2p/libp2p_logger.ts +0 -78
  240. package/src/testbench/scripts/run_testbench.sh +0 -7
  241. /package/src/{services/types.ts → types/index.ts} +0 -0
@@ -1,17 +1,22 @@
1
- import { __esDecorate, __runInitializers } from "tslib";
2
1
  // @attribution: lodestar impl for inspiration
3
- import { PeerErrorSeverity } from '@aztec/circuit-types';
2
+ function _ts_decorate(decorators, target, key, desc) {
3
+ var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d;
4
+ if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc);
5
+ else for(var i = decorators.length - 1; i >= 0; i--)if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r;
6
+ return c > 3 && r && Object.defineProperty(target, key, r), r;
7
+ }
4
8
  import { createLogger } from '@aztec/foundation/log';
5
9
  import { executeTimeout } from '@aztec/foundation/timer';
10
+ import { PeerErrorSeverity } from '@aztec/stdlib/p2p';
6
11
  import { Attributes, getTelemetryClient, trackSpan } from '@aztec/telemetry-client';
7
12
  import { pipe } from 'it-pipe';
8
- import { CollectiveReqRespTimeoutError, IndividualReqRespTimeoutError, InvalidResponseError, } from '../../errors/reqresp.error.js';
13
+ import { CollectiveReqRespTimeoutError, IndividualReqRespTimeoutError, InvalidResponseError } from '../../errors/reqresp.error.js';
9
14
  import { SnappyTransform } from '../encoding.js';
10
15
  import { BatchConnectionSampler } from './connection-sampler/batch_connection_sampler.js';
11
16
  import { ConnectionSampler } from './connection-sampler/connection_sampler.js';
12
- import { DEFAULT_SUB_PROTOCOL_HANDLERS, DEFAULT_SUB_PROTOCOL_VALIDATORS, ReqRespSubProtocol, subProtocolMap, } from './interface.js';
17
+ import { DEFAULT_SUB_PROTOCOL_HANDLERS, DEFAULT_SUB_PROTOCOL_VALIDATORS, ReqRespSubProtocol, subProtocolMap } from './interface.js';
13
18
  import { ReqRespMetrics } from './metrics.js';
14
- import { RequestResponseRateLimiter } from './rate-limiter/rate_limiter.js';
19
+ import { RateLimitStatus, RequestResponseRateLimiter, prettyPrintRateLimitStatus } from './rate-limiter/rate_limiter.js';
15
20
  import { ReqRespStatus, ReqRespStatusError, parseStatusChunk, prettyPrintReqRespStatus } from './status.js';
16
21
  /**
17
22
  * The Request Response Service
@@ -26,488 +31,486 @@ import { ReqRespStatus, ReqRespStatusError, parseStatusChunk, prettyPrintReqResp
26
31
  * so they get decompressed in readMessage
27
32
  *
28
33
  * see: https://github.com/ethereum/consensus-specs/blob/dev/specs/phase0/p2p-interface.md#the-reqresp-domain
29
- */
30
- let ReqResp = (() => {
31
- var _a;
32
- let _instanceExtraInitializers = [];
33
- let _sendBatchRequest_decorators;
34
- let _sendRequestToPeer_decorators;
35
- let _streamHandler_decorators;
36
- return _a = class ReqResp {
37
- constructor(config, libp2p, peerScoring, telemetryClient = getTelemetryClient()) {
38
- this.libp2p = (__runInitializers(this, _instanceExtraInitializers), libp2p);
39
- this.peerScoring = peerScoring;
40
- // Warning, if the `start` function is not called as the parent class constructor, then the default sub protocol handlers will be used ( not good )
41
- this.subProtocolHandlers = DEFAULT_SUB_PROTOCOL_HANDLERS;
42
- this.subProtocolValidators = DEFAULT_SUB_PROTOCOL_VALIDATORS;
43
- this.logger = createLogger('p2p:reqresp');
44
- this.overallRequestTimeoutMs = config.overallRequestTimeoutMs;
45
- this.individualRequestTimeoutMs = config.individualRequestTimeoutMs;
46
- this.rateLimiter = new RequestResponseRateLimiter(peerScoring);
47
- // Connection sampler is used to sample our connected peers
48
- this.connectionSampler = new ConnectionSampler(libp2p);
49
- this.snappyTransform = new SnappyTransform();
50
- this.metrics = new ReqRespMetrics(telemetryClient);
34
+ */ export class ReqResp {
35
+ libp2p;
36
+ peerScoring;
37
+ logger;
38
+ overallRequestTimeoutMs;
39
+ individualRequestTimeoutMs;
40
+ // Warning, if the `start` function is not called as the parent class constructor, then the default sub protocol handlers will be used ( not good )
41
+ subProtocolHandlers;
42
+ subProtocolValidators;
43
+ connectionSampler;
44
+ rateLimiter;
45
+ snappyTransform;
46
+ metrics;
47
+ constructor(config, libp2p, peerScoring, telemetryClient = getTelemetryClient()){
48
+ this.libp2p = libp2p;
49
+ this.peerScoring = peerScoring;
50
+ this.subProtocolHandlers = DEFAULT_SUB_PROTOCOL_HANDLERS;
51
+ this.subProtocolValidators = DEFAULT_SUB_PROTOCOL_VALIDATORS;
52
+ this.logger = createLogger('p2p:reqresp');
53
+ this.overallRequestTimeoutMs = config.overallRequestTimeoutMs;
54
+ this.individualRequestTimeoutMs = config.individualRequestTimeoutMs;
55
+ this.rateLimiter = new RequestResponseRateLimiter(peerScoring);
56
+ // Connection sampler is used to sample our connected peers
57
+ this.connectionSampler = new ConnectionSampler(libp2p);
58
+ this.snappyTransform = new SnappyTransform();
59
+ this.metrics = new ReqRespMetrics(telemetryClient);
60
+ }
61
+ get tracer() {
62
+ return this.metrics.tracer;
63
+ }
64
+ /**
65
+ * Start the reqresp service
66
+ */ async start(subProtocolHandlers, subProtocolValidators) {
67
+ this.subProtocolHandlers = subProtocolHandlers;
68
+ this.subProtocolValidators = subProtocolValidators;
69
+ // Register all protocol handlers
70
+ for (const subProtocol of Object.keys(this.subProtocolHandlers)){
71
+ await this.libp2p.handle(subProtocol, (data)=>void this.streamHandler(subProtocol, data).catch((err)=>this.logger.error(`Error on libp2p subprotocol ${subProtocol} handler`, err)));
72
+ }
73
+ this.rateLimiter.start();
74
+ }
75
+ /**
76
+ * Stop the reqresp service
77
+ */ async stop() {
78
+ // Unregister handlers in parallel
79
+ const unregisterPromises = Object.keys(this.subProtocolHandlers).map((protocol)=>this.libp2p.unhandle(protocol));
80
+ await Promise.all(unregisterPromises);
81
+ // Close connection sampler
82
+ await this.connectionSampler.stop();
83
+ this.logger.debug('ReqResp: Connection sampler stopped');
84
+ // Close streams in parallel
85
+ const closeStreamPromises = this.libp2p.getConnections().map((connection)=>connection.close());
86
+ await Promise.all(closeStreamPromises);
87
+ this.logger.debug('ReqResp: All active streams closed');
88
+ this.rateLimiter.stop();
89
+ this.logger.debug('ReqResp: Rate limiter stopped');
90
+ // NOTE: We assume libp2p instance is managed by the caller
91
+ }
92
+ /**
93
+ * Send a request to peers, returns the first response
94
+ *
95
+ * @param subProtocol - The protocol being requested
96
+ * @param request - The request to send
97
+ * @returns - The response from the peer, otherwise undefined
98
+ *
99
+ * @description
100
+ * This method attempts to send a request to all active peers using the specified sub-protocol.
101
+ * It opens a stream with each peer, sends the request, and awaits a response.
102
+ * If a valid response is received, it returns the response; otherwise, it continues to the next peer.
103
+ * If no response is received from any peer, it returns undefined.
104
+ *
105
+ * The method performs the following steps:
106
+ * - Sample a peer to send the request to.
107
+ * - Opens a stream with the peer using the specified sub-protocol.
108
+ *
109
+ * When a response is received, it is validated using the given sub protocols response validator.
110
+ * To see the interface for the response validator - see `interface.ts`
111
+ *
112
+ * Failing a response validation requests in a severe peer penalty, and will
113
+ * prompt the node to continue to search to the next peer.
114
+ * For example, a transaction request validator will check that the payload returned does in fact
115
+ * match the txHash that was requested. A peer that fails this check an only be an extremely naughty peer.
116
+ *
117
+ * This entire operation is wrapped in an overall timeout, that is independent of the
118
+ * peer it is requesting data from.
119
+ *
120
+ */ async sendRequest(subProtocol, request) {
121
+ const responseValidator = this.subProtocolValidators[subProtocol];
122
+ const requestBuffer = request.toBuffer();
123
+ const requestFunction = async ()=>{
124
+ // Attempt to ask all of our peers, but sampled in a random order
125
+ // This function is wrapped in a timeout, so we will exit the loop if we have not received a response
126
+ const numberOfPeers = this.libp2p.getPeers().length;
127
+ if (numberOfPeers === 0) {
128
+ this.logger.debug('No active peers to send requests to');
129
+ return undefined;
51
130
  }
52
- get tracer() {
53
- return this.metrics.tracer;
54
- }
55
- /**
56
- * Start the reqresp service
57
- */
58
- async start(subProtocolHandlers, subProtocolValidators) {
59
- this.subProtocolHandlers = subProtocolHandlers;
60
- this.subProtocolValidators = subProtocolValidators;
61
- // Register all protocol handlers
62
- for (const subProtocol of Object.keys(this.subProtocolHandlers)) {
63
- await this.libp2p.handle(subProtocol, (data) => void this.streamHandler(subProtocol, data).catch(err => this.logger.error(`Error on libp2p subprotocol ${subProtocol} handler`, err)));
131
+ const attemptedPeers = new Map();
132
+ for(let i = 0; i < numberOfPeers; i++){
133
+ // Sample a peer to make a request to
134
+ const peer = this.connectionSampler.getPeer(attemptedPeers);
135
+ this.logger.trace(`Attempting to send request to peer: ${peer?.toString()}`);
136
+ if (!peer) {
137
+ this.logger.debug('No peers available to send requests to');
138
+ return undefined;
139
+ }
140
+ attemptedPeers.set(peer.toString(), true);
141
+ this.logger.trace(`Sending request to peer: ${peer.toString()}`);
142
+ const response = await this.sendRequestToPeer(peer, subProtocol, requestBuffer);
143
+ if (response && response.status !== ReqRespStatus.SUCCESS) {
144
+ this.logger.debug(`Request to peer ${peer.toString()} failed with status ${prettyPrintReqRespStatus(response.status)}`);
145
+ continue;
146
+ }
147
+ // If we get a response, return it, otherwise we iterate onto the next peer
148
+ // We do not consider it a success if we have an empty buffer
149
+ if (response && response.data.length > 0) {
150
+ const object = subProtocolMap[subProtocol].response.fromBuffer(response.data);
151
+ // The response validator handles peer punishment within
152
+ const isValid = await responseValidator(request, object, peer);
153
+ if (!isValid) {
154
+ throw new InvalidResponseError();
155
+ }
156
+ return object;
64
157
  }
65
- this.rateLimiter.start();
66
158
  }
67
- /**
68
- * Stop the reqresp service
69
- */
70
- async stop() {
71
- // Unregister handlers in parallel
72
- const unregisterPromises = Object.keys(this.subProtocolHandlers).map(protocol => this.libp2p.unhandle(protocol));
73
- await Promise.all(unregisterPromises);
74
- // Close connection sampler
75
- await this.connectionSampler.stop();
76
- this.logger.debug('ReqResp: Connection sampler stopped');
77
- // Close streams in parallel
78
- const closeStreamPromises = this.libp2p.getConnections().map(connection => connection.close());
79
- await Promise.all(closeStreamPromises);
80
- this.logger.debug('ReqResp: All active streams closed');
81
- this.rateLimiter.stop();
82
- this.logger.debug('ReqResp: Rate limiter stopped');
83
- // NOTE: We assume libp2p instance is managed by the caller
159
+ };
160
+ try {
161
+ return await executeTimeout(requestFunction, this.overallRequestTimeoutMs, ()=>new CollectiveReqRespTimeoutError());
162
+ } catch (e) {
163
+ this.logger.debug(`${e.message} | subProtocol: ${subProtocol}`);
164
+ return undefined;
165
+ }
166
+ }
167
+ /**
168
+ * Request multiple messages over the same sub protocol, balancing the requests across peers.
169
+ *
170
+ * @devnote
171
+ * - The function prioritizes sending requests to free peers using a batch sampling strategy.
172
+ * - If a peer fails to respond or returns an invalid response, it is removed from the sampling pool and replaced.
173
+ * - The function stops retrying once all requests are processed, no active peers remain, or the maximum retry attempts are reached.
174
+ * - Responses are validated using a custom validator for the sub-protocol.*
175
+ *
176
+ * Requests are sent in parallel to each peer, but multiple requests are sent to the same peer in series
177
+ * - If a peer fails to respond or returns an invalid response, it is removed from the sampling pool and replaced.
178
+ * - The function stops retrying once all requests are processed, no active peers remain, or the maximum retry attempts are reached.
179
+ * - Responses are validated using a custom validator for the sub-protocol.*
180
+ *
181
+ * @param subProtocol
182
+ * @param requests
183
+ * @param timeoutMs
184
+ * @param maxPeers
185
+ * @returns
186
+ *
187
+ * @throws {CollectiveReqRespTimeoutError} - If the request batch exceeds the specified timeout (`timeoutMs`).
188
+ */ async sendBatchRequest(subProtocol, requests, timeoutMs = 10000, maxPeers = Math.min(10, requests.length), maxRetryAttempts = 3) {
189
+ const responseValidator = this.subProtocolValidators[subProtocol];
190
+ const responses = new Array(requests.length);
191
+ const requestBuffers = requests.map((req)=>req.toBuffer());
192
+ const requestFunction = async ()=>{
193
+ // Track which requests still need to be processed
194
+ const pendingRequestIndices = new Set(requestBuffers.map((_, i)=>i));
195
+ // Create batch sampler with the total number of requests and max peers
196
+ const batchSampler = new BatchConnectionSampler(this.connectionSampler, requests.length, maxPeers);
197
+ if (batchSampler.activePeerCount === 0) {
198
+ this.logger.debug('No active peers to send requests to');
199
+ return [];
84
200
  }
85
- /**
86
- * Send a request to peers, returns the first response
87
- *
88
- * @param subProtocol - The protocol being requested
89
- * @param request - The request to send
90
- * @returns - The response from the peer, otherwise undefined
91
- *
92
- * @description
93
- * This method attempts to send a request to all active peers using the specified sub-protocol.
94
- * It opens a stream with each peer, sends the request, and awaits a response.
95
- * If a valid response is received, it returns the response; otherwise, it continues to the next peer.
96
- * If no response is received from any peer, it returns undefined.
97
- *
98
- * The method performs the following steps:
99
- * - Sample a peer to send the request to.
100
- * - Opens a stream with the peer using the specified sub-protocol.
101
- *
102
- * When a response is received, it is validated using the given sub protocols response validator.
103
- * To see the interface for the response validator - see `interface.ts`
104
- *
105
- * Failing a response validation requests in a severe peer penalty, and will
106
- * prompt the node to continue to search to the next peer.
107
- * For example, a transaction request validator will check that the payload returned does in fact
108
- * match the txHash that was requested. A peer that fails this check an only be an extremely naughty peer.
109
- *
110
- * This entire operation is wrapped in an overall timeout, that is independent of the
111
- * peer it is requesting data from.
112
- *
113
- */
114
- async sendRequest(subProtocol, request) {
115
- const responseValidator = this.subProtocolValidators[subProtocol];
116
- const requestBuffer = request.toBuffer();
117
- const requestFunction = async () => {
118
- // Attempt to ask all of our peers, but sampled in a random order
119
- // This function is wrapped in a timeout, so we will exit the loop if we have not received a response
120
- const numberOfPeers = this.libp2p.getPeers().length;
121
- if (numberOfPeers === 0) {
122
- this.logger.debug('No active peers to send requests to');
123
- return undefined;
201
+ // This is where it gets fun
202
+ // The outer loop is the retry loop, we will continue to retry until we process all indices we have
203
+ // not received a response for, or we have reached the max retry attempts
204
+ // The inner loop is the batch loop, we will process all requests for each peer in parallel
205
+ // We will then process the results of the requests, and resample any peers that failed to respond
206
+ // We will continue to retry until we have processed all indices, or we have reached the max retry attempts
207
+ let retryAttempts = 0;
208
+ while(pendingRequestIndices.size > 0 && batchSampler.activePeerCount > 0 && retryAttempts < maxRetryAttempts){
209
+ // Process requests in parallel for each available peer
210
+ const requestBatches = new Map();
211
+ // Group requests by peer
212
+ for (const requestIndex of pendingRequestIndices){
213
+ const peer = batchSampler.getPeerForRequest(requestIndex);
214
+ if (!peer) {
215
+ break;
124
216
  }
125
- const attemptedPeers = new Map();
126
- for (let i = 0; i < numberOfPeers; i++) {
127
- // Sample a peer to make a request to
128
- const peer = this.connectionSampler.getPeer(attemptedPeers);
129
- this.logger.trace(`Attempting to send request to peer: ${peer?.toString()}`);
130
- if (!peer) {
131
- this.logger.debug('No peers available to send requests to');
132
- return undefined;
133
- }
134
- attemptedPeers.set(peer.toString(), true);
135
- this.logger.trace(`Sending request to peer: ${peer.toString()}`);
136
- const response = await this.sendRequestToPeer(peer, subProtocol, requestBuffer);
137
- if (response && response.status !== ReqRespStatus.SUCCESS) {
138
- this.logger.debug(`Request to peer ${peer.toString()} failed with status ${prettyPrintReqRespStatus(response.status)}`);
139
- continue;
140
- }
141
- // If we get a response, return it, otherwise we iterate onto the next peer
142
- // We do not consider it a success if we have an empty buffer
143
- if (response && response.data.length > 0) {
144
- const object = subProtocolMap[subProtocol].response.fromBuffer(response.data);
145
- // The response validator handles peer punishment within
146
- const isValid = await responseValidator(request, object, peer);
147
- if (!isValid) {
148
- throw new InvalidResponseError();
149
- }
150
- return object;
151
- }
217
+ if (!requestBatches.has(peer)) {
218
+ requestBatches.set(peer, []);
152
219
  }
153
- };
154
- try {
155
- return await executeTimeout(requestFunction, this.overallRequestTimeoutMs, () => new CollectiveReqRespTimeoutError());
220
+ requestBatches.get(peer).push(requestIndex);
156
221
  }
157
- catch (e) {
158
- this.logger.debug(`${e.message} | subProtocol: ${subProtocol}`);
159
- return undefined;
160
- }
161
- }
162
- /**
163
- * Request multiple messages over the same sub protocol, balancing the requests across peers.
164
- *
165
- * @devnote
166
- * - The function prioritizes sending requests to free peers using a batch sampling strategy.
167
- * - If a peer fails to respond or returns an invalid response, it is removed from the sampling pool and replaced.
168
- * - The function stops retrying once all requests are processed, no active peers remain, or the maximum retry attempts are reached.
169
- * - Responses are validated using a custom validator for the sub-protocol.*
170
- *
171
- * Requests are sent in parallel to each peer, but multiple requests are sent to the same peer in series
172
- * - If a peer fails to respond or returns an invalid response, it is removed from the sampling pool and replaced.
173
- * - The function stops retrying once all requests are processed, no active peers remain, or the maximum retry attempts are reached.
174
- * - Responses are validated using a custom validator for the sub-protocol.*
175
- *
176
- * @param subProtocol
177
- * @param requests
178
- * @param timeoutMs
179
- * @param maxPeers
180
- * @returns
181
- *
182
- * @throws {CollectiveReqRespTimeoutError} - If the request batch exceeds the specified timeout (`timeoutMs`).
183
- */
184
- async sendBatchRequest(subProtocol, requests, timeoutMs = 10000, maxPeers = Math.min(10, requests.length), maxRetryAttempts = 3) {
185
- const responseValidator = this.subProtocolValidators[subProtocol];
186
- const responses = new Array(requests.length);
187
- const requestBuffers = requests.map(req => req.toBuffer());
188
- const requestFunction = async () => {
189
- // Track which requests still need to be processed
190
- const pendingRequestIndices = new Set(requestBuffers.map((_, i) => i));
191
- // Create batch sampler with the total number of requests and max peers
192
- const batchSampler = new BatchConnectionSampler(this.connectionSampler, requests.length, maxPeers);
193
- if (batchSampler.activePeerCount === 0) {
194
- this.logger.debug('No active peers to send requests to');
195
- return [];
196
- }
197
- // This is where it gets fun
198
- // The outer loop is the retry loop, we will continue to retry until we process all indices we have
199
- // not received a response for, or we have reached the max retry attempts
200
- // The inner loop is the batch loop, we will process all requests for each peer in parallel
201
- // We will then process the results of the requests, and resample any peers that failed to respond
202
- // We will continue to retry until we have processed all indices, or we have reached the max retry attempts
203
- let retryAttempts = 0;
204
- while (pendingRequestIndices.size > 0 && batchSampler.activePeerCount > 0 && retryAttempts < maxRetryAttempts) {
205
- // Process requests in parallel for each available peer
206
- const requestBatches = new Map();
207
- // Group requests by peer
208
- for (const requestIndex of pendingRequestIndices) {
209
- const peer = batchSampler.getPeerForRequest(requestIndex);
210
- if (!peer) {
211
- break;
212
- }
213
- if (!requestBatches.has(peer)) {
214
- requestBatches.set(peer, []);
215
- }
216
- requestBatches.get(peer).push(requestIndex);
217
- }
218
- // Make parallel requests for each peer's batch
219
- // A batch entry will look something like this:
220
- // PeerId0: [0, 1, 2, 3]
221
- // PeerId1: [4, 5, 6, 7]
222
- // Peer Id 0 will send requests 0, 1, 2, 3 in serial
223
- // while simultaneously Peer Id 1 will send requests 4, 5, 6, 7 in serial
224
- const batchResults = await Promise.all(Array.from(requestBatches.entries()).map(async ([peer, indices]) => {
225
- try {
226
- // Requests all going to the same peer are sent synchronously
227
- const peerResults = [];
228
- for (const index of indices) {
229
- const response = await this.sendRequestToPeer(peer, subProtocol, requestBuffers[index]);
230
- // Check the status of the response buffer
231
- if (response && response.status !== ReqRespStatus.SUCCESS) {
232
- this.logger.debug(`Request to peer ${peer.toString()} failed with status ${prettyPrintReqRespStatus(response.status)}`);
233
- // If we hit a rate limit or some failure, we remove the peer and return the results,
234
- // they will be split among remaining peers and the new sampled peer
235
- batchSampler.removePeerAndReplace(peer);
236
- return { peer, results: peerResults };
237
- }
238
- if (response && response.data.length > 0) {
239
- const object = subProtocolMap[subProtocol].response.fromBuffer(response.data);
240
- const isValid = await responseValidator(requests[index], object, peer);
241
- if (isValid) {
242
- peerResults.push({ index, response: object });
243
- }
244
- }
245
- }
246
- return { peer, results: peerResults };
247
- }
248
- catch (error) {
249
- this.logger.debug(`Failed batch request to peer ${peer.toString()}:`, error);
222
+ // Make parallel requests for each peer's batch
223
+ // A batch entry will look something like this:
224
+ // PeerId0: [0, 1, 2, 3]
225
+ // PeerId1: [4, 5, 6, 7]
226
+ // Peer Id 0 will send requests 0, 1, 2, 3 in serial
227
+ // while simultaneously Peer Id 1 will send requests 4, 5, 6, 7 in serial
228
+ const batchResults = await Promise.all(Array.from(requestBatches.entries()).map(async ([peer, indices])=>{
229
+ try {
230
+ // Requests all going to the same peer are sent synchronously
231
+ const peerResults = [];
232
+ for (const index of indices){
233
+ const response = await this.sendRequestToPeer(peer, subProtocol, requestBuffers[index]);
234
+ // Check the status of the response buffer
235
+ if (response && response.status !== ReqRespStatus.SUCCESS) {
236
+ this.logger.debug(`Request to peer ${peer.toString()} failed with status ${prettyPrintReqRespStatus(response.status)}`);
237
+ // If we hit a rate limit or some failure, we remove the peer and return the results,
238
+ // they will be split among remaining peers and the new sampled peer
250
239
  batchSampler.removePeerAndReplace(peer);
251
- return { peer, results: [] };
240
+ return {
241
+ peer,
242
+ results: peerResults
243
+ };
252
244
  }
253
- }));
254
- // Process results
255
- for (const { results } of batchResults) {
256
- for (const { index, response } of results) {
257
- if (response) {
258
- responses[index] = response;
259
- pendingRequestIndices.delete(index);
245
+ if (response && response.data.length > 0) {
246
+ const object = subProtocolMap[subProtocol].response.fromBuffer(response.data);
247
+ const isValid = await responseValidator(requests[index], object, peer);
248
+ if (isValid) {
249
+ peerResults.push({
250
+ index,
251
+ response: object
252
+ });
260
253
  }
261
254
  }
262
255
  }
263
- retryAttempts++;
256
+ return {
257
+ peer,
258
+ results: peerResults
259
+ };
260
+ } catch (error) {
261
+ this.logger.debug(`Failed batch request to peer ${peer.toString()}:`, error);
262
+ batchSampler.removePeerAndReplace(peer);
263
+ return {
264
+ peer,
265
+ results: []
266
+ };
264
267
  }
265
- if (retryAttempts >= maxRetryAttempts) {
266
- this.logger.debug(`Max retry attempts ${maxRetryAttempts} reached for batch request`);
268
+ }));
269
+ // Process results
270
+ for (const { results } of batchResults){
271
+ for (const { index, response } of results){
272
+ if (response) {
273
+ responses[index] = response;
274
+ pendingRequestIndices.delete(index);
275
+ }
267
276
  }
268
- return responses;
269
- };
270
- try {
271
- return await executeTimeout(requestFunction, timeoutMs, () => new CollectiveReqRespTimeoutError());
272
- }
273
- catch (e) {
274
- this.logger.debug(`${e.message} | subProtocol: ${subProtocol}`);
275
- return [];
276
277
  }
278
+ retryAttempts++;
277
279
  }
278
- /**
279
- * Sends a request to a specific peer
280
- *
281
- * We first dial a particular protocol for the peer, this ensures that the peer knows
282
- * what to respond with
283
- *
284
- *
285
- * @param peerId - The peer to send the request to
286
- * @param subProtocol - The protocol to use to request
287
- * @param payload - The payload to send
288
- * @returns If the request is successful, the response is returned, otherwise undefined
289
- *
290
- * @description
291
- * This method attempts to open a stream with the specified peer, send the payload,
292
- * and await a response.
293
- * If an error occurs, it penalizes the peer and returns undefined.
294
- *
295
- * The method performs the following steps:
296
- * - Opens a stream with the peer using the specified sub-protocol.
297
- * - Sends the payload and awaits a response with a timeout.
298
- *
299
- * If the stream is not closed by the dialled peer, and a timeout occurs, then
300
- * the stream is closed on the requester's end and sender (us) updates its peer score
301
- */
302
- async sendRequestToPeer(peerId, subProtocol, payload) {
303
- let stream;
280
+ if (retryAttempts >= maxRetryAttempts) {
281
+ this.logger.debug(`Max retry attempts ${maxRetryAttempts} reached for batch request`);
282
+ }
283
+ return responses;
284
+ };
285
+ try {
286
+ return await executeTimeout(requestFunction, timeoutMs, ()=>new CollectiveReqRespTimeoutError());
287
+ } catch (e) {
288
+ this.logger.debug(`${e.message} | subProtocol: ${subProtocol}`);
289
+ return [];
290
+ }
291
+ }
292
+ /**
293
+ * Sends a request to a specific peer
294
+ *
295
+ * We first dial a particular protocol for the peer, this ensures that the peer knows
296
+ * what to respond with
297
+ *
298
+ *
299
+ * @param peerId - The peer to send the request to
300
+ * @param subProtocol - The protocol to use to request
301
+ * @param payload - The payload to send
302
+ * @returns If the request is successful, the response is returned, otherwise undefined
303
+ *
304
+ * @description
305
+ * This method attempts to open a stream with the specified peer, send the payload,
306
+ * and await a response.
307
+ * If an error occurs, it penalizes the peer and returns undefined.
308
+ *
309
+ * The method performs the following steps:
310
+ * - Opens a stream with the peer using the specified sub-protocol.
311
+ * - Sends the payload and awaits a response with a timeout.
312
+ *
313
+ * If the stream is not closed by the dialled peer, and a timeout occurs, then
314
+ * the stream is closed on the requester's end and sender (us) updates its peer score
315
+ */ async sendRequestToPeer(peerId, subProtocol, payload) {
316
+ let stream;
317
+ try {
318
+ this.metrics.recordRequestSent(subProtocol);
319
+ stream = await this.connectionSampler.dialProtocol(peerId, subProtocol);
320
+ // Open the stream with a timeout
321
+ const result = await executeTimeout(()=>pipe([
322
+ payload
323
+ ], stream, this.readMessage.bind(this)), this.individualRequestTimeoutMs, ()=>new IndividualReqRespTimeoutError());
324
+ return result;
325
+ } catch (e) {
326
+ this.metrics.recordRequestError(subProtocol);
327
+ this.handleResponseError(e, peerId, subProtocol);
328
+ } finally{
329
+ // Only close the stream if we created it
330
+ if (stream) {
304
331
  try {
305
- this.metrics.recordRequestSent(subProtocol);
306
- stream = await this.connectionSampler.dialProtocol(peerId, subProtocol);
307
- // Open the stream with a timeout
308
- const result = await executeTimeout(() => pipe([payload], stream, this.readMessage.bind(this)), this.individualRequestTimeoutMs, () => new IndividualReqRespTimeoutError());
309
- return result;
310
- }
311
- catch (e) {
312
- this.metrics.recordRequestError(subProtocol);
313
- this.handleResponseError(e, peerId, subProtocol);
314
- }
315
- finally {
316
- // Only close the stream if we created it
317
- if (stream) {
318
- try {
319
- await this.connectionSampler.close(stream.id);
320
- }
321
- catch (closeError) {
322
- this.logger.error(`Error closing stream: ${closeError instanceof Error ? closeError.message : 'Unknown error'}`);
323
- }
324
- }
332
+ await this.connectionSampler.close(stream.id);
333
+ } catch (closeError) {
334
+ this.logger.error(`Error closing stream: ${closeError instanceof Error ? closeError.message : 'Unknown error'}`);
325
335
  }
326
336
  }
327
- /**
328
- * Handle a response error
329
- *
330
- * ReqResp errors are punished differently depending on the severity of the offense
331
- *
332
- * @param e - The error
333
- * @param peerId - The peer id
334
- * @param subProtocol - The sub protocol
335
- * @returns If the error is non pubishable, then undefined is returned, otherwise the peer is penalized
336
- */
337
- handleResponseError(e, peerId, subProtocol) {
338
- const severity = this.categorizeError(e, peerId, subProtocol);
339
- if (severity) {
340
- this.peerScoring.penalizePeer(peerId, severity);
337
+ }
338
+ }
339
+ /**
340
+ * Handle a response error
341
+ *
342
+ * ReqResp errors are punished differently depending on the severity of the offense
343
+ *
344
+ * @param e - The error
345
+ * @param peerId - The peer id
346
+ * @param subProtocol - The sub protocol
347
+ * @returns If the error is non pubishable, then undefined is returned, otherwise the peer is penalized
348
+ */ handleResponseError(e, peerId, subProtocol) {
349
+ const severity = this.categorizeError(e, peerId, subProtocol);
350
+ if (severity) {
351
+ this.peerScoring.penalizePeer(peerId, severity);
352
+ }
353
+ }
354
+ /**
355
+ * Categorize the error and log it.
356
+ */ categorizeError(e, peerId, subProtocol) {
357
+ // Non punishable errors - we do not expect a response for goodbye messages
358
+ if (subProtocol === ReqRespSubProtocol.GOODBYE) {
359
+ this.logger.debug('Error encountered on goodbye sub protocol, no penalty', {
360
+ peerId: peerId.toString(),
361
+ subProtocol
362
+ });
363
+ return undefined;
364
+ }
365
+ // We do not punish a collective timeout, as the node triggers this interupt, independent of the peer's behaviour
366
+ const logTags = {
367
+ peerId: peerId.toString(),
368
+ subProtocol
369
+ };
370
+ if (e instanceof CollectiveReqRespTimeoutError || e instanceof InvalidResponseError) {
371
+ this.logger.debug(`Non-punishable error: ${e.message} | peerId: ${peerId.toString()} | subProtocol: ${subProtocol}`, logTags);
372
+ return undefined;
373
+ }
374
+ // Pubishable errors
375
+ // Connection reset errors in the networking stack are punished with high severity
376
+ // it just signals an unreliable peer
377
+ // We assume that the requesting node has a functioning networking stack.
378
+ if (e?.code === 'ECONNRESET' || e?.code === 'EPIPE') {
379
+ this.logger.debug(`Connection reset: ${peerId.toString()}`, logTags);
380
+ return PeerErrorSeverity.HighToleranceError;
381
+ }
382
+ if (e?.code === 'ECONNREFUSED') {
383
+ this.logger.debug(`Connection refused: ${peerId.toString()}`, logTags);
384
+ return PeerErrorSeverity.HighToleranceError;
385
+ }
386
+ // Timeout errors are punished with high tolerance, they can be due to a geogrpahically far away peer or an
387
+ // overloaded peer
388
+ if (e instanceof IndividualReqRespTimeoutError) {
389
+ this.logger.debug(`Timeout error: ${e.message} | peerId: ${peerId.toString()} | subProtocol: ${subProtocol}`, logTags);
390
+ return PeerErrorSeverity.HighToleranceError;
391
+ }
392
+ // Catch all error
393
+ this.logger.error(`Unexpected error sending request to peer`, e, logTags);
394
+ return PeerErrorSeverity.HighToleranceError;
395
+ }
396
+ /**
397
+ * Read a message returned from a stream into a single buffer
398
+ *
399
+ * The message is split into two components
400
+ * - The first chunk should contain a control byte, indicating the status of the response see `ReqRespStatus`
401
+ * - The second chunk should contain the response data
402
+ */ async readMessage(source) {
403
+ let statusBuffer;
404
+ const chunks = [];
405
+ try {
406
+ for await (const chunk of source){
407
+ if (statusBuffer === undefined) {
408
+ const firstChunkBuffer = chunk.subarray();
409
+ statusBuffer = parseStatusChunk(firstChunkBuffer);
410
+ } else {
411
+ chunks.push(chunk.subarray());
341
412
  }
342
413
  }
343
- /**
344
- * Categorize the error and log it.
345
- */
346
- categorizeError(e, peerId, subProtocol) {
347
- // Non punishable errors - we do not expect a response for goodbye messages
348
- if (subProtocol === ReqRespSubProtocol.GOODBYE) {
349
- this.logger.debug('Error encountered on goodbye sub protocol, no penalty', {
350
- peerId: peerId.toString(),
351
- subProtocol,
352
- });
353
- return undefined;
354
- }
355
- // We do not punish a collective timeout, as the node triggers this interupt, independent of the peer's behaviour
356
- const logTags = {
357
- peerId: peerId.toString(),
358
- subProtocol,
359
- };
360
- if (e instanceof CollectiveReqRespTimeoutError || e instanceof InvalidResponseError) {
361
- this.logger.debug(`Non-punishable error: ${e.message} | peerId: ${peerId.toString()} | subProtocol: ${subProtocol}`, logTags);
362
- return undefined;
363
- }
364
- // Pubishable errors
365
- // Connection reset errors in the networking stack are punished with high severity
366
- // it just signals an unreliable peer
367
- // We assume that the requesting node has a functioning networking stack.
368
- if (e?.code === 'ECONNRESET' || e?.code === 'EPIPE') {
369
- this.logger.debug(`Connection reset: ${peerId.toString()}`, logTags);
370
- return PeerErrorSeverity.HighToleranceError;
371
- }
372
- if (e?.code === 'ECONNREFUSED') {
373
- this.logger.debug(`Connection refused: ${peerId.toString()}`, logTags);
374
- return PeerErrorSeverity.HighToleranceError;
375
- }
376
- // Timeout errors are punished with high tolerance, they can be due to a geogrpahically far away peer or an
377
- // overloaded peer
378
- if (e instanceof IndividualReqRespTimeoutError) {
379
- this.logger.debug(`Timeout error: ${e.message} | peerId: ${peerId.toString()} | subProtocol: ${subProtocol}`, logTags);
380
- return PeerErrorSeverity.HighToleranceError;
381
- }
382
- // Catch all error
383
- this.logger.error(`Unexpected error sending request to peer`, e, logTags);
384
- return PeerErrorSeverity.HighToleranceError;
414
+ const messageData = Buffer.concat(chunks);
415
+ const message = this.snappyTransform.inboundTransformNoTopic(messageData);
416
+ return {
417
+ status: statusBuffer ?? ReqRespStatus.UNKNOWN,
418
+ data: message
419
+ };
420
+ } catch (e) {
421
+ this.logger.debug(`Reading message failed: ${e.message}`);
422
+ let status = ReqRespStatus.UNKNOWN;
423
+ if (e instanceof ReqRespStatusError) {
424
+ status = e.status;
385
425
  }
386
- /**
387
- * Read a message returned from a stream into a single buffer
388
- *
389
- * The message is split into two components
390
- * - The first chunk should contain a control byte, indicating the status of the response see `ReqRespStatus`
391
- * - The second chunk should contain the response data
392
- */
393
- async readMessage(source) {
394
- let statusBuffer;
395
- const chunks = [];
396
- try {
397
- for await (const chunk of source) {
398
- if (statusBuffer === undefined) {
399
- const firstChunkBuffer = chunk.subarray();
400
- statusBuffer = parseStatusChunk(firstChunkBuffer);
401
- }
402
- else {
403
- chunks.push(chunk.subarray());
404
- }
405
- }
406
- const messageData = Buffer.concat(chunks);
407
- const message = this.snappyTransform.inboundTransformNoTopic(messageData);
408
- return {
409
- status: statusBuffer ?? ReqRespStatus.UNKNOWN,
410
- data: message,
411
- };
412
- }
413
- catch (e) {
414
- this.logger.debug(`Reading message failed: ${e.message}`);
415
- let status = ReqRespStatus.UNKNOWN;
416
- if (e instanceof ReqRespStatusError) {
417
- status = e.status;
418
- }
419
- return {
420
- status,
421
- data: Buffer.from([]),
422
- };
423
- }
426
+ return {
427
+ status,
428
+ data: Buffer.from([])
429
+ };
430
+ }
431
+ }
432
+ /**
433
+ * Stream Handler
434
+ * Reads the incoming stream, determines the protocol, then triggers the appropriate handler
435
+ *
436
+ * @param param0 - The incoming stream data
437
+ *
438
+ * @description
439
+ * An individual stream handler will be bound to each sub protocol, and handles returning data back
440
+ * to the requesting peer.
441
+ *
442
+ * The sub protocol handler interface is defined within `interface.ts` and will be assigned to the
443
+ * req resp service on start up.
444
+ *
445
+ * We check rate limits for each peer, note the peer will be penalised within the rate limiter implementation
446
+ * if they exceed their peer specific limits.
447
+ */ async streamHandler(protocol, { stream, connection }) {
448
+ this.metrics.recordRequestReceived(protocol);
449
+ try {
450
+ // Store a reference to from this for the async generator
451
+ const rateLimitStatus = this.rateLimiter.allow(protocol, connection.remotePeer);
452
+ if (rateLimitStatus != RateLimitStatus.Allowed) {
453
+ this.logger.warn(`Rate limit exceeded ${prettyPrintRateLimitStatus(rateLimitStatus)} for ${protocol} from ${connection.remotePeer}`);
454
+ throw new ReqRespStatusError(ReqRespStatus.RATE_LIMIT_EXCEEDED);
424
455
  }
425
- /**
426
- * Stream Handler
427
- * Reads the incoming stream, determines the protocol, then triggers the appropriate handler
428
- *
429
- * @param param0 - The incoming stream data
430
- *
431
- * @description
432
- * An individual stream handler will be bound to each sub protocol, and handles returning data back
433
- * to the requesting peer.
434
- *
435
- * The sub protocol handler interface is defined within `interface.ts` and will be assigned to the
436
- * req resp service on start up.
437
- *
438
- * We check rate limits for each peer, note the peer will be penalised within the rate limiter implementation
439
- * if they exceed their peer specific limits.
440
- */
441
- async streamHandler(protocol, { stream, connection }) {
442
- this.metrics.recordRequestReceived(protocol);
443
- try {
444
- // Store a reference to from this for the async generator
445
- if (!this.rateLimiter.allow(protocol, connection.remotePeer)) {
446
- this.logger.warn(`Rate limit exceeded for ${protocol} from ${connection.remotePeer}`);
447
- throw new ReqRespStatusError(ReqRespStatus.RATE_LIMIT_EXCEEDED);
456
+ const handler = this.subProtocolHandlers[protocol];
457
+ const transform = this.snappyTransform;
458
+ await pipe(stream, async function*(source) {
459
+ for await (const chunkList of source){
460
+ const msg = Buffer.from(chunkList.subarray());
461
+ const response = await handler(connection.remotePeer, msg);
462
+ if (protocol === ReqRespSubProtocol.GOODBYE) {
463
+ // Don't respond
464
+ await stream.close();
465
+ return;
448
466
  }
449
- const handler = this.subProtocolHandlers[protocol];
450
- const transform = this.snappyTransform;
451
- this.logger.info(`Stream handler for ${protocol}`);
452
- await pipe(stream, async function* (source) {
453
- for await (const chunkList of source) {
454
- const msg = Buffer.from(chunkList.subarray());
455
- const response = await handler(connection.remotePeer, msg);
456
- if (protocol === ReqRespSubProtocol.GOODBYE) {
457
- // Don't respond
458
- await stream.close();
459
- return;
460
- }
461
- // Send success code first, then the response
462
- const successChunk = Buffer.from([ReqRespStatus.SUCCESS]);
463
- yield new Uint8Array(successChunk);
464
- yield new Uint8Array(transform.outboundTransformNoTopic(response));
465
- }
466
- }, stream);
467
- }
468
- catch (e) {
469
- this.logger.warn('Reqresp Response error: ', e);
470
- this.metrics.recordResponseError(protocol);
471
- // If we receive a known error, we use the error status in the response chunk, otherwise we categorize as unknown
472
- let errorStatus = ReqRespStatus.UNKNOWN;
473
- if (e instanceof ReqRespStatusError) {
474
- errorStatus = e.status;
475
- }
476
- const sendErrorChunk = this.sendErrorChunk(errorStatus);
477
- // Return and yield the response chunk
478
- await pipe(stream, async function* (_source) {
479
- yield* sendErrorChunk;
480
- }, stream);
481
- }
482
- finally {
483
- await stream.close();
467
+ // Send success code first, then the response
468
+ const successChunk = Buffer.from([
469
+ ReqRespStatus.SUCCESS
470
+ ]);
471
+ yield new Uint8Array(successChunk);
472
+ yield new Uint8Array(transform.outboundTransformNoTopic(response));
484
473
  }
474
+ }, stream);
475
+ } catch (e) {
476
+ this.logger.warn('Reqresp Response error: ', e);
477
+ this.metrics.recordResponseError(protocol);
478
+ // If we receive a known error, we use the error status in the response chunk, otherwise we categorize as unknown
479
+ let errorStatus = ReqRespStatus.UNKNOWN;
480
+ if (e instanceof ReqRespStatusError) {
481
+ errorStatus = e.status;
485
482
  }
486
- async *sendErrorChunk(error) {
487
- const errorChunk = Buffer.from([error]);
488
- yield new Uint8Array(errorChunk);
489
- }
490
- },
491
- (() => {
492
- const _metadata = typeof Symbol === "function" && Symbol.metadata ? Object.create(null) : void 0;
493
- _sendBatchRequest_decorators = [trackSpan('ReqResp.sendBatchRequest', (subProtocol, requests) => ({
494
- [Attributes.P2P_REQ_RESP_PROTOCOL]: subProtocol,
495
- [Attributes.P2P_REQ_RESP_BATCH_REQUESTS_COUNT]: requests.length,
496
- }))];
497
- _sendRequestToPeer_decorators = [trackSpan('ReqResp.sendRequestToPeer', (peerId, subProtocol, _) => ({
498
- [Attributes.P2P_ID]: peerId.toString(),
499
- [Attributes.P2P_REQ_RESP_PROTOCOL]: subProtocol,
500
- }))];
501
- _streamHandler_decorators = [trackSpan('ReqResp.streamHandler', (protocol, { connection }) => ({
502
- [Attributes.P2P_REQ_RESP_PROTOCOL]: protocol,
503
- [Attributes.P2P_ID]: connection.remotePeer.toString(),
504
- }))];
505
- __esDecorate(_a, null, _sendBatchRequest_decorators, { kind: "method", name: "sendBatchRequest", static: false, private: false, access: { has: obj => "sendBatchRequest" in obj, get: obj => obj.sendBatchRequest }, metadata: _metadata }, null, _instanceExtraInitializers);
506
- __esDecorate(_a, null, _sendRequestToPeer_decorators, { kind: "method", name: "sendRequestToPeer", static: false, private: false, access: { has: obj => "sendRequestToPeer" in obj, get: obj => obj.sendRequestToPeer }, metadata: _metadata }, null, _instanceExtraInitializers);
507
- __esDecorate(_a, null, _streamHandler_decorators, { kind: "method", name: "streamHandler", static: false, private: false, access: { has: obj => "streamHandler" in obj, get: obj => obj.streamHandler }, metadata: _metadata }, null, _instanceExtraInitializers);
508
- if (_metadata) Object.defineProperty(_a, Symbol.metadata, { enumerable: true, configurable: true, writable: true, value: _metadata });
509
- })(),
510
- _a;
511
- })();
512
- export { ReqResp };
513
- //# sourceMappingURL=data:application/json;base64,{"version":3,"file":"reqresp.js","sourceRoot":"","sources":["../../../src/services/reqresp/reqresp.ts"],"names":[],"mappings":";AAAA,8CAA8C;AAC9C,OAAO,EAAE,iBAAiB,EAAE,MAAM,sBAAsB,CAAC;AACzD,OAAO,EAAe,YAAY,EAAE,MAAM,uBAAuB,CAAC;AAClE,OAAO,EAAE,cAAc,EAAE,MAAM,yBAAyB,CAAC;AACzD,OAAO,EAAE,UAAU,EAAwB,kBAAkB,EAAE,SAAS,EAAE,MAAM,yBAAyB,CAAC;AAG1G,OAAO,EAAE,IAAI,EAAE,MAAM,SAAS,CAAC;AAI/B,OAAO,EACL,6BAA6B,EAC7B,6BAA6B,EAC7B,oBAAoB,GACrB,MAAM,+BAA+B,CAAC;AACvC,OAAO,EAAE,eAAe,EAAE,MAAM,gBAAgB,CAAC;AAGjD,OAAO,EAAE,sBAAsB,EAAE,MAAM,kDAAkD,CAAC;AAC1F,OAAO,EAAE,iBAAiB,EAAE,MAAM,4CAA4C,CAAC;AAC/E,OAAO,EACL,6BAA6B,EAC7B,+BAA+B,EAE/B,kBAAkB,EAIlB,cAAc,GACf,MAAM,gBAAgB,CAAC;AACxB,OAAO,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAC9C,OAAO,EAAE,0BAA0B,EAAE,MAAM,gCAAgC,CAAC;AAC5E,OAAO,EAAE,aAAa,EAAE,kBAAkB,EAAE,gBAAgB,EAAE,wBAAwB,EAAE,MAAM,aAAa,CAAC;AAE5G;;;;;;;;;;;;;GAaG;IACU,OAAO;;;;;;sBAAP,OAAO;YAiBlB,YACE,MAAwB,EAChB,MAAc,EACd,WAAwB,EAChC,kBAAmC,kBAAkB,EAAE;gBAF/C,WAAM,IAnBL,mDAAO,EAmBR,MAAM,EAAQ;gBACd,gBAAW,GAAX,WAAW,CAAa;gBAdlC,mJAAmJ;gBAC3I,wBAAmB,GAA+B,6BAA6B,CAAC;gBAChF,0BAAqB,GAAiC,+BAA+B,CAAC;gBAe5F,IAAI,CAAC,MAAM,GAAG,YAAY,CAAC,aAAa,CAAC,CAAC;gBAE1C,IAAI,CAAC,uBAAuB,GAAG,MAAM,CAAC,uBAAuB,CAAC;gBAC9D,IAAI,CAAC,0BAA0B,GAAG,MAAM,CAAC,0BAA0B,CAAC;gBAEpE,IAAI,CAAC,WAAW,GAAG,IAAI,0BAA0B,CAAC,WAAW,CAAC,CAAC;gBAE/D,2DAA2D;gBAC3D,IAAI,CAAC,iBAAiB,GAAG,IAAI,iBAAiB,CAAC,MAAM,CAAC,CAAC;gBAEvD,IAAI,CAAC,eAAe,GAAG,IAAI,eAAe,EAAE,CAAC;gBAC7C,IAAI,CAAC,OAAO,GAAG,IAAI,cAAc,CAAC,eAAe,CAAC,CAAC;YACrD,CAAC;YAED,IAAI,MAAM;gBACR,OAAO,IAAI,CAAC,OAAO,CAAC,MAAM,CAAC;YAC7B,CAAC;YAED;;eAEG;YACH,KAAK,CAAC,KAAK,CAAC,mBAA+C,EAAE,qBAAmD;gBAC9G,IAAI,CAAC,mBAAmB,GAAG,mBAAmB,CAAC;gBAC/C,IAAI,CAAC,qBAAqB,GAAG,qBAAqB,CAAC;gBAEnD,iCAAiC;gBACjC,KAAK,MAAM,WAAW,IAAI,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,CAAC,EAAE,CAAC;oBAChE,MAAM,IAAI,CAAC,MAAM,CAAC,MAAM,CACtB,WAAW,EACX,CAAC,IAAwB,EAAE,EAAE,CAC3B,KAAK,IAAI,CAAC,aAAa,CAAC,WAAiC,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAC3E
,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,+BAA+B,WAAW,UAAU,EAAE,GAAG,CAAC,CAC7E,CACJ,CAAC;gBACJ,CAAC;gBACD,IAAI,CAAC,WAAW,CAAC,KAAK,EAAE,CAAC;YAC3B,CAAC;YAED;;eAEG;YACH,KAAK,CAAC,IAAI;gBACR,kCAAkC;gBAClC,MAAM,kBAAkB,GAAG,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,CAAC;gBACjH,MAAM,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAC;gBAEtC,2BAA2B;gBAC3B,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,CAAC;gBACpC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAC;gBAEzD,4BAA4B;gBAC5B,MAAM,mBAAmB,GAAG,IAAI,CAAC,MAAM,CAAC,cAAc,EAAE,CAAC,GAAG,CAAC,UAAU,CAAC,EAAE,CAAC,UAAU,CAAC,KAAK,EAAE,CAAC,CAAC;gBAC/F,MAAM,OAAO,CAAC,GAAG,CAAC,mBAAmB,CAAC,CAAC;gBACvC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,oCAAoC,CAAC,CAAC;gBAExD,IAAI,CAAC,WAAW,CAAC,IAAI,EAAE,CAAC;gBACxB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,+BAA+B,CAAC,CAAC;gBAEnD,2DAA2D;YAC7D,CAAC;YAED;;;;;;;;;;;;;;;;;;;;;;;;;;;;eA4BG;YACH,KAAK,CAAC,WAAW,CACf,WAAwB,EACxB,OAA6D;gBAE7D,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;gBAClE,MAAM,aAAa,GAAG,OAAO,CAAC,QAAQ,EAAE,CAAC;gBAEzC,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;oBACjC,iEAAiE;oBACjE,qGAAqG;oBACrG,MAAM,aAAa,GAAG,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAC,MAAM,CAAC;oBAEpD,IAAI,aAAa,KAAK,CAAC,EAAE,CAAC;wBACxB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAC;wBACzD,OAAO,SAAS,CAAC;oBACnB,CAAC;oBAED,MAAM,cAAc,GAAyB,IAAI,GAAG,EAAE,CAAC;oBACvD,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,aAAa,EAAE,CAAC,EAAE,EAAE,CAAC;wBACvC,qCAAqC;wBACrC,MAAM,IAAI,GAAG,IAAI,CAAC,iBAAiB,CAAC,OAAO,CAAC,cAAc,CAAC,CAAC;wBAC5D,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,uCAAuC,IAAI,EAAE,QAAQ,EAAE,EAAE,CAAC,CAAC;wBAC7E,IAAI,CAAC,IAAI,EAAE,CAAC;4BACV,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,wCAAwC,CAAC,CAAC;4BAC5D,OAAO,SAAS,CAAC;wBACnB,CAAC;wBAED,cAAc,CAAC,GAAG,CAAC,IAAI,CAAC,QAAQ,EAAE,EAAE,IAAI,CAAC,CAAC;wBAE1C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,4BAA4B,IAAI,CAAC,QAAQ,EAAE,EAAE,CAAC,CAAC;wBACjE,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,WAAW,EAAE,aAAa,CAAC,CAAC;wBAEhF,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,KAAK,aAAa,CAAC,OAAO,EAAE,CAA
C;4BAC1D,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,mBAAmB,IAAI,CAAC,QAAQ,EAAE,uBAAuB,wBAAwB,CAAC,QAAQ,CAAC,MAAM,CAAC,EAAE,CACrG,CAAC;4BACF,SAAS;wBACX,CAAC;wBAED,2EAA2E;wBAC3E,6DAA6D;wBAC7D,IAAI,QAAQ,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;4BACzC,MAAM,MAAM,GAAG,cAAc,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;4BAC9E,wDAAwD;4BACxD,MAAM,OAAO,GAAG,MAAM,iBAAiB,CAAC,OAAO,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;4BAC/D,IAAI,CAAC,OAAO,EAAE,CAAC;gCACb,MAAM,IAAI,oBAAoB,EAAE,CAAC;4BACnC,CAAC;4BACD,OAAO,MAAM,CAAC;wBAChB,CAAC;oBACH,CAAC;gBACH,CAAC,CAAC;gBAEF,IAAI,CAAC;oBACH,OAAO,MAAM,cAAc,CACzB,eAAe,EACf,IAAI,CAAC,uBAAuB,EAC5B,GAAG,EAAE,CAAC,IAAI,6BAA6B,EAAE,CAC1C,CAAC;gBACJ,CAAC;gBAAC,OAAO,CAAM,EAAE,CAAC;oBAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,mBAAmB,WAAW,EAAE,CAAC,CAAC;oBAChE,OAAO,SAAS,CAAC;gBACnB,CAAC;YACH,CAAC;YAED;;;;;;;;;;;;;;;;;;;;;eAqBG;YAQH,KAAK,CAAC,gBAAgB,CACpB,WAAwB,EACxB,QAAgE,EAChE,SAAS,GAAG,KAAK,EACjB,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,EAAE,EAAE,QAAQ,CAAC,MAAM,CAAC,EACxC,gBAAgB,GAAG,CAAC;gBAEpB,MAAM,iBAAiB,GAAG,IAAI,CAAC,qBAAqB,CAAC,WAAW,CAAC,CAAC;gBAClE,MAAM,SAAS,GAA4D,IAAI,KAAK,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;gBACtG,MAAM,cAAc,GAAG,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,QAAQ,EAAE,CAAC,CAAC;gBAE3D,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE;oBACjC,kDAAkD;oBAClD,MAAM,qBAAqB,GAAG,IAAI,GAAG,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;oBAEvE,uEAAuE;oBACvE,MAAM,YAAY,GAAG,IAAI,sBAAsB,CAAC,IAAI,CAAC,iBAAiB,EAAE,QAAQ,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;oBAEnG,IAAI,YAAY,CAAC,eAAe,KAAK,CAAC,EAAE,CAAC;wBACvC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAC;wBACzD,OAAO,EAAE,CAAC;oBACZ,CAAC;oBAED,4BAA4B;oBAC5B,mGAAmG;oBACnG,yEAAyE;oBAEzE,2FAA2F;oBAC3F,kGAAkG;oBAClG,2GAA2G;oBAE3G,IAAI,aAAa,GAAG,CAAC,CAAC;oBACtB,OAAO,qBAAqB,CAAC,IAAI,GAAG,CAAC,IAAI,YAAY,CAAC,eAAe,GAAG,CAAC,IAAI,aAAa,GAAG,gBAAgB,EAAE,CAAC;wBAC9G,uDAAuD;wBACvD,MAAM,cAAc,GAAG,IAAI,GAAG,EAAoB,CAAC;wBAEnD,yBAAyB;wBACzB,KAAK,MAAM,YAAY,IAAI,qBAAqB,EAAE,CAAC;4BACjD,MAAM,IAAI,GA
AG,YAAY,CAAC,iBAAiB,CAAC,YAAY,CAAC,CAAC;4BAC1D,IAAI,CAAC,IAAI,EAAE,CAAC;gCACV,MAAM;4BACR,CAAC;4BAED,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;gCAC9B,cAAc,CAAC,GAAG,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC;4BAC/B,CAAC;4BACD,cAAc,CAAC,GAAG,CAAC,IAAI,CAAE,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC;wBAC/C,CAAC;wBAED,+CAA+C;wBAC/C,+CAA+C;wBAC/C,wBAAwB;wBACxB,wBAAwB;wBAExB,oDAAoD;wBACpD,yEAAyE;wBAEzE,MAAM,YAAY,GAAG,MAAM,OAAO,CAAC,GAAG,CACpC,KAAK,CAAC,IAAI,CAAC,cAAc,CAAC,OAAO,EAAE,CAAC,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,EAAE;4BACjE,IAAI,CAAC;gCACH,6DAA6D;gCAC7D,MAAM,WAAW,GACf,EAAE,CAAC;gCACL,KAAK,MAAM,KAAK,IAAI,OAAO,EAAE,CAAC;oCAC5B,MAAM,QAAQ,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,IAAI,EAAE,WAAW,EAAE,cAAc,CAAC,KAAK,CAAC,CAAC,CAAC;oCAExF,0CAA0C;oCAC1C,IAAI,QAAQ,IAAI,QAAQ,CAAC,MAAM,KAAK,aAAa,CAAC,OAAO,EAAE,CAAC;wCAC1D,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,mBAAmB,IAAI,CAAC,QAAQ,EAAE,uBAAuB,wBAAwB,CAC/E,QAAQ,CAAC,MAAM,CAChB,EAAE,CACJ,CAAC;wCAEF,qFAAqF;wCACrF,oEAAoE;wCACpE,YAAY,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;wCACxC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC;oCACxC,CAAC;oCAED,IAAI,QAAQ,IAAI,QAAQ,CAAC,IAAI,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;wCACzC,MAAM,MAAM,GAAG,cAAc,CAAC,WAAW,CAAC,CAAC,QAAQ,CAAC,UAAU,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;wCAC9E,MAAM,OAAO,GAAG,MAAM,iBAAiB,CAAC,QAAQ,CAAC,KAAK,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,CAAC;wCAEvE,IAAI,OAAO,EAAE,CAAC;4CACZ,WAAW,CAAC,IAAI,CAAC,EAAE,KAAK,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC,CAAC;wCAChD,CAAC;oCACH,CAAC;gCACH,CAAC;gCAED,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC;4BACxC,CAAC;4BAAC,OAAO,KAAK,EAAE,CAAC;gCACf,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,gCAAgC,IAAI,CAAC,QAAQ,EAAE,GAAG,EAAE,KAAK,CAAC,CAAC;gCAC7E,YAAY,CAAC,oBAAoB,CAAC,IAAI,CAAC,CAAC;gCACxC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,EAAE,EAAE,CAAC;4BAC/B,CAAC;wBACH,CAAC,CAAC,CACH,CAAC;wBAEF,kBAAkB;wBAClB,KAAK,MAAM,EAAE,OAAO,EAAE,IAAI,YAAY,EAAE,CAAC;4BACvC,KAAK,MAAM,EAAE,KAAK,EAAE,QAAQ,EAAE,IAAI,OAAO,EAAE,CAAC;gCAC1C,IAAI,QAAQ,EAAE,CAAC;oCACb,SAAS,CAAC,KAAK,CAAC,GAAG,QAAQ,CAAC;oCAC5B,qBAAqB,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gCACt
C,CAAC;4BACH,CAAC;wBACH,CAAC;wBAED,aAAa,EAAE,CAAC;oBAClB,CAAC;oBAED,IAAI,aAAa,IAAI,gBAAgB,EAAE,CAAC;wBACtC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,sBAAsB,gBAAgB,4BAA4B,CAAC,CAAC;oBACxF,CAAC;oBAED,OAAO,SAAS,CAAC;gBACnB,CAAC,CAAC;gBAEF,IAAI,CAAC;oBACH,OAAO,MAAM,cAAc,CACzB,eAAe,EACf,SAAS,EACT,GAAG,EAAE,CAAC,IAAI,6BAA6B,EAAE,CAC1C,CAAC;gBACJ,CAAC;gBAAC,OAAO,CAAM,EAAE,CAAC;oBAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,OAAO,mBAAmB,WAAW,EAAE,CAAC,CAAC;oBAChE,OAAO,EAAE,CAAC;gBACZ,CAAC;YACH,CAAC;YAED;;;;;;;;;;;;;;;;;;;;;;;eAuBG;YAKI,KAAK,CAAC,iBAAiB,CAC5B,MAAc,EACd,WAA+B,EAC/B,OAAe;gBAEf,IAAI,MAA0B,CAAC;gBAC/B,IAAI,CAAC;oBACH,IAAI,CAAC,OAAO,CAAC,iBAAiB,CAAC,WAAW,CAAC,CAAC;oBAE5C,MAAM,GAAG,MAAM,IAAI,CAAC,iBAAiB,CAAC,YAAY,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;oBAExE,iCAAiC;oBACjC,MAAM,MAAM,GAAG,MAAM,cAAc,CACjC,GAA6B,EAAE,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,EAAE,MAAO,EAAE,IAAI,CAAC,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,EACrF,IAAI,CAAC,0BAA0B,EAC/B,GAAG,EAAE,CAAC,IAAI,6BAA6B,EAAE,CAC1C,CAAC;oBAEF,OAAO,MAAM,CAAC;gBAChB,CAAC;gBAAC,OAAO,CAAM,EAAE,CAAC;oBAChB,IAAI,CAAC,OAAO,CAAC,kBAAkB,CAAC,WAAW,CAAC,CAAC;oBAC7C,IAAI,CAAC,mBAAmB,CAAC,CAAC,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC;gBACnD,CAAC;wBAAS,CAAC;oBACT,yCAAyC;oBACzC,IAAI,MAAM,EAAE,CAAC;wBACX,IAAI,CAAC;4BACH,MAAM,IAAI,CAAC,iBAAiB,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC;wBAChD,CAAC;wBAAC,OAAO,UAAU,EAAE,CAAC;4BACpB,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,yBAAyB,UAAU,YAAY,KAAK,CAAC,CAAC,CAAC,UAAU,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,EAAE,CAC9F,CAAC;wBACJ,CAAC;oBACH,CAAC;gBACH,CAAC;YACH,CAAC;YAED;;;;;;;;;eASG;YACK,mBAAmB,CAAC,CAAM,EAAE,MAAc,EAAE,WAA+B;gBACjF,MAAM,QAAQ,GAAG,IAAI,CAAC,eAAe,CAAC,CAAC,EAAE,MAAM,EAAE,WAAW,CAAC,CAAC;gBAC9D,IAAI,QAAQ,EAAE,CAAC;oBACb,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;gBAClD,CAAC;YACH,CAAC;YAED;;eAEG;YACK,eAAe,CAAC,CAAM,EAAE,MAAc,EAAE,WAA+B;gBAC7E,2EAA2E;gBAC3E,IAAI,WAAW,KAAK,kBAAkB,CAAC,OAAO,EAAE,CAAC;oBAC/C,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,uDAAuD,EAAE;wBACzE,MAAM,EAAE,MAAM,CAAC,QAAQ,EAAE;wBACzB,WAAW;qBACZ,CAAC,CAAC;oBACH,OAAO,SAAS,CAAC;
gBACnB,CAAC;gBAED,iHAAiH;gBACjH,MAAM,OAAO,GAAG;oBACd,MAAM,EAAE,MAAM,CAAC,QAAQ,EAAE;oBACzB,WAAW;iBACZ,CAAC;gBACF,IAAI,CAAC,YAAY,6BAA6B,IAAI,CAAC,YAAY,oBAAoB,EAAE,CAAC;oBACpF,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,yBAAyB,CAAC,CAAC,OAAO,cAAc,MAAM,CAAC,QAAQ,EAAE,mBAAmB,WAAW,EAAE,EACjG,OAAO,CACR,CAAC;oBACF,OAAO,SAAS,CAAC;gBACnB,CAAC;gBAED,oBAAoB;gBACpB,kFAAkF;gBAClF,qCAAqC;gBACrC,yEAAyE;gBACzE,IAAI,CAAC,EAAE,IAAI,KAAK,YAAY,IAAI,CAAC,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;oBACpD,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,qBAAqB,MAAM,CAAC,QAAQ,EAAE,EAAE,EAAE,OAAO,CAAC,CAAC;oBACrE,OAAO,iBAAiB,CAAC,kBAAkB,CAAC;gBAC9C,CAAC;gBAED,IAAI,CAAC,EAAE,IAAI,KAAK,cAAc,EAAE,CAAC;oBAC/B,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,uBAAuB,MAAM,CAAC,QAAQ,EAAE,EAAE,EAAE,OAAO,CAAC,CAAC;oBACvE,OAAO,iBAAiB,CAAC,kBAAkB,CAAC;gBAC9C,CAAC;gBAED,2GAA2G;gBAC3G,kBAAkB;gBAClB,IAAI,CAAC,YAAY,6BAA6B,EAAE,CAAC;oBAC/C,IAAI,CAAC,MAAM,CAAC,KAAK,CACf,kBAAkB,CAAC,CAAC,OAAO,cAAc,MAAM,CAAC,QAAQ,EAAE,mBAAmB,WAAW,EAAE,EAC1F,OAAO,CACR,CAAC;oBACF,OAAO,iBAAiB,CAAC,kBAAkB,CAAC;gBAC9C,CAAC;gBAED,kBAAkB;gBAClB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,0CAA0C,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC;gBAC1E,OAAO,iBAAiB,CAAC,kBAAkB,CAAC;YAC9C,CAAC;YAED;;;;;;eAMG;YACK,KAAK,CAAC,WAAW,CAAC,MAAqC;gBAC7D,IAAI,YAAuC,CAAC;gBAC5C,MAAM,MAAM,GAAiB,EAAE,CAAC;gBAEhC,IAAI,CAAC;oBACH,IAAI,KAAK,EAAE,MAAM,KAAK,IAAI,MAAM,EAAE,CAAC;wBACjC,IAAI,YAAY,KAAK,SAAS,EAAE,CAAC;4BAC/B,MAAM,gBAAgB,GAAG,KAAK,CAAC,QAAQ,EAAE,CAAC;4BAC1C,YAAY,GAAG,gBAAgB,CAAC,gBAAgB,CAAC,CAAC;wBACpD,CAAC;6BAAM,CAAC;4BACN,MAAM,CAAC,IAAI,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;wBAChC,CAAC;oBACH,CAAC;oBAED,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,CAAC,CAAC;oBAC1C,MAAM,OAAO,GAAW,IAAI,CAAC,eAAe,CAAC,uBAAuB,CAAC,WAAW,CAAC,CAAC;oBAElF,OAAO;wBACL,MAAM,EAAE,YAAY,IAAI,aAAa,CAAC,OAAO;wBAC7C,IAAI,EAAE,OAAO;qBACd,CAAC;gBACJ,CAAC;gBAAC,OAAO,CAAM,EAAE,CAAC;oBAChB,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,2BAA2B,CAAC,CAAC,OAAO,EAAE,CAAC,CAAC;oBAE1D,IAAI,MAAM,GAAG,aAAa,CAAC,OAAO,CAAC;oBACnC,IAAI,CAAC,YAAY,kBAAkB,EAAE,CAAC;wBACpC,MAAM,GAAG,CAAC,CAAC,MAAM,CAAC;oBACpB,CAAC;oB
AED,OAAO;wBACL,MAAM;wBACN,IAAI,EAAE,MAAM,CAAC,IAAI,CAAC,EAAE,CAAC;qBACtB,CAAC;gBACJ,CAAC;YACH,CAAC;YAED;;;;;;;;;;;;;;;eAeG;YAKK,KAAK,CAAC,aAAa,CAAC,QAA4B,EAAE,EAAE,MAAM,EAAE,UAAU,EAAsB;gBAClG,IAAI,CAAC,OAAO,CAAC,qBAAqB,CAAC,QAAQ,CAAC,CAAC;gBAE7C,IAAI,CAAC;oBACH,yDAAyD;oBACzD,IAAI,CAAC,IAAI,CAAC,WAAW,CAAC,KAAK,CAAC,QAAQ,EAAE,UAAU,CAAC,UAAU,CAAC,EAAE,CAAC;wBAC7D,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,2BAA2B,QAAQ,SAAS,UAAU,CAAC,UAAU,EAAE,CAAC,CAAC;wBAEtF,MAAM,IAAI,kBAAkB,CAAC,aAAa,CAAC,mBAAmB,CAAC,CAAC;oBAClE,CAAC;oBAED,MAAM,OAAO,GAAG,IAAI,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC;oBACnD,MAAM,SAAS,GAAG,IAAI,CAAC,eAAe,CAAC;oBAEvC,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,sBAAsB,QAAQ,EAAE,CAAC,CAAC;oBAEnD,MAAM,IAAI,CACR,MAAM,EACN,KAAK,SAAS,CAAC,EAAE,MAAW;wBAC1B,IAAI,KAAK,EAAE,MAAM,SAAS,IAAI,MAAM,EAAE,CAAC;4BACrC,MAAM,GAAG,GAAG,MAAM,CAAC,IAAI,CAAC,SAAS,CAAC,QAAQ,EAAE,CAAC,CAAC;4BAC9C,MAAM,QAAQ,GAAG,MAAM,OAAO,CAAC,UAAU,CAAC,UAAU,EAAE,GAAG,CAAC,CAAC;4BAE3D,IAAI,QAAQ,KAAK,kBAAkB,CAAC,OAAO,EAAE,CAAC;gCAC5C,gBAAgB;gCAChB,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;gCACrB,OAAO;4BACT,CAAC;4BAED,6CAA6C;4BAC7C,MAAM,YAAY,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,aAAa,CAAC,OAAO,CAAC,CAAC,CAAC;4BAC1D,MAAM,IAAI,UAAU,CAAC,YAAY,CAAC,CAAC;4BAEnC,MAAM,IAAI,UAAU,CAAC,SAAS,CAAC,wBAAwB,CAAC,QAAQ,CAAC,CAAC,CAAC;wBACrE,CAAC;oBACH,CAAC,EACD,MAAM,CACP,CAAC;gBACJ,CAAC;gBAAC,OAAO,CAAM,EAAE,CAAC;oBAChB,IAAI,CAAC,MAAM,CAAC,IAAI,CAAC,0BAA0B,EAAE,CAAC,CAAC,CAAC;oBAChD,IAAI,CAAC,OAAO,CAAC,mBAAmB,CAAC,QAAQ,CAAC,CAAC;oBAE3C,iHAAiH;oBACjH,IAAI,WAAW,GAAG,aAAa,CAAC,OAAO,CAAC;oBACxC,IAAI,CAAC,YAAY,kBAAkB,EAAE,CAAC;wBACpC,WAAW,GAAG,CAAC,CAAC,MAAM,CAAC;oBACzB,CAAC;oBAED,MAAM,cAAc,GAAG,IAAI,CAAC,cAAc,CAAC,WAAW,CAAC,CAAC;oBAExD,sCAAsC;oBACtC,MAAM,IAAI,CACR,MAAM,EACN,KAAK,SAAS,CAAC,EAAE,OAAY;wBAC3B,KAAK,CAAC,CAAC,cAAc,CAAC;oBACxB,CAAC,EACD,MAAM,CACP,CAAC;gBACJ,CAAC;wBAAS,CAAC;oBACT,MAAM,MAAM,CAAC,KAAK,EAAE,CAAC;gBACvB,CAAC;YACH,CAAC;YAEO,KAAK,CAAC,CAAC,cAAc,CAAC,KAAoB;gBAChD,MAAM,UAAU,GAAG,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;gBACxC,MAAM,IAAI,UAAU,CAAC,UAAU,CAA
C,CAAC;YACnC,CAAC;;;;4CAnZA,SAAS,CACR,0BAA0B,EAC1B,CAAC,WAA+B,EAAE,QAAuE,EAAE,EAAE,CAAC,CAAC;oBAC7G,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE,WAAW;oBAC/C,CAAC,UAAU,CAAC,iCAAiC,CAAC,EAAE,QAAQ,CAAC,MAAM;iBAChE,CAAC,CACH;6CA4JA,SAAS,CAAC,2BAA2B,EAAE,CAAC,MAAc,EAAE,WAA+B,EAAE,CAAS,EAAE,EAAE,CAAC,CAAC;oBACvG,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC,QAAQ,EAAE;oBACtC,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE,WAAW;iBAChD,CAAC,CAAC;yCAwKF,SAAS,CAAC,uBAAuB,EAAE,CAAC,QAA4B,EAAE,EAAE,UAAU,EAAsB,EAAE,EAAE,CAAC,CAAC;oBACzG,CAAC,UAAU,CAAC,qBAAqB,CAAC,EAAE,QAAQ;oBAC5C,CAAC,UAAU,CAAC,MAAM,CAAC,EAAE,UAAU,CAAC,UAAU,CAAC,QAAQ,EAAE;iBACtD,CAAC,CAAC;YAzUH,iMAAM,gBAAgB,6DAiIrB;YA8BD,oMAAa,iBAAiB,6DAkC7B;YAyID,wLAAc,aAAa,6DA6D1B;;;;;SAtlBU,OAAO"}
483
+ const sendErrorChunk = this.sendErrorChunk(errorStatus);
484
+ // Return and yield the response chunk
485
+ await pipe(stream, async function*(_source) {
486
+ yield* sendErrorChunk;
487
+ }, stream);
488
+ } finally{
489
+ await stream.close();
490
+ }
491
+ }
492
+ async *sendErrorChunk(error) {
493
+ const errorChunk = Buffer.from([
494
+ error
495
+ ]);
496
+ yield new Uint8Array(errorChunk);
497
+ }
498
+ }
499
// Compiled decorator applications (TS -> JS transform output): wrap the three
// ReqResp entry points with trackSpan so each call opens an OpenTelemetry span
// whose attributes are derived from the call arguments.
_ts_decorate([
    trackSpan('ReqResp.sendBatchRequest', (proto, reqs) => ({
        [Attributes.P2P_REQ_RESP_PROTOCOL]: proto,
        [Attributes.P2P_REQ_RESP_BATCH_REQUESTS_COUNT]: reqs.length,
    })),
], ReqResp.prototype, 'sendBatchRequest', null);
_ts_decorate([
    trackSpan('ReqResp.sendRequestToPeer', (peer, proto, _payload) => ({
        [Attributes.P2P_ID]: peer.toString(),
        [Attributes.P2P_REQ_RESP_PROTOCOL]: proto,
    })),
], ReqResp.prototype, 'sendRequestToPeer', null);
_ts_decorate([
    trackSpan('ReqResp.streamHandler', (proto, { connection }) => ({
        [Attributes.P2P_REQ_RESP_PROTOCOL]: proto,
        [Attributes.P2P_ID]: connection.remotePeer.toString(),
    })),
], ReqResp.prototype, 'streamHandler', null);