@aztec/p2p 0.0.1-commit.e61ad554 → 0.0.1-commit.ec5f612

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (445) hide show
  1. package/dest/bootstrap/bootstrap.d.ts +4 -3
  2. package/dest/bootstrap/bootstrap.d.ts.map +1 -1
  3. package/dest/bootstrap/bootstrap.js +4 -4
  4. package/dest/client/factory.d.ts +10 -10
  5. package/dest/client/factory.d.ts.map +1 -1
  6. package/dest/client/factory.js +45 -18
  7. package/dest/client/interface.d.ts +46 -33
  8. package/dest/client/interface.d.ts.map +1 -1
  9. package/dest/client/p2p_client.d.ts +41 -51
  10. package/dest/client/p2p_client.d.ts.map +1 -1
  11. package/dest/client/p2p_client.js +156 -200
  12. package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker.d.ts +2 -0
  13. package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker.d.ts.map +1 -0
  14. package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker.js +304 -0
  15. package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker_protocol.d.ts +73 -0
  16. package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker_protocol.d.ts.map +1 -0
  17. package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker_protocol.js +8 -0
  18. package/dest/config.d.ts +35 -7
  19. package/dest/config.d.ts.map +1 -1
  20. package/dest/config.js +21 -7
  21. package/dest/errors/tx-pool.error.d.ts +8 -0
  22. package/dest/errors/tx-pool.error.d.ts.map +1 -0
  23. package/dest/errors/tx-pool.error.js +9 -0
  24. package/dest/index.d.ts +2 -1
  25. package/dest/index.d.ts.map +1 -1
  26. package/dest/index.js +1 -0
  27. package/dest/mem_pools/attestation_pool/attestation_pool.d.ts +104 -88
  28. package/dest/mem_pools/attestation_pool/attestation_pool.d.ts.map +1 -1
  29. package/dest/mem_pools/attestation_pool/attestation_pool.js +441 -3
  30. package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.d.ts +2 -2
  31. package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.d.ts.map +1 -1
  32. package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.js +353 -87
  33. package/dest/mem_pools/attestation_pool/index.d.ts +2 -3
  34. package/dest/mem_pools/attestation_pool/index.d.ts.map +1 -1
  35. package/dest/mem_pools/attestation_pool/index.js +1 -2
  36. package/dest/mem_pools/attestation_pool/mocks.d.ts +2 -2
  37. package/dest/mem_pools/attestation_pool/mocks.d.ts.map +1 -1
  38. package/dest/mem_pools/attestation_pool/mocks.js +2 -2
  39. package/dest/mem_pools/index.d.ts +3 -2
  40. package/dest/mem_pools/index.d.ts.map +1 -1
  41. package/dest/mem_pools/index.js +1 -1
  42. package/dest/mem_pools/instrumentation.d.ts +1 -1
  43. package/dest/mem_pools/instrumentation.d.ts.map +1 -1
  44. package/dest/mem_pools/instrumentation.js +2 -2
  45. package/dest/mem_pools/interface.d.ts +5 -5
  46. package/dest/mem_pools/interface.d.ts.map +1 -1
  47. package/dest/mem_pools/tx_pool/eviction/invalid_txs_after_mining_rule.js +3 -3
  48. package/dest/mem_pools/tx_pool_v2/archive/index.d.ts +2 -0
  49. package/dest/mem_pools/tx_pool_v2/archive/index.d.ts.map +1 -0
  50. package/dest/mem_pools/tx_pool_v2/archive/index.js +1 -0
  51. package/dest/mem_pools/tx_pool_v2/archive/tx_archive.d.ts +43 -0
  52. package/dest/mem_pools/tx_pool_v2/archive/tx_archive.d.ts.map +1 -0
  53. package/dest/mem_pools/tx_pool_v2/archive/tx_archive.js +103 -0
  54. package/dest/mem_pools/tx_pool_v2/deleted_pool.d.ts +104 -0
  55. package/dest/mem_pools/tx_pool_v2/deleted_pool.d.ts.map +1 -0
  56. package/dest/mem_pools/tx_pool_v2/deleted_pool.js +251 -0
  57. package/dest/mem_pools/tx_pool_v2/eviction/eviction_manager.d.ts +47 -0
  58. package/dest/mem_pools/tx_pool_v2/eviction/eviction_manager.d.ts.map +1 -0
  59. package/dest/mem_pools/tx_pool_v2/eviction/eviction_manager.js +128 -0
  60. package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.d.ts +17 -0
  61. package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.d.ts.map +1 -0
  62. package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.js +93 -0
  63. package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.d.ts +19 -0
  64. package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.d.ts.map +1 -0
  65. package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.js +97 -0
  66. package/dest/mem_pools/tx_pool_v2/eviction/index.d.ts +10 -0
  67. package/dest/mem_pools/tx_pool_v2/eviction/index.d.ts.map +1 -0
  68. package/dest/mem_pools/tx_pool_v2/eviction/index.js +11 -0
  69. package/dest/mem_pools/tx_pool_v2/eviction/interfaces.d.ts +174 -0
  70. package/dest/mem_pools/tx_pool_v2/eviction/interfaces.d.ts.map +1 -0
  71. package/dest/mem_pools/tx_pool_v2/eviction/interfaces.js +25 -0
  72. package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.d.ts +15 -0
  73. package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.d.ts.map +1 -0
  74. package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.js +65 -0
  75. package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.d.ts +17 -0
  76. package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.d.ts.map +1 -0
  77. package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.js +93 -0
  78. package/dest/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.d.ts +16 -0
  79. package/dest/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.d.ts.map +1 -0
  80. package/dest/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.js +78 -0
  81. package/dest/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.d.ts +20 -0
  82. package/dest/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.d.ts.map +1 -0
  83. package/dest/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.js +73 -0
  84. package/dest/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.d.ts +15 -0
  85. package/dest/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.d.ts.map +1 -0
  86. package/dest/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.js +19 -0
  87. package/dest/mem_pools/tx_pool_v2/index.d.ts +6 -0
  88. package/dest/mem_pools/tx_pool_v2/index.d.ts.map +1 -0
  89. package/dest/mem_pools/tx_pool_v2/index.js +5 -0
  90. package/dest/mem_pools/tx_pool_v2/instrumentation.d.ts +15 -0
  91. package/dest/mem_pools/tx_pool_v2/instrumentation.d.ts.map +1 -0
  92. package/dest/mem_pools/tx_pool_v2/instrumentation.js +43 -0
  93. package/dest/mem_pools/tx_pool_v2/interfaces.d.ts +211 -0
  94. package/dest/mem_pools/tx_pool_v2/interfaces.d.ts.map +1 -0
  95. package/dest/mem_pools/tx_pool_v2/interfaces.js +9 -0
  96. package/dest/mem_pools/tx_pool_v2/tx_metadata.d.ts +119 -0
  97. package/dest/mem_pools/tx_pool_v2/tx_metadata.d.ts.map +1 -0
  98. package/dest/mem_pools/tx_pool_v2/tx_metadata.js +193 -0
  99. package/dest/mem_pools/tx_pool_v2/tx_pool_bench_metrics.d.ts +26 -0
  100. package/dest/mem_pools/tx_pool_v2/tx_pool_bench_metrics.d.ts.map +1 -0
  101. package/dest/mem_pools/tx_pool_v2/tx_pool_bench_metrics.js +70 -0
  102. package/dest/mem_pools/tx_pool_v2/tx_pool_indices.d.ts +108 -0
  103. package/dest/mem_pools/tx_pool_v2/tx_pool_indices.d.ts.map +1 -0
  104. package/dest/mem_pools/tx_pool_v2/tx_pool_indices.js +354 -0
  105. package/dest/mem_pools/tx_pool_v2/tx_pool_v2.d.ts +60 -0
  106. package/dest/mem_pools/tx_pool_v2/tx_pool_v2.d.ts.map +1 -0
  107. package/dest/mem_pools/tx_pool_v2/tx_pool_v2.js +161 -0
  108. package/dest/mem_pools/tx_pool_v2/tx_pool_v2_impl.d.ts +77 -0
  109. package/dest/mem_pools/tx_pool_v2/tx_pool_v2_impl.d.ts.map +1 -0
  110. package/dest/mem_pools/tx_pool_v2/tx_pool_v2_impl.js +905 -0
  111. package/dest/msg_validators/attestation_validator/fisherman_attestation_validator.d.ts +3 -3
  112. package/dest/msg_validators/attestation_validator/fisherman_attestation_validator.d.ts.map +1 -1
  113. package/dest/msg_validators/attestation_validator/fisherman_attestation_validator.js +7 -2
  114. package/dest/msg_validators/proposal_validator/proposal_validator.js +5 -5
  115. package/dest/msg_validators/tx_validator/aggregate_tx_validator.d.ts +4 -4
  116. package/dest/msg_validators/tx_validator/aggregate_tx_validator.d.ts.map +1 -1
  117. package/dest/msg_validators/tx_validator/aggregate_tx_validator.js +3 -3
  118. package/dest/msg_validators/tx_validator/archive_cache.d.ts +3 -3
  119. package/dest/msg_validators/tx_validator/archive_cache.d.ts.map +1 -1
  120. package/dest/msg_validators/tx_validator/archive_cache.js +1 -1
  121. package/dest/msg_validators/tx_validator/block_header_validator.d.ts +20 -6
  122. package/dest/msg_validators/tx_validator/block_header_validator.d.ts.map +1 -1
  123. package/dest/msg_validators/tx_validator/block_header_validator.js +5 -4
  124. package/dest/msg_validators/tx_validator/data_validator.d.ts +3 -1
  125. package/dest/msg_validators/tx_validator/data_validator.d.ts.map +1 -1
  126. package/dest/msg_validators/tx_validator/data_validator.js +4 -1
  127. package/dest/msg_validators/tx_validator/double_spend_validator.d.ts +15 -4
  128. package/dest/msg_validators/tx_validator/double_spend_validator.d.ts.map +1 -1
  129. package/dest/msg_validators/tx_validator/double_spend_validator.js +7 -6
  130. package/dest/msg_validators/tx_validator/factory.d.ts +118 -5
  131. package/dest/msg_validators/tx_validator/factory.d.ts.map +1 -1
  132. package/dest/msg_validators/tx_validator/factory.js +228 -57
  133. package/dest/msg_validators/tx_validator/gas_validator.d.ts +59 -3
  134. package/dest/msg_validators/tx_validator/gas_validator.d.ts.map +1 -1
  135. package/dest/msg_validators/tx_validator/gas_validator.js +76 -38
  136. package/dest/msg_validators/tx_validator/index.d.ts +2 -1
  137. package/dest/msg_validators/tx_validator/index.d.ts.map +1 -1
  138. package/dest/msg_validators/tx_validator/index.js +1 -0
  139. package/dest/msg_validators/tx_validator/metadata_validator.d.ts +3 -2
  140. package/dest/msg_validators/tx_validator/metadata_validator.d.ts.map +1 -1
  141. package/dest/msg_validators/tx_validator/metadata_validator.js +2 -2
  142. package/dest/msg_validators/tx_validator/nullifier_cache.d.ts +14 -0
  143. package/dest/msg_validators/tx_validator/nullifier_cache.d.ts.map +1 -0
  144. package/dest/msg_validators/tx_validator/nullifier_cache.js +24 -0
  145. package/dest/msg_validators/tx_validator/phases_validator.d.ts +3 -2
  146. package/dest/msg_validators/tx_validator/phases_validator.d.ts.map +1 -1
  147. package/dest/msg_validators/tx_validator/phases_validator.js +3 -3
  148. package/dest/msg_validators/tx_validator/size_validator.d.ts +3 -1
  149. package/dest/msg_validators/tx_validator/size_validator.d.ts.map +1 -1
  150. package/dest/msg_validators/tx_validator/size_validator.js +4 -1
  151. package/dest/msg_validators/tx_validator/timestamp_validator.d.ts +22 -5
  152. package/dest/msg_validators/tx_validator/timestamp_validator.d.ts.map +1 -1
  153. package/dest/msg_validators/tx_validator/timestamp_validator.js +8 -8
  154. package/dest/msg_validators/tx_validator/tx_permitted_validator.d.ts +3 -2
  155. package/dest/msg_validators/tx_validator/tx_permitted_validator.d.ts.map +1 -1
  156. package/dest/msg_validators/tx_validator/tx_permitted_validator.js +2 -2
  157. package/dest/msg_validators/tx_validator/tx_proof_validator.d.ts +3 -2
  158. package/dest/msg_validators/tx_validator/tx_proof_validator.d.ts.map +1 -1
  159. package/dest/msg_validators/tx_validator/tx_proof_validator.js +2 -2
  160. package/dest/services/data_store.d.ts +1 -1
  161. package/dest/services/data_store.d.ts.map +1 -1
  162. package/dest/services/data_store.js +10 -6
  163. package/dest/services/discv5/discV5_service.js +1 -1
  164. package/dest/services/dummy_service.d.ts +24 -4
  165. package/dest/services/dummy_service.d.ts.map +1 -1
  166. package/dest/services/dummy_service.js +46 -1
  167. package/dest/services/encoding.d.ts +3 -3
  168. package/dest/services/encoding.d.ts.map +1 -1
  169. package/dest/services/encoding.js +11 -10
  170. package/dest/services/gossipsub/index.d.ts +3 -0
  171. package/dest/services/gossipsub/index.d.ts.map +1 -0
  172. package/dest/services/gossipsub/index.js +2 -0
  173. package/dest/services/gossipsub/scoring.d.ts +21 -3
  174. package/dest/services/gossipsub/scoring.d.ts.map +1 -1
  175. package/dest/services/gossipsub/scoring.js +24 -7
  176. package/dest/services/gossipsub/topic_score_params.d.ts +173 -0
  177. package/dest/services/gossipsub/topic_score_params.d.ts.map +1 -0
  178. package/dest/services/gossipsub/topic_score_params.js +346 -0
  179. package/dest/services/index.d.ts +2 -1
  180. package/dest/services/index.d.ts.map +1 -1
  181. package/dest/services/index.js +1 -0
  182. package/dest/services/libp2p/instrumentation.d.ts +1 -1
  183. package/dest/services/libp2p/instrumentation.d.ts.map +1 -1
  184. package/dest/services/libp2p/instrumentation.js +14 -3
  185. package/dest/services/libp2p/libp2p_service.d.ts +100 -42
  186. package/dest/services/libp2p/libp2p_service.d.ts.map +1 -1
  187. package/dest/services/libp2p/libp2p_service.js +451 -359
  188. package/dest/services/peer-manager/metrics.d.ts +2 -2
  189. package/dest/services/peer-manager/metrics.d.ts.map +1 -1
  190. package/dest/services/peer-manager/metrics.js +20 -5
  191. package/dest/services/peer-manager/peer_scoring.d.ts +1 -1
  192. package/dest/services/peer-manager/peer_scoring.d.ts.map +1 -1
  193. package/dest/services/peer-manager/peer_scoring.js +33 -4
  194. package/dest/services/reqresp/batch-tx-requester/batch_tx_requester.d.ts +48 -0
  195. package/dest/services/reqresp/batch-tx-requester/batch_tx_requester.d.ts.map +1 -0
  196. package/dest/services/reqresp/batch-tx-requester/batch_tx_requester.js +539 -0
  197. package/dest/services/reqresp/batch-tx-requester/config.d.ts +17 -0
  198. package/dest/services/reqresp/batch-tx-requester/config.d.ts.map +1 -0
  199. package/dest/services/reqresp/batch-tx-requester/config.js +27 -0
  200. package/dest/services/reqresp/batch-tx-requester/interface.d.ts +46 -0
  201. package/dest/services/reqresp/batch-tx-requester/interface.d.ts.map +1 -0
  202. package/dest/services/reqresp/batch-tx-requester/interface.js +1 -0
  203. package/dest/services/reqresp/batch-tx-requester/missing_txs.d.ts +34 -0
  204. package/dest/services/reqresp/batch-tx-requester/missing_txs.d.ts.map +1 -0
  205. package/dest/services/reqresp/batch-tx-requester/missing_txs.js +130 -0
  206. package/dest/services/reqresp/batch-tx-requester/peer_collection.d.ts +60 -0
  207. package/dest/services/reqresp/batch-tx-requester/peer_collection.d.ts.map +1 -0
  208. package/dest/services/reqresp/batch-tx-requester/peer_collection.js +173 -0
  209. package/dest/services/reqresp/batch-tx-requester/tx_validator.d.ts +20 -0
  210. package/dest/services/reqresp/batch-tx-requester/tx_validator.d.ts.map +1 -0
  211. package/dest/services/reqresp/batch-tx-requester/tx_validator.js +21 -0
  212. package/dest/services/reqresp/connection-sampler/batch_connection_sampler.d.ts +22 -3
  213. package/dest/services/reqresp/connection-sampler/batch_connection_sampler.d.ts.map +1 -1
  214. package/dest/services/reqresp/connection-sampler/batch_connection_sampler.js +63 -4
  215. package/dest/services/reqresp/connection-sampler/connection_sampler.d.ts +2 -1
  216. package/dest/services/reqresp/connection-sampler/connection_sampler.d.ts.map +1 -1
  217. package/dest/services/reqresp/connection-sampler/connection_sampler.js +12 -0
  218. package/dest/services/reqresp/interface.d.ts +12 -1
  219. package/dest/services/reqresp/interface.d.ts.map +1 -1
  220. package/dest/services/reqresp/interface.js +15 -1
  221. package/dest/services/reqresp/metrics.d.ts +6 -5
  222. package/dest/services/reqresp/metrics.d.ts.map +1 -1
  223. package/dest/services/reqresp/metrics.js +17 -5
  224. package/dest/services/reqresp/protocols/block_txs/bitvector.d.ts +5 -1
  225. package/dest/services/reqresp/protocols/block_txs/bitvector.d.ts.map +1 -1
  226. package/dest/services/reqresp/protocols/block_txs/bitvector.js +5 -0
  227. package/dest/services/reqresp/protocols/block_txs/block_txs_handler.d.ts +7 -5
  228. package/dest/services/reqresp/protocols/block_txs/block_txs_handler.d.ts.map +1 -1
  229. package/dest/services/reqresp/protocols/block_txs/block_txs_handler.js +27 -9
  230. package/dest/services/reqresp/protocols/block_txs/block_txs_reqresp.d.ts +29 -6
  231. package/dest/services/reqresp/protocols/block_txs/block_txs_reqresp.d.ts.map +1 -1
  232. package/dest/services/reqresp/protocols/block_txs/block_txs_reqresp.js +59 -13
  233. package/dest/services/reqresp/protocols/tx.d.ts +7 -1
  234. package/dest/services/reqresp/protocols/tx.d.ts.map +1 -1
  235. package/dest/services/reqresp/protocols/tx.js +20 -0
  236. package/dest/services/reqresp/reqresp.d.ts +6 -1
  237. package/dest/services/reqresp/reqresp.d.ts.map +1 -1
  238. package/dest/services/reqresp/reqresp.js +71 -27
  239. package/dest/services/service.d.ts +42 -3
  240. package/dest/services/service.d.ts.map +1 -1
  241. package/dest/services/tx_collection/config.d.ts +22 -1
  242. package/dest/services/tx_collection/config.d.ts.map +1 -1
  243. package/dest/services/tx_collection/config.js +55 -1
  244. package/dest/services/tx_collection/fast_tx_collection.d.ts +7 -4
  245. package/dest/services/tx_collection/fast_tx_collection.d.ts.map +1 -1
  246. package/dest/services/tx_collection/fast_tx_collection.js +71 -44
  247. package/dest/services/tx_collection/file_store_tx_collection.d.ts +53 -0
  248. package/dest/services/tx_collection/file_store_tx_collection.d.ts.map +1 -0
  249. package/dest/services/tx_collection/file_store_tx_collection.js +167 -0
  250. package/dest/services/tx_collection/file_store_tx_source.d.ts +37 -0
  251. package/dest/services/tx_collection/file_store_tx_source.d.ts.map +1 -0
  252. package/dest/services/tx_collection/file_store_tx_source.js +90 -0
  253. package/dest/services/tx_collection/index.d.ts +3 -1
  254. package/dest/services/tx_collection/index.d.ts.map +1 -1
  255. package/dest/services/tx_collection/index.js +2 -0
  256. package/dest/services/tx_collection/instrumentation.d.ts +1 -1
  257. package/dest/services/tx_collection/instrumentation.d.ts.map +1 -1
  258. package/dest/services/tx_collection/instrumentation.js +10 -2
  259. package/dest/services/tx_collection/missing_txs_tracker.d.ts +32 -0
  260. package/dest/services/tx_collection/missing_txs_tracker.d.ts.map +1 -0
  261. package/dest/services/tx_collection/missing_txs_tracker.js +27 -0
  262. package/dest/services/tx_collection/proposal_tx_collector.d.ts +49 -0
  263. package/dest/services/tx_collection/proposal_tx_collector.d.ts.map +1 -0
  264. package/dest/services/tx_collection/proposal_tx_collector.js +50 -0
  265. package/dest/services/tx_collection/slow_tx_collection.d.ts +7 -3
  266. package/dest/services/tx_collection/slow_tx_collection.d.ts.map +1 -1
  267. package/dest/services/tx_collection/slow_tx_collection.js +60 -26
  268. package/dest/services/tx_collection/tx_collection.d.ts +25 -12
  269. package/dest/services/tx_collection/tx_collection.d.ts.map +1 -1
  270. package/dest/services/tx_collection/tx_collection.js +79 -7
  271. package/dest/services/tx_collection/tx_collection_sink.d.ts +18 -8
  272. package/dest/services/tx_collection/tx_collection_sink.d.ts.map +1 -1
  273. package/dest/services/tx_collection/tx_collection_sink.js +26 -29
  274. package/dest/services/tx_collection/tx_source.d.ts +8 -3
  275. package/dest/services/tx_collection/tx_source.d.ts.map +1 -1
  276. package/dest/services/tx_collection/tx_source.js +19 -2
  277. package/dest/services/tx_file_store/config.d.ts +16 -0
  278. package/dest/services/tx_file_store/config.d.ts.map +1 -0
  279. package/dest/services/tx_file_store/config.js +22 -0
  280. package/dest/services/tx_file_store/index.d.ts +4 -0
  281. package/dest/services/tx_file_store/index.d.ts.map +1 -0
  282. package/dest/services/tx_file_store/index.js +3 -0
  283. package/dest/services/tx_file_store/instrumentation.d.ts +15 -0
  284. package/dest/services/tx_file_store/instrumentation.d.ts.map +1 -0
  285. package/dest/services/tx_file_store/instrumentation.js +29 -0
  286. package/dest/services/tx_file_store/tx_file_store.d.ts +48 -0
  287. package/dest/services/tx_file_store/tx_file_store.d.ts.map +1 -0
  288. package/dest/services/tx_file_store/tx_file_store.js +152 -0
  289. package/dest/services/tx_provider.d.ts +4 -4
  290. package/dest/services/tx_provider.d.ts.map +1 -1
  291. package/dest/services/tx_provider.js +9 -8
  292. package/dest/services/tx_provider_instrumentation.d.ts +1 -1
  293. package/dest/services/tx_provider_instrumentation.d.ts.map +1 -1
  294. package/dest/services/tx_provider_instrumentation.js +5 -5
  295. package/dest/test-helpers/index.d.ts +3 -1
  296. package/dest/test-helpers/index.d.ts.map +1 -1
  297. package/dest/test-helpers/index.js +2 -0
  298. package/dest/test-helpers/make-test-p2p-clients.d.ts +7 -8
  299. package/dest/test-helpers/make-test-p2p-clients.d.ts.map +1 -1
  300. package/dest/test-helpers/make-test-p2p-clients.js +1 -2
  301. package/dest/test-helpers/mock-pubsub.d.ts +30 -4
  302. package/dest/test-helpers/mock-pubsub.d.ts.map +1 -1
  303. package/dest/test-helpers/mock-pubsub.js +105 -4
  304. package/dest/test-helpers/reqresp-nodes.d.ts +2 -3
  305. package/dest/test-helpers/reqresp-nodes.d.ts.map +1 -1
  306. package/dest/test-helpers/reqresp-nodes.js +4 -3
  307. package/dest/test-helpers/test_tx_provider.d.ts +40 -0
  308. package/dest/test-helpers/test_tx_provider.d.ts.map +1 -0
  309. package/dest/test-helpers/test_tx_provider.js +41 -0
  310. package/dest/test-helpers/testbench-utils.d.ts +163 -0
  311. package/dest/test-helpers/testbench-utils.d.ts.map +1 -0
  312. package/dest/test-helpers/testbench-utils.js +366 -0
  313. package/dest/testbench/p2p_client_testbench_worker.d.ts +28 -2
  314. package/dest/testbench/p2p_client_testbench_worker.d.ts.map +1 -1
  315. package/dest/testbench/p2p_client_testbench_worker.js +219 -138
  316. package/dest/testbench/worker_client_manager.d.ts +51 -6
  317. package/dest/testbench/worker_client_manager.d.ts.map +1 -1
  318. package/dest/testbench/worker_client_manager.js +226 -44
  319. package/dest/util.d.ts +2 -2
  320. package/dest/util.d.ts.map +1 -1
  321. package/package.json +14 -14
  322. package/src/bootstrap/bootstrap.ts +7 -4
  323. package/src/client/factory.ts +83 -36
  324. package/src/client/interface.ts +56 -34
  325. package/src/client/p2p_client.ts +192 -247
  326. package/src/client/test/tx_proposal_collector/README.md +227 -0
  327. package/src/client/test/tx_proposal_collector/proposal_tx_collector_worker.ts +345 -0
  328. package/src/client/test/tx_proposal_collector/proposal_tx_collector_worker_protocol.ts +43 -0
  329. package/src/config.ts +47 -11
  330. package/src/errors/tx-pool.error.ts +12 -0
  331. package/src/index.ts +1 -0
  332. package/src/mem_pools/attestation_pool/attestation_pool.ts +496 -91
  333. package/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts +442 -102
  334. package/src/mem_pools/attestation_pool/index.ts +9 -2
  335. package/src/mem_pools/attestation_pool/mocks.ts +2 -1
  336. package/src/mem_pools/index.ts +4 -1
  337. package/src/mem_pools/instrumentation.ts +2 -1
  338. package/src/mem_pools/interface.ts +4 -4
  339. package/src/mem_pools/tx_pool/README.md +1 -1
  340. package/src/mem_pools/tx_pool/eviction/invalid_txs_after_mining_rule.ts +3 -3
  341. package/src/mem_pools/tx_pool_v2/README.md +275 -0
  342. package/src/mem_pools/tx_pool_v2/archive/index.ts +1 -0
  343. package/src/mem_pools/tx_pool_v2/archive/tx_archive.ts +120 -0
  344. package/src/mem_pools/tx_pool_v2/deleted_pool.ts +321 -0
  345. package/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts +160 -0
  346. package/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts +121 -0
  347. package/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts +125 -0
  348. package/src/mem_pools/tx_pool_v2/eviction/index.ts +27 -0
  349. package/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +209 -0
  350. package/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts +74 -0
  351. package/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts +101 -0
  352. package/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts +91 -0
  353. package/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +90 -0
  354. package/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts +31 -0
  355. package/src/mem_pools/tx_pool_v2/index.ts +12 -0
  356. package/src/mem_pools/tx_pool_v2/instrumentation.ts +69 -0
  357. package/src/mem_pools/tx_pool_v2/interfaces.ts +242 -0
  358. package/src/mem_pools/tx_pool_v2/tx_metadata.ts +297 -0
  359. package/src/mem_pools/tx_pool_v2/tx_pool_bench_metrics.ts +77 -0
  360. package/src/mem_pools/tx_pool_v2/tx_pool_indices.ts +444 -0
  361. package/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +223 -0
  362. package/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +1083 -0
  363. package/src/msg_validators/attestation_validator/fisherman_attestation_validator.ts +10 -4
  364. package/src/msg_validators/proposal_validator/proposal_validator.ts +5 -5
  365. package/src/msg_validators/tx_validator/README.md +115 -0
  366. package/src/msg_validators/tx_validator/aggregate_tx_validator.ts +5 -5
  367. package/src/msg_validators/tx_validator/archive_cache.ts +3 -3
  368. package/src/msg_validators/tx_validator/block_header_validator.ts +22 -11
  369. package/src/msg_validators/tx_validator/data_validator.ts +6 -2
  370. package/src/msg_validators/tx_validator/double_spend_validator.ts +15 -9
  371. package/src/msg_validators/tx_validator/factory.ts +372 -55
  372. package/src/msg_validators/tx_validator/gas_validator.ts +98 -29
  373. package/src/msg_validators/tx_validator/index.ts +1 -0
  374. package/src/msg_validators/tx_validator/metadata_validator.ts +6 -3
  375. package/src/msg_validators/tx_validator/nullifier_cache.ts +30 -0
  376. package/src/msg_validators/tx_validator/phases_validator.ts +5 -3
  377. package/src/msg_validators/tx_validator/size_validator.ts +6 -2
  378. package/src/msg_validators/tx_validator/timestamp_validator.ts +29 -21
  379. package/src/msg_validators/tx_validator/tx_permitted_validator.ts +8 -3
  380. package/src/msg_validators/tx_validator/tx_proof_validator.ts +8 -3
  381. package/src/services/data_store.ts +10 -7
  382. package/src/services/discv5/discV5_service.ts +1 -1
  383. package/src/services/dummy_service.ts +59 -2
  384. package/src/services/encoding.ts +9 -9
  385. package/src/services/gossipsub/README.md +641 -0
  386. package/src/services/gossipsub/index.ts +2 -0
  387. package/src/services/gossipsub/scoring.ts +29 -5
  388. package/src/services/gossipsub/topic_score_params.ts +487 -0
  389. package/src/services/index.ts +1 -0
  390. package/src/services/libp2p/instrumentation.ts +15 -2
  391. package/src/services/libp2p/libp2p_service.ts +496 -397
  392. package/src/services/peer-manager/metrics.ts +21 -4
  393. package/src/services/peer-manager/peer_scoring.ts +29 -1
  394. package/src/services/reqresp/batch-tx-requester/README.md +305 -0
  395. package/src/services/reqresp/batch-tx-requester/batch_tx_requester.ts +678 -0
  396. package/src/services/reqresp/batch-tx-requester/config.ts +40 -0
  397. package/src/services/reqresp/batch-tx-requester/interface.ts +53 -0
  398. package/src/services/reqresp/batch-tx-requester/missing_txs.ts +161 -0
  399. package/src/services/reqresp/batch-tx-requester/peer_collection.ts +244 -0
  400. package/src/services/reqresp/batch-tx-requester/tx_validator.ts +37 -0
  401. package/src/services/reqresp/connection-sampler/batch_connection_sampler.ts +65 -4
  402. package/src/services/reqresp/connection-sampler/connection_sampler.ts +16 -0
  403. package/src/services/reqresp/interface.ts +29 -1
  404. package/src/services/reqresp/metrics.ts +34 -9
  405. package/src/services/reqresp/protocols/block_txs/bitvector.ts +7 -0
  406. package/src/services/reqresp/protocols/block_txs/block_txs_handler.ts +35 -12
  407. package/src/services/reqresp/protocols/block_txs/block_txs_reqresp.ts +74 -9
  408. package/src/services/reqresp/protocols/tx.ts +22 -0
  409. package/src/services/reqresp/reqresp.ts +82 -23
  410. package/src/services/service.ts +55 -2
  411. package/src/services/tx_collection/config.ts +83 -1
  412. package/src/services/tx_collection/fast_tx_collection.ts +93 -47
  413. package/src/services/tx_collection/file_store_tx_collection.ts +202 -0
  414. package/src/services/tx_collection/file_store_tx_source.ts +117 -0
  415. package/src/services/tx_collection/index.ts +6 -0
  416. package/src/services/tx_collection/instrumentation.ts +17 -2
  417. package/src/services/tx_collection/missing_txs_tracker.ts +52 -0
  418. package/src/services/tx_collection/proposal_tx_collector.ts +113 -0
  419. package/src/services/tx_collection/slow_tx_collection.ts +66 -33
  420. package/src/services/tx_collection/tx_collection.ts +117 -20
  421. package/src/services/tx_collection/tx_collection_sink.ts +30 -34
  422. package/src/services/tx_collection/tx_source.ts +22 -3
  423. package/src/services/tx_file_store/config.ts +37 -0
  424. package/src/services/tx_file_store/index.ts +3 -0
  425. package/src/services/tx_file_store/instrumentation.ts +36 -0
  426. package/src/services/tx_file_store/tx_file_store.ts +175 -0
  427. package/src/services/tx_provider.ts +10 -9
  428. package/src/services/tx_provider_instrumentation.ts +11 -5
  429. package/src/test-helpers/index.ts +2 -0
  430. package/src/test-helpers/make-test-p2p-clients.ts +3 -5
  431. package/src/test-helpers/mock-pubsub.ts +146 -9
  432. package/src/test-helpers/reqresp-nodes.ts +4 -6
  433. package/src/test-helpers/test_tx_provider.ts +64 -0
  434. package/src/test-helpers/testbench-utils.ts +430 -0
  435. package/src/testbench/p2p_client_testbench_worker.ts +333 -131
  436. package/src/testbench/worker_client_manager.ts +304 -47
  437. package/src/util.ts +7 -1
  438. package/dest/mem_pools/attestation_pool/kv_attestation_pool.d.ts +0 -40
  439. package/dest/mem_pools/attestation_pool/kv_attestation_pool.d.ts.map +0 -1
  440. package/dest/mem_pools/attestation_pool/kv_attestation_pool.js +0 -218
  441. package/dest/mem_pools/attestation_pool/memory_attestation_pool.d.ts +0 -31
  442. package/dest/mem_pools/attestation_pool/memory_attestation_pool.d.ts.map +0 -1
  443. package/dest/mem_pools/attestation_pool/memory_attestation_pool.js +0 -180
  444. package/src/mem_pools/attestation_pool/kv_attestation_pool.ts +0 -320
  445. package/src/mem_pools/attestation_pool/memory_attestation_pool.ts +0 -264
@@ -0,0 +1,539 @@
1
+ import { chunkWrapAround } from '@aztec/foundation/collection';
2
+ import { TimeoutError } from '@aztec/foundation/error';
3
+ import { createLogger } from '@aztec/foundation/log';
4
+ import { FifoMemoryQueue, Semaphore } from '@aztec/foundation/queue';
5
+ import { sleep } from '@aztec/foundation/sleep';
6
+ import { DateProvider, executeTimeout } from '@aztec/foundation/timer';
7
+ import { PeerErrorSeverity } from '@aztec/stdlib/p2p';
8
+ import { TxHash } from '@aztec/stdlib/tx';
9
+ import { ReqRespSubProtocol } from '.././interface.js';
10
+ import { BlockTxsRequest, BlockTxsResponse } from '.././protocols/index.js';
11
+ import { ReqRespStatus } from '.././status.js';
12
+ import { DEFAULT_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD, DEFAULT_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT, DEFAULT_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT, DEFAULT_BATCH_TX_REQUESTER_TX_BATCH_SIZE } from './config.js';
13
+ import { MissingTxMetadataCollection } from './missing_txs.js';
14
+ import { PeerCollection } from './peer_collection.js';
15
+ import { BatchRequestTxValidator } from './tx_validator.js';
16
/*
 * Tries to fetch all missing transactions until the deadline is hit.
 * Transactions are yielded by calling the run() method.
 *
 * We have a couple of peer types:
 * - Pinned peer is the one who sent us the block proposal
 * - Dumb peer:
 *   - We query this peer blindly because we don't know which txs it has
 *   - We hope it might have some of the transactions we asked for
 *   - When this peer sends a response it might become a Smart peer
 * - Smart peer:
 *   - Initially there are no smart peers, all are considered "dumb"
 *   - A peer becomes smart when its response tells us exactly which transactions it has
 *     AND we are missing some of those transactions
 * - Bad peer:
 *   - A peer which was unable to send us a successful response N times in a row
 */ export class BatchTxRequester {
    blockTxsSource;
    pinnedPeer;
    timeoutMs;
    p2pService;
    logger;
    dateProvider;
    opts;
    peers;
    txsMetadata;
    deadline;
    smartRequesterSemaphore;
    txQueue;
    txValidator;
    smartParallelWorkerCount;
    dumbParallelWorkerCount;
    txBatchSize;
    /*
     * @param missingTxsTracker - tracker of txs still to be fetched (wrapped in MissingTxMetadataCollection)
     * @param blockTxsSource - proposal-derived source of the full tx hash list and archive root
     * @param pinnedPeer - the peer that sent the proposal, or undefined
     * @param timeoutMs - overall budget; the deadline is computed from it at construction time
     * @param p2pService - reqResp, connection sampler, validator config and peer scoring
     * @param logger, dateProvider, opts - optional overrides (defaults created when omitted)
     */ constructor(missingTxsTracker, blockTxsSource, pinnedPeer, timeoutMs, p2pService, logger, dateProvider, opts){
        this.blockTxsSource = blockTxsSource;
        this.pinnedPeer = pinnedPeer;
        this.timeoutMs = timeoutMs;
        this.p2pService = p2pService;
        this.logger = logger ?? createLogger('p2p:reqresp_batch');
        this.dateProvider = dateProvider ?? new DateProvider();
        this.opts = opts ?? {};
        this.smartParallelWorkerCount = this.opts.smartParallelWorkerCount ?? DEFAULT_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT;
        this.dumbParallelWorkerCount = this.opts.dumbParallelWorkerCount ?? DEFAULT_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT;
        this.txBatchSize = this.opts.txBatchSize ?? DEFAULT_BATCH_TX_REQUESTER_TX_BATCH_SIZE;
        this.deadline = this.dateProvider.now() + this.timeoutMs;
        this.txQueue = new FifoMemoryQueue(this.logger);
        this.txValidator = this.opts.txValidator ?? new BatchRequestTxValidator(this.p2pService.txValidatorConfig);
        if (this.opts.peerCollection) {
            this.peers = this.opts.peerCollection;
        } else {
            const badPeerThreshold = this.opts.badPeerThreshold ?? DEFAULT_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD;
            this.peers = new PeerCollection(this.p2pService.connectionSampler, this.pinnedPeer, this.dateProvider, badPeerThreshold, this.p2pService.peerScoring);
        }
        this.txsMetadata = new MissingTxMetadataCollection(missingTxsTracker, this.txBatchSize);
        // Smart workers start blocked on this semaphore until the first dumb peer is promoted to smart.
        this.smartRequesterSemaphore = this.opts.semaphore ?? new Semaphore(0);
    }
    /*
     * Fetches all missing transactions and yields them one by one
     */ async *run() {
        // Our timeout is represented in milliseconds but the queue expects seconds.
        // We also want to make sure we wait at least 1 second in case of very low timeouts.
        const timeoutQueueAfter = Math.max(Math.ceil(this.timeoutMs / 1_000), 1);
        try {
            if (this.txsMetadata.getMissingTxHashes().size === 0) {
                return undefined;
            }
            // Start workers in background; once they settle (or the overall timeout fires), close the queue.
            const workersPromise = executeTimeout(()=>Promise.allSettled([
                this.smartRequester(),
                this.dumbRequester(),
                this.pinnedPeerRequester()
            ]), this.timeoutMs).finally(()=>{
                this.txQueue.end();
            });
            while(true){
                const tx = await this.txQueue.get(timeoutQueueAfter);
                // null indicates that the queue has ended
                if (tx === null) {
                    break;
                }
                yield tx;
                if (this.shouldStop()) {
                    // Drain queue before ending
                    let remaining;
                    while((remaining = this.txQueue.getImmediate()) !== undefined){
                        yield remaining;
                    }
                    break;
                }
            }
            this.unlockSmartRequesterSemaphores();
            await workersPromise;
        } catch (e) {
            this.logger.error(`Batch tx requester failed with error: ${e.message}`, {
                error: e
            });
        } finally{
            // Always close the queue and wake blocked smart workers so nothing awaits forever.
            this.txQueue.end();
            this.unlockSmartRequesterSemaphores();
        }
    }
    /*
     * Fetches all missing transactions
     * @returns Collection of fetched transactions */ static async collectAllTxs(generator) {
        const txs = [];
        for await (const tx of generator){
            if (tx === undefined) {
                break;
            }
            txs.push(tx);
        }
        return txs;
    }
    /*
     * Handles the so-called pinned peer.
     * The pinned peer is the one who sent us the block proposal.
     * We expect the pinned peer to have all transactions from the proposal at some point.
     * This holds because they themselves have to attest to the proposal and thus fetch all missing transactions.
     *
     * Given the reasoning above - we query the pinned peer separately from dumb/smart peers.
     */ async pinnedPeerRequester() {
        if (!this.pinnedPeer) {
            this.logger.debug('No pinned peer to request from');
            return;
        }
        while(!this.shouldStop()){
            // We've hit rate limits on the pinned peer - wait until the rate limit expires (clamped to deadline)
            const rateLimitDelay = this.peers.getPeerRateLimitDelayMs(this.pinnedPeer);
            const pinnedPeerIsRateLimited = rateLimitDelay !== undefined;
            if (pinnedPeerIsRateLimited) {
                await this.sleepClampedToDeadline(rateLimitDelay);
                continue;
            }
            const pinnedPeerWentBad = this.peers.getBadPeers().has(this.pinnedPeer.toString());
            if (pinnedPeerWentBad) {
                return;
            }
            // From the pinned peer we always request transactions so that we first request the least requested and not in flight.
            // This makes sense since the pinned peer should have ALL transactions,
            // thus it is best to ask pinned first for the transactions we have trouble getting from other peers.
            const txs = this.txsMetadata.getTxsToRequestFromThePeer(this.pinnedPeer);
            if (txs.length === 0) {
                this.logger.debug(`Pinned peer ${this.pinnedPeer.toString()} has no txs to request`);
                return;
            }
            const request = BlockTxsRequest.fromTxsSourceAndMissingTxs(this.blockTxsSource, txs);
            if (!request) {
                return;
            }
            txs.forEach((tx)=>{
                this.txsMetadata.markRequested(tx);
                this.txsMetadata.markInFlightBySmartPeer(tx);
            });
            await this.requestTxBatch(this.pinnedPeer, request);
            txs.forEach((tx)=>{
                this.txsMetadata.markNotInFlightBySmartPeer(tx);
            });
        }
    }
    /*
     * Starts dumb worker loops
     */ async dumbRequester() {
        const nextBatchIndex = this.makeRoundRobinIndexer();
        // Chunk missing tx hashes into batches of txBatchSize, wrapping around to ensure no peer gets less than txBatchSize
        const txChunks = ()=>{
            const missingHashes = Array.from(this.txsMetadata.getMissingTxHashes());
            return chunkWrapAround(missingHashes, this.txBatchSize);
        };
        const makeRequest = (_pid)=>{
            const chunks = txChunks();
            const idx = nextBatchIndex(()=>chunks.length);
            const noMoreTxsToRequest = idx === undefined;
            if (noMoreTxsToRequest) {
                return undefined;
            }
            const txs = chunks[idx].map((t)=>TxHash.fromString(t));
            // If peer is a dumb peer, we don't know yet if they received the full blockProposal.
            // There is a solid chance the peer didn't receive the proposal yet, thus we must send full hashes.
            const includeFullHashesInRequestNotJustIndices = true;
            const blockRequest = BlockTxsRequest.fromTxsSourceAndMissingTxs(this.blockTxsSource, txs, includeFullHashesInRequestNotJustIndices);
            const blockRequestHasNoMissingTxsFromTheProposal = !blockRequest;
            if (blockRequestHasNoMissingTxsFromTheProposal) {
                return undefined;
            }
            return {
                blockRequest,
                txs
            };
        };
        const workerCount = this.dumbParallelWorkerCount;
        const workers = Array.from({
            length: workerCount
        }, (_, index)=>this.dumbWorkerLoop(this.peers.nextDumbPeerToQuery.bind(this.peers), makeRequest, index + 1));
        await Promise.allSettled(workers);
    }
    /*
     * Dumb worker loop.
     * It fetches the next available dumb peer and builds a request for that peer.
     * Loops until the shouldStop condition is met or there are no more dumb peers to query.
     * This can happen if e.g. all "dumb" peers transition to "smart" or e.g. become "bad".
     */ async dumbWorkerLoop(pickNextPeer, request, workerIndex) {
        try {
            this.logger.debug(`Dumb worker ${workerIndex} started`);
            while(!this.shouldStop()){
                const peerId = pickNextPeer();
                const weRanOutOfPeersToQuery = peerId === undefined;
                if (weRanOutOfPeersToQuery) {
                    const nextDumbPeerDelay = this.peers.getNextDumbPeerAvailabilityDelayMs();
                    const thereAreSomeRateLimitedDumbPeers = nextDumbPeerDelay !== undefined;
                    if (thereAreSomeRateLimitedDumbPeers) {
                        // There are still some dumb peers to query but they have been rate limited.
                        // Sleep until the earliest one gets unblocked (clamped to deadline).
                        await this.sleepClampedToDeadline(nextDumbPeerDelay);
                        continue;
                    }
                    this.logger.debug(`Worker loop dumb: No more peers to query`);
                    break;
                }
                const nextBatchTxRequest = request(peerId);
                if (!nextBatchTxRequest) {
                    this.logger.debug(`Worker loop dumb: no txs to request, exiting`);
                    break;
                }
                const { blockRequest, txs } = nextBatchTxRequest;
                this.logger.debug(`Worker type dumb: Requesting txs from peer ${peerId.toString()}: ${txs.map((tx)=>tx.toString()).join(', ')}`);
                await this.requestTxBatch(peerId, blockRequest);
            }
        } catch (err) {
            this.logger.error(`Dumb worker ${workerIndex} encountered an error: ${err}`);
        } finally{
            this.logger.debug(`Dumb worker ${workerIndex} finished`);
        }
    }
    /*
     * Starts smart worker loops
     */ async smartRequester() {
        const makeRequest = (pid)=>{
            const txs = this.txsMetadata.getTxsToRequestFromThePeer(pid);
            const blockRequest = BlockTxsRequest.fromTxsSourceAndMissingTxs(this.blockTxsSource, txs);
            if (!blockRequest) {
                return undefined;
            }
            return {
                blockRequest,
                txs
            };
        };
        const workers = Array.from({
            length: this.smartParallelWorkerCount
        }, (_, index)=>this.smartWorkerLoop(this.peers.nextSmartPeerToQuery.bind(this.peers), makeRequest, index + 1));
        await Promise.allSettled(workers);
    }
    /*
     * Smart worker loop.
     * It fetches the next available smart peer and builds a request for that peer.
     * Loops until the shouldStop condition is met.
     *
     * Notes:
     * - We don't start the worker loop immediately, but block on the semaphore
     *   until some dumb peer transitions to smart peer
     * - We might run out of smart peers, because:
     *   - they "went bad"
     *   - there are fewer smart peers than worker loops
     *   In such a scenario we either wait for the next dumb peer to become smart or kill the worker loop
     */ async smartWorkerLoop(pickNextPeer, request, workerIndex) {
        try {
            this.logger.trace(`Smart worker ${workerIndex} started`);
            await executeTimeout((_)=>this.smartRequesterSemaphore.acquire(), this.timeoutMs);
            this.logger.trace(`Smart worker ${workerIndex} acquired semaphore`);
            while(!this.shouldStop()){
                const peerId = pickNextPeer();
                const weRanOutOfPeersToQuery = peerId === undefined;
                if (weRanOutOfPeersToQuery) {
                    this.logger.debug(`Worker loop smart: No more peers to query`);
                    // If we have rate limited peers wait for them.
                    const nextSmartPeerDelay = this.peers.getNextSmartPeerAvailabilityDelayMs();
                    const thereAreSomeRateLimitedSmartPeers = nextSmartPeerDelay !== undefined;
                    if (thereAreSomeRateLimitedSmartPeers) {
                        await this.sleepClampedToDeadline(nextSmartPeerDelay);
                        continue;
                    }
                    // We end up here when all known smart peers became temporarily unavailable via a combination of
                    // (bad, in-flight, or rate-limited) or in some weird scenario all current smart peers turn bad (which is permanent)
                    // but there are dumb peers that could be promoted,
                    // or a new peer can join as dumb and be promoted later.
                    //
                    // When a dumb peer responds with valid txIndices, it gets
                    // promoted to smart and releases the semaphore, waking this worker.
                    await executeTimeout((_)=>this.smartRequesterSemaphore.acquire(), this.timeoutMs);
                    this.logger.debug(`Worker loop smart: acquired next smart peer`);
                    continue;
                }
                const nextBatchTxRequest = request(peerId);
                if (!nextBatchTxRequest) {
                    this.logger.debug(`Worker loop smart: no txs to request, exiting`);
                    break;
                }
                const { blockRequest, txs } = nextBatchTxRequest;
                // We only mark transactions as in flight if queried by a Smart peer,
                // because asking a dumb peer is a shot in the dark (there is a good chance they won't have it).
                // So we don't gain anything if we mark txs in-flight for dumb peers.
                txs.forEach((tx)=>{
                    this.txsMetadata.markRequested(tx);
                    this.txsMetadata.markInFlightBySmartPeer(tx);
                });
                await this.requestTxBatch(peerId, blockRequest);
                txs.forEach((tx)=>{
                    this.txsMetadata.markNotInFlightBySmartPeer(tx);
                });
            }
        } catch (err) {
            if (err instanceof TimeoutError) {
                this.logger.debug(`Smart worker ${workerIndex} timed out waiting for semaphore`);
            } else {
                this.logger.error(`Smart worker ${workerIndex} encountered an error: ${err}`);
            }
        } finally{
            this.logger.debug(`Smart worker ${workerIndex} finished`);
        }
    }
    /*
     * Sends the actual request to the peer and handles the response
     *
     * @param peerId - the peer to send the request to
     * @param request - the actual request
     */ async requestTxBatch(peerId, request) {
        try {
            this.peers.markPeerInFlight(peerId);
            const response = await this.p2pService.reqResp.sendRequestToPeer(peerId, ReqRespSubProtocol.BLOCK_TXS, request.toBuffer());
            if (response.status !== ReqRespStatus.SUCCESS) {
                this.logger.debug(`Peer ${peerId.toString()} failed to respond with status: ${response.status}`);
                this.handleFailResponseFromPeer(peerId, response.status);
                return;
            }
            const blockResponse = BlockTxsResponse.fromBuffer(response.data);
            await this.handleSuccessResponseFromPeer(peerId, blockResponse);
        } catch (err) {
            this.logger.error(`Failed to get valid response from peer ${peerId.toString()}: ${err.message}`, {
                peerId,
                error: err
            });
            this.handleFailResponseFromPeer(peerId, ReqRespStatus.UNKNOWN);
        } finally{
            // Always clear the in-flight flag, even on failure, so the peer can be sampled again.
            this.peers.unMarkPeerInFlight(peerId);
        }
    }
    /*
     * Handles a failed response from the peer.
     * There are 3 scenarios:
     * - RATE_LIMIT_EXCEEDED: We mark this and don't query this peer again for some time
     * - FAILURE and UNKNOWN: We penalise this; if a peer has been penalised this way N times they are not queried again.
     *   This implies we will query these peers a couple more times and give them a chance to "redeem" themselves before completely ignoring them.
     */ handleFailResponseFromPeer(peerId, responseStatus) {
        //TODO: Should we ban these peers?
        if (responseStatus === ReqRespStatus.FAILURE || responseStatus === ReqRespStatus.UNKNOWN) {
            this.peers.penalisePeer(peerId, PeerErrorSeverity.HighToleranceError);
            return;
        }
        if (responseStatus === ReqRespStatus.RATE_LIMIT_EXCEEDED) {
            this.peers.markPeerRateLimitExceeded(peerId);
        }
    }
    /*
     * Handles a successful response from the peer, this includes
     * - Handling received transactions
     * - Deciding if the peer is "smart" or not
     */ async handleSuccessResponseFromPeer(peerId, response) {
        this.logger.debug(`Received txs: ${response.txs.length} from peer ${peerId.toString()} `);
        await this.handleReceivedTxs(peerId, response.txs);
        this.decideIfPeerIsSmart(peerId, response);
    }
    /*
     * Handles received txs.
     * Transactions are validated and then put on the async queue
     * to be yielded by the main running loop.
     */ async handleReceivedTxs(peerId, txs) {
        const newTxs = txs.filter((tx)=>!this.txsMetadata.alreadyFetched(tx.txHash));
        if (newTxs.length === 0) {
            return;
        }
        //TODO: this validation can be slow, maybe spawn a worker just for validation.
        // We could use the async queue for communication.
        const validationResults = await Promise.allSettled(newTxs.map(async (tx)=>({
                tx,
                isValid: (await this.txValidator.validateRequestedTx(tx)).result === 'valid'
            })));
        let hasInvalidTx = false;
        validationResults.forEach((result)=>{
            if (result.status === 'fulfilled' && result.value.isValid) {
                // Only enqueue a tx the metadata collection accepts, so each tx is yielded at most once.
                if (this.txsMetadata.markFetched(peerId, result.value.tx)) {
                    this.txQueue.put(result.value.tx);
                }
            } else {
                hasInvalidTx = true;
            }
        });
        if (hasInvalidTx) {
            this.peers.penalisePeer(peerId, PeerErrorSeverity.LowToleranceError);
        } else {
            // If we have received a successful response from the peer, they have "redeemed" themselves and are not considered bad anymore
            this.peers.unMarkPeerAsBad(peerId);
        }
        const missingTxHashes = this.txsMetadata.getMissingTxHashes();
        if (missingTxHashes.size === 0) {
            // wake sleepers so they can see shouldStop() and exit before waiting on timeout
            this.unlockSmartRequesterSemaphores();
        } else {
            this.logger.trace(`Missing txs: ${Array.from(missingTxHashes).map((tx)=>tx.toString()).join(', ')}`);
        }
    }
    /*
     * A peer is smart if:
     * - They are not the pinned peer
     * - They have sent a successful response indicating which txs from the block proposal they have
     * - They have transactions we are missing
     */ decideIfPeerIsSmart(peerId, response) {
        const pinnedPeerShouldNeverBeMarkedAsSmart = this.pinnedPeer && peerId.toString() === this.pinnedPeer.toString();
        if (pinnedPeerShouldNeverBeMarkedAsSmart) {
            return;
        }
        const smartPeersAreDisabled = this.smartParallelWorkerCount === 0;
        if (smartPeersAreDisabled) {
            return;
        }
        // If the block response is invalid we still want to query this peer in the future,
        // because they sent a successful response, so they might become a smart peer in the future
        // or send us needed txs.
        if (!this.isBlockResponseValid(response)) {
            return;
        }
        // We mark a peer as "smart" only if they have some txs we are missing.
        // Otherwise we keep them as "dumb" in the hope they'll receive some new txs we are missing in the future.
        if (!this.peerHasSomeTxsWeAreMissing(peerId, response)) {
            this.logger.debug(`${peerId.toString()} has no txs we are missing, skipping`);
            return;
        }
        this.peers.markPeerSmart(peerId);
        this.markTxsPeerHas(peerId, response);
        // Unblock smart workers
        this.smartRequesterSemaphore.release();
    }
    // A response is valid when it refers to the same proposal (matching archive root)
    // and advertises at least one tx from the proposal.
    isBlockResponseValid(response) {
        const archiveRootsMatch = this.blockTxsSource.archive.toString() === response.archiveRoot.toString();
        const peerHasSomeTxsFromProposal = !response.txIndices.isEmpty();
        return archiveRootsMatch && peerHasSomeTxsFromProposal;
    }
    // True when the intersection of the peer's advertised txs and our missing set is non-empty.
    peerHasSomeTxsWeAreMissing(_peerId, response) {
        const txsPeerHas = new Set(this.extractHashesPeerHasFromResponse(response).map((h)=>h.toString()));
        return this.txsMetadata.getMissingTxHashes().intersection(txsPeerHas).size > 0;
    }
    // Records which proposal txs the peer advertised as available.
    markTxsPeerHas(peerId, response) {
        const txsPeerHas = this.extractHashesPeerHasFromResponse(response);
        this.logger.debug(`${peerId.toString()} has txs: ${txsPeerHas.map((tx)=>tx.toString()).join(', ')}`);
        this.txsMetadata.markPeerHas(peerId, txsPeerHas);
    }
    // Maps the response's true bit indices back to tx hashes via the proposal's hash list.
    extractHashesPeerHasFromResponse(response) {
        const hashes = [];
        const indicesOfHashesPeerHas = new Set(response.txIndices.getTrueIndices());
        this.blockTxsSource.txHashes.forEach((hash, idx)=>{
            if (indicesOfHashesPeerHas.has(idx)) {
                hashes.push(hash);
            }
        });
        return hashes;
    }
    /*
     * Helper function to create a round robin indexer -
     * i.e. the "thing" which returns the next index/number in round robin fashion
     **/ makeRoundRobinIndexer(start = 0) {
        let i = start;
        /*
         * Function to calculate the next round-robin number.
         * The idea is that we pass in an array size and based on it and the previous state we call next.
         * The array size can change between calls thus it is passed as a function.
         *
         * @returns next index or undefined if size is 0
         */ return (size)=>{
            const length = size();
            if (length === 0) {
                return undefined;
            }
            const current = i % length;
            i = (current + 1) % length;
            return current;
        };
    }
    /*
     * @returns true if all missing txs have been fetched */ fetchedAllTxs() {
        // Strict equality (was `==`): size is always a number, so === is the correct comparison.
        return this.txsMetadata.getMissingTxHashes().size === 0;
    }
    /*
     * Checks if the BatchTxRequester should stop fetching missing txs.
     * Conditions for stopping are:
     * - There have been no missing transactions to start with
     * - All transactions have been fetched
     * - The deadline has been hit (no more time to fetch)
     * - This process has been cancelled via abortSignal
     *
     * @returns true if BatchTxRequester should stop, otherwise false */ shouldStop() {
        const aborted = this.opts.abortSignal?.aborted ?? false;
        if (aborted) {
            this.unlockSmartRequesterSemaphores();
        }
        return aborted || this.fetchedAllTxs() || this.dateProvider.now() > this.deadline;
    }
    /*
     * Helper function which unlocks all smart requester semaphores
     * @note This is needed otherwise they will block forever
     */ unlockSmartRequesterSemaphores() {
        for(let i = 0; i < this.smartParallelWorkerCount; i++){
            this.smartRequesterSemaphore.release();
        }
    }
    /*
     * Sleeps for the given duration, but clamped to the deadline.
     * This ensures we don't sleep past the deadline.
     */ async sleepClampedToDeadline(durationMs) {
        const remaining = this.deadline - this.dateProvider.now();
        const thereIsTimeRemaining = remaining > 0;
        if (thereIsTimeRemaining) {
            await sleep(Math.min(durationMs, remaining));
        }
    }
}
@@ -0,0 +1,17 @@
1
+ import { type ConfigMappingsType } from '@aztec/foundation/config';
2
/** Default number of parallel worker loops querying "smart" peers. */
export declare const DEFAULT_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT = 10;
/** Default number of parallel worker loops querying "dumb" peers. */
export declare const DEFAULT_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT = 10;
/** Default number of transactions requested per batch/chunk. */
export declare const DEFAULT_BATCH_TX_REQUESTER_TX_BATCH_SIZE = 8;
/** Default failure count after which a peer is treated as bad. */
export declare const DEFAULT_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD = 2;
/** User-facing configuration knobs for the batch tx requester. */
export interface BatchTxRequesterConfig {
    /** Max concurrent requests to smart peers. */
    batchTxRequesterSmartParallelWorkerCount: number;
    /** Max concurrent requests to dumb peers. */
    batchTxRequesterDumbParallelWorkerCount: number;
    /** Max transactions per request / chunk size. */
    batchTxRequesterTxBatchSize: number;
    /** Failures before a peer is considered bad (see > threshold logic). */
    batchTxRequesterBadPeerThreshold: number;
}
/** Environment-variable mappings (names, descriptions, defaults) for BatchTxRequesterConfig. */
export declare const batchTxRequesterConfigMappings: ConfigMappingsType<BatchTxRequesterConfig>;
17
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiY29uZmlnLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi8uLi9zcmMvc2VydmljZXMvcmVxcmVzcC9iYXRjaC10eC1yZXF1ZXN0ZXIvY29uZmlnLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sRUFBRSxLQUFLLGtCQUFrQixFQUFzQixNQUFNLDBCQUEwQixDQUFDO0FBRXZGLGVBQU8sTUFBTSxzREFBc0QsS0FBSyxDQUFDO0FBQ3pFLGVBQU8sTUFBTSxxREFBcUQsS0FBSyxDQUFDO0FBQ3hFLGVBQU8sTUFBTSx3Q0FBd0MsSUFBSSxDQUFDO0FBQzFELGVBQU8sTUFBTSw2Q0FBNkMsSUFBSSxDQUFDO0FBRS9ELE1BQU0sV0FBVyxzQkFBc0I7SUFDckMsOENBQThDO0lBQzlDLHdDQUF3QyxFQUFFLE1BQU0sQ0FBQztJQUNqRCw2Q0FBNkM7SUFDN0MsdUNBQXVDLEVBQUUsTUFBTSxDQUFDO0lBQ2hELGlEQUFpRDtJQUNqRCwyQkFBMkIsRUFBRSxNQUFNLENBQUM7SUFDcEMsd0VBQXdFO0lBQ3hFLGdDQUFnQyxFQUFFLE1BQU0sQ0FBQztDQUMxQztBQUVELGVBQU8sTUFBTSw4QkFBOEIsRUFBRSxrQkFBa0IsQ0FBQyxzQkFBc0IsQ0FxQnJGLENBQUMifQ==
@@ -0,0 +1 @@
1
+ {"version":3,"file":"config.d.ts","sourceRoot":"","sources":["../../../../src/services/reqresp/batch-tx-requester/config.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,kBAAkB,EAAsB,MAAM,0BAA0B,CAAC;AAEvF,eAAO,MAAM,sDAAsD,KAAK,CAAC;AACzE,eAAO,MAAM,qDAAqD,KAAK,CAAC;AACxE,eAAO,MAAM,wCAAwC,IAAI,CAAC;AAC1D,eAAO,MAAM,6CAA6C,IAAI,CAAC;AAE/D,MAAM,WAAW,sBAAsB;IACrC,8CAA8C;IAC9C,wCAAwC,EAAE,MAAM,CAAC;IACjD,6CAA6C;IAC7C,uCAAuC,EAAE,MAAM,CAAC;IAChD,iDAAiD;IACjD,2BAA2B,EAAE,MAAM,CAAC;IACpC,wEAAwE;IACxE,gCAAgC,EAAE,MAAM,CAAC;CAC1C;AAED,eAAO,MAAM,8BAA8B,EAAE,kBAAkB,CAAC,sBAAsB,CAqBrF,CAAC"}
@@ -0,0 +1,27 @@
1
+ import { numberConfigHelper } from '@aztec/foundation/config';
2
// Default tuning values for the BatchTxRequester. Each can be overridden via the
// corresponding P2P_BATCH_TX_REQUESTER_* environment variable (see mappings below).
export const DEFAULT_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT = 10;
export const DEFAULT_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT = 10;
export const DEFAULT_BATCH_TX_REQUESTER_TX_BATCH_SIZE = 8;
export const DEFAULT_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD = 2;
// Config mappings consumed by the foundation config loader: each entry pairs an
// environment variable with a human-readable description and numeric parsing + default.
export const batchTxRequesterConfigMappings = {
    // Parallel worker loops for "smart" peers (peers that told us which txs they hold).
    batchTxRequesterSmartParallelWorkerCount: {
        env: 'P2P_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT',
        description: 'Max concurrent requests to smart peers for batch tx requester.',
        ...numberConfigHelper(DEFAULT_BATCH_TX_REQUESTER_SMART_PARALLEL_WORKER_COUNT)
    },
    // Parallel worker loops for "dumb" peers (queried blindly).
    batchTxRequesterDumbParallelWorkerCount: {
        env: 'P2P_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT',
        description: 'Max concurrent requests to dumb peers for batch tx requester.',
        ...numberConfigHelper(DEFAULT_BATCH_TX_REQUESTER_DUMB_PARALLEL_WORKER_COUNT)
    },
    // Upper bound on transactions requested in a single request / chunk.
    batchTxRequesterTxBatchSize: {
        env: 'P2P_BATCH_TX_REQUESTER_TX_BATCH_SIZE',
        description: 'Max transactions per request / chunk size for batch tx requester.',
        ...numberConfigHelper(DEFAULT_BATCH_TX_REQUESTER_TX_BATCH_SIZE)
    },
    // Failed responses in a row after which a peer is considered bad and skipped.
    batchTxRequesterBadPeerThreshold: {
        env: 'P2P_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD',
        description: 'Failures before a peer is considered bad (see > threshold logic).',
        ...numberConfigHelper(DEFAULT_BATCH_TX_REQUESTER_BAD_PEER_THRESHOLD)
    }
};
@@ -0,0 +1,46 @@
1
+ import type { ISemaphore } from '@aztec/foundation/queue';
2
+ import type { PeerErrorSeverity } from '@aztec/stdlib/p2p';
3
+ import type { Tx, TxHash } from '@aztec/stdlib/tx';
4
+ import type { PeerId } from '@libp2p/interface';
5
+ import type { ConnectionSampler } from '../connection-sampler/connection_sampler.js';
6
+ import type { ReqRespInterface } from '../interface.js';
7
+ import type { IPeerCollection } from './peer_collection.js';
8
+ import type { BatchRequestTxValidatorConfig, IBatchRequestTxValidator } from './tx_validator.js';
9
/** Minimal surface needed to penalise misbehaving peers. */
export interface IPeerPenalizer {
    penalizePeer(peerId: PeerId, penalty: PeerErrorSeverity): void;
}
/** Book-keeping over the set of transactions the requester still needs to fetch. */
export interface ITxMetadataCollection {
    /** Hashes (as strings) of txs not yet fetched. */
    getMissingTxHashes(): Set<string>;
    /** Records a fetched tx; returns true when the tx was accepted (callers enqueue it only then). */
    markFetched(peerId: PeerId, tx: Tx): boolean;
    /** Picks which tx hashes to request next from the given peer (least-requested first — see BatchTxRequester). */
    getTxsToRequestFromThePeer(peer: PeerId): TxHash[];
    /** Records that the tx has been requested (feeds request-count prioritisation). */
    markRequested(txHash: TxHash): void;
    /** Flags the tx as currently in flight to a smart peer. */
    markInFlightBySmartPeer(txHash: TxHash): void;
    /** Clears the smart-peer in-flight flag for the tx. */
    markNotInFlightBySmartPeer(txHash: TxHash): void;
    /** Whether the tx has already been fetched. */
    alreadyFetched(txHash: TxHash): boolean;
    /** Records which txs the peer advertised as available. */
    markPeerHas(peerId: PeerId, txHashes: TxHash[]): void;
}
/**
 * Interface for BatchTxRequester dependencies that can be injected from upstream
 */
export interface BatchTxRequesterLibP2PService {
    /** ReqResp interface for sending requests to peers */
    reqResp: Pick<ReqRespInterface, 'sendBatchRequest' | 'sendRequestToPeer'>;
    /** Connection sampler for getting peer lists */
    connectionSampler: Pick<ConnectionSampler, 'getPeerListSortedByConnectionCountAsc'>;
    /** Configuration needed for transaction validation */
    txValidatorConfig: BatchRequestTxValidatorConfig;
    /** Peer scoring for penalizing peers */
    peerScoring: IPeerPenalizer;
}
/** Optional overrides for BatchTxRequester construction; unset fields fall back to defaults. */
export interface BatchTxRequesterOptions {
    /** Max concurrent smart-peer worker loops. */
    smartParallelWorkerCount?: number;
    /** Max concurrent dumb-peer worker loops. */
    dumbParallelWorkerCount?: number;
    /** Max transactions per request / chunk. */
    txBatchSize?: number;
    /** Failures in a row before a peer is considered bad. */
    badPeerThreshold?: number;
    /** Semaphore gating the smart worker loops (injected in tests). */
    semaphore?: ISemaphore;
    /** Pre-built peer collection; when absent, one is constructed internally. */
    peerCollection?: IPeerCollection;
    /** Signal to cancel the requester early. */
    abortSignal?: AbortSignal;
    /** Optional tx validator for testing - if not provided, one is created from p2pService.txValidatorConfig */
    txValidator?: IBatchRequestTxValidator;
}
46
+ //# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW50ZXJmYWNlLmQudHMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi8uLi8uLi8uLi9zcmMvc2VydmljZXMvcmVxcmVzcC9iYXRjaC10eC1yZXF1ZXN0ZXIvaW50ZXJmYWNlLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sS0FBSyxFQUFFLFVBQVUsRUFBRSxNQUFNLHlCQUF5QixDQUFDO0FBQzFELE9BQU8sS0FBSyxFQUFFLGlCQUFpQixFQUFFLE1BQU0sbUJBQW1CLENBQUM7QUFDM0QsT0FBTyxLQUFLLEVBQUUsRUFBRSxFQUFFLE1BQU0sRUFBRSxNQUFNLGtCQUFrQixDQUFDO0FBRW5ELE9BQU8sS0FBSyxFQUFFLE1BQU0sRUFBRSxNQUFNLG1CQUFtQixDQUFDO0FBRWhELE9BQU8sS0FBSyxFQUFFLGlCQUFpQixFQUFFLE1BQU0sNkNBQTZDLENBQUM7QUFDckYsT0FBTyxLQUFLLEVBQUUsZ0JBQWdCLEVBQUUsTUFBTSxpQkFBaUIsQ0FBQztBQUN4RCxPQUFPLEtBQUssRUFBRSxlQUFlLEVBQUUsTUFBTSxzQkFBc0IsQ0FBQztBQUM1RCxPQUFPLEtBQUssRUFBRSw2QkFBNkIsRUFBRSx3QkFBd0IsRUFBRSxNQUFNLG1CQUFtQixDQUFDO0FBRWpHLE1BQU0sV0FBVyxjQUFjO0lBQzdCLFlBQVksQ0FBQyxNQUFNLEVBQUUsTUFBTSxFQUFFLE9BQU8sRUFBRSxpQkFBaUIsR0FBRyxJQUFJLENBQUM7Q0FDaEU7QUFFRCxNQUFNLFdBQVcscUJBQXFCO0lBQ3BDLGtCQUFrQixJQUFJLEdBQUcsQ0FBQyxNQUFNLENBQUMsQ0FBQztJQUNsQyxXQUFXLENBQUMsTUFBTSxFQUFFLE1BQU0sRUFBRSxFQUFFLEVBQUUsRUFBRSxHQUFHLE9BQU8sQ0FBQztJQUM3QywwQkFBMEIsQ0FBQyxJQUFJLEVBQUUsTUFBTSxHQUFHLE1BQU0sRUFBRSxDQUFDO0lBQ25ELGFBQWEsQ0FBQyxNQUFNLEVBQUUsTUFBTSxHQUFHLElBQUksQ0FBQztJQUNwQyx1QkFBdUIsQ0FBQyxNQUFNLEVBQUUsTUFBTSxHQUFHLElBQUksQ0FBQztJQUM5QywwQkFBMEIsQ0FBQyxNQUFNLEVBQUUsTUFBTSxHQUFHLElBQUksQ0FBQztJQUNqRCxjQUFjLENBQUMsTUFBTSxFQUFFLE1BQU0sR0FBRyxPQUFPLENBQUM7SUFFeEMsV0FBVyxDQUFDLE1BQU0sRUFBRSxNQUFNLEVBQUUsUUFBUSxFQUFFLE1BQU0sRUFBRSxHQUFHLElBQUksQ0FBQztDQUN2RDtBQUVEOztHQUVHO0FBQ0gsTUFBTSxXQUFXLDZCQUE2QjtJQUM1QyxzREFBc0Q7SUFDdEQsT0FBTyxFQUFFLElBQUksQ0FBQyxnQkFBZ0IsRUFBRSxrQkFBa0IsR0FBRyxtQkFBbUIsQ0FBQyxDQUFDO0lBQzFFLGdEQUFnRDtJQUNoRCxpQkFBaUIsRUFBRSxJQUFJLENBQUMsaUJBQWlCLEVBQUUsdUNBQXVDLENBQUMsQ0FBQztJQUNwRixzREFBc0Q7SUFDdEQsaUJBQWlCLEVBQUUsNkJBQTZCLENBQUM7SUFDakQsd0NBQXdDO0lBQ3hDLFdBQVcsRUFBRSxjQUFjLENBQUM7Q0FDN0I7QUFFRCxNQUFNLFdBQVcsdUJBQXVCO0lBQ3RDLHdCQUF3QixDQUFDLEVBQUUsTUFBTSxDQUFDO0lBQ2xDLHVCQUF1QixDQUFDLEVBQUUs
TUFBTSxDQUFDO0lBQ2pDLFdBQVcsQ0FBQyxFQUFFLE1BQU0sQ0FBQztJQUNyQixnQkFBZ0IsQ0FBQyxFQUFFLE1BQU0sQ0FBQztJQUUxQixTQUFTLENBQUMsRUFBRSxVQUFVLENBQUM7SUFDdkIsY0FBYyxDQUFDLEVBQUUsZUFBZSxDQUFDO0lBQ2pDLFdBQVcsQ0FBQyxFQUFFLFdBQVcsQ0FBQztJQUMxQiw0R0FBNEc7SUFDNUcsV0FBVyxDQUFDLEVBQUUsd0JBQXdCLENBQUM7Q0FDeEMifQ==
@@ -0,0 +1 @@
1
+ {"version":3,"file":"interface.d.ts","sourceRoot":"","sources":["../../../../src/services/reqresp/batch-tx-requester/interface.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,yBAAyB,CAAC;AAC1D,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,mBAAmB,CAAC;AAC3D,OAAO,KAAK,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,kBAAkB,CAAC;AAEnD,OAAO,KAAK,EAAE,MAAM,EAAE,MAAM,mBAAmB,CAAC;AAEhD,OAAO,KAAK,EAAE,iBAAiB,EAAE,MAAM,6CAA6C,CAAC;AACrF,OAAO,KAAK,EAAE,gBAAgB,EAAE,MAAM,iBAAiB,CAAC;AACxD,OAAO,KAAK,EAAE,eAAe,EAAE,MAAM,sBAAsB,CAAC;AAC5D,OAAO,KAAK,EAAE,6BAA6B,EAAE,wBAAwB,EAAE,MAAM,mBAAmB,CAAC;AAEjG,MAAM,WAAW,cAAc;IAC7B,YAAY,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,GAAG,IAAI,CAAC;CAChE;AAED,MAAM,WAAW,qBAAqB;IACpC,kBAAkB,IAAI,GAAG,CAAC,MAAM,CAAC,CAAC;IAClC,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,EAAE,EAAE,EAAE,GAAG,OAAO,CAAC;IAC7C,0BAA0B,CAAC,IAAI,EAAE,MAAM,GAAG,MAAM,EAAE,CAAC;IACnD,aAAa,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IACpC,uBAAuB,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IAC9C,0BAA0B,CAAC,MAAM,EAAE,MAAM,GAAG,IAAI,CAAC;IACjD,cAAc,CAAC,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC;IAExC,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC;CACvD;AAED;;GAEG;AACH,MAAM,WAAW,6BAA6B;IAC5C,sDAAsD;IACtD,OAAO,EAAE,IAAI,CAAC,gBAAgB,EAAE,kBAAkB,GAAG,mBAAmB,CAAC,CAAC;IAC1E,gDAAgD;IAChD,iBAAiB,EAAE,IAAI,CAAC,iBAAiB,EAAE,uCAAuC,CAAC,CAAC;IACpF,sDAAsD;IACtD,iBAAiB,EAAE,6BAA6B,CAAC;IACjD,wCAAwC;IACxC,WAAW,EAAE,cAAc,CAAC;CAC7B;AAED,MAAM,WAAW,uBAAuB;IACtC,wBAAwB,CAAC,EAAE,MAAM,CAAC;IAClC,uBAAuB,CAAC,EAAE,MAAM,CAAC;IACjC,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,gBAAgB,CAAC,EAAE,MAAM,CAAC;IAE1B,SAAS,CAAC,EAAE,UAAU,CAAC;IACvB,cAAc,CAAC,EAAE,eAAe,CAAC;IACjC,WAAW,CAAC,EAAE,WAAW,CAAC;IAC1B,4GAA4G;IAC5G,WAAW,CAAC,EAAE,wBAAwB,CAAC;CACxC"}