@aztec/p2p 0.0.1-commit.8afd444 → 0.0.1-commit.8f9871590
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dest/client/factory.d.ts +5 -5
- package/dest/client/factory.d.ts.map +1 -1
- package/dest/client/factory.js +44 -10
- package/dest/client/interface.d.ts +37 -15
- package/dest/client/interface.d.ts.map +1 -1
- package/dest/client/p2p_client.d.ts +35 -36
- package/dest/client/p2p_client.d.ts.map +1 -1
- package/dest/client/p2p_client.js +114 -138
- package/dest/client/test/tx_proposal_collector/proposal_tx_collector_worker.js +1 -1
- package/dest/config.d.ts +23 -5
- package/dest/config.d.ts.map +1 -1
- package/dest/config.js +16 -1
- package/dest/index.d.ts +2 -1
- package/dest/index.d.ts.map +1 -1
- package/dest/index.js +1 -0
- package/dest/mem_pools/attestation_pool/attestation_pool.d.ts +104 -88
- package/dest/mem_pools/attestation_pool/attestation_pool.d.ts.map +1 -1
- package/dest/mem_pools/attestation_pool/attestation_pool.js +441 -3
- package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.d.ts +2 -2
- package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.d.ts.map +1 -1
- package/dest/mem_pools/attestation_pool/attestation_pool_test_suite.js +353 -87
- package/dest/mem_pools/attestation_pool/index.d.ts +2 -3
- package/dest/mem_pools/attestation_pool/index.d.ts.map +1 -1
- package/dest/mem_pools/attestation_pool/index.js +1 -2
- package/dest/mem_pools/attestation_pool/mocks.d.ts +2 -2
- package/dest/mem_pools/attestation_pool/mocks.d.ts.map +1 -1
- package/dest/mem_pools/attestation_pool/mocks.js +2 -2
- package/dest/mem_pools/index.d.ts +3 -2
- package/dest/mem_pools/index.d.ts.map +1 -1
- package/dest/mem_pools/index.js +1 -1
- package/dest/mem_pools/interface.d.ts +5 -5
- package/dest/mem_pools/interface.d.ts.map +1 -1
- package/dest/mem_pools/tx_pool/eviction/invalid_txs_after_mining_rule.js +3 -3
- package/dest/mem_pools/tx_pool_v2/archive/index.d.ts +2 -0
- package/dest/mem_pools/tx_pool_v2/archive/index.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/archive/index.js +1 -0
- package/dest/mem_pools/tx_pool_v2/archive/tx_archive.d.ts +43 -0
- package/dest/mem_pools/tx_pool_v2/archive/tx_archive.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/archive/tx_archive.js +103 -0
- package/dest/mem_pools/tx_pool_v2/deleted_pool.d.ts +102 -0
- package/dest/mem_pools/tx_pool_v2/deleted_pool.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/deleted_pool.js +242 -0
- package/dest/mem_pools/tx_pool_v2/eviction/eviction_manager.d.ts +47 -0
- package/dest/mem_pools/tx_pool_v2/eviction/eviction_manager.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/eviction_manager.js +119 -0
- package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.d.ts +17 -0
- package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.js +93 -0
- package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.d.ts +19 -0
- package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.js +89 -0
- package/dest/mem_pools/tx_pool_v2/eviction/index.d.ts +10 -0
- package/dest/mem_pools/tx_pool_v2/eviction/index.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/index.js +11 -0
- package/dest/mem_pools/tx_pool_v2/eviction/interfaces.d.ts +131 -0
- package/dest/mem_pools/tx_pool_v2/eviction/interfaces.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/interfaces.js +17 -0
- package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.d.ts +15 -0
- package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.js +65 -0
- package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.d.ts +17 -0
- package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.js +93 -0
- package/dest/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.d.ts +16 -0
- package/dest/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.js +72 -0
- package/dest/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.d.ts +20 -0
- package/dest/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.js +63 -0
- package/dest/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.d.ts +15 -0
- package/dest/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.js +19 -0
- package/dest/mem_pools/tx_pool_v2/index.d.ts +6 -0
- package/dest/mem_pools/tx_pool_v2/index.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/index.js +5 -0
- package/dest/mem_pools/tx_pool_v2/interfaces.d.ts +201 -0
- package/dest/mem_pools/tx_pool_v2/interfaces.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/interfaces.js +7 -0
- package/dest/mem_pools/tx_pool_v2/tx_metadata.d.ts +95 -0
- package/dest/mem_pools/tx_pool_v2/tx_metadata.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/tx_metadata.js +128 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_bench_metrics.d.ts +26 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_bench_metrics.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_bench_metrics.js +70 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_indices.d.ts +105 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_indices.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_indices.js +345 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_v2.d.ts +57 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_v2.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_v2.js +160 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_v2_impl.d.ts +71 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_v2_impl.d.ts.map +1 -0
- package/dest/mem_pools/tx_pool_v2/tx_pool_v2_impl.js +780 -0
- package/dest/msg_validators/attestation_validator/fisherman_attestation_validator.d.ts +3 -3
- package/dest/msg_validators/attestation_validator/fisherman_attestation_validator.d.ts.map +1 -1
- package/dest/msg_validators/tx_validator/aggregate_tx_validator.d.ts +3 -3
- package/dest/msg_validators/tx_validator/aggregate_tx_validator.d.ts.map +1 -1
- package/dest/msg_validators/tx_validator/block_header_validator.d.ts +16 -3
- package/dest/msg_validators/tx_validator/block_header_validator.d.ts.map +1 -1
- package/dest/msg_validators/tx_validator/block_header_validator.js +1 -1
- package/dest/msg_validators/tx_validator/double_spend_validator.d.ts +13 -3
- package/dest/msg_validators/tx_validator/double_spend_validator.d.ts.map +1 -1
- package/dest/msg_validators/tx_validator/double_spend_validator.js +4 -4
- package/dest/msg_validators/tx_validator/timestamp_validator.d.ts +20 -4
- package/dest/msg_validators/tx_validator/timestamp_validator.d.ts.map +1 -1
- package/dest/msg_validators/tx_validator/timestamp_validator.js +6 -6
- package/dest/services/dummy_service.d.ts +10 -2
- package/dest/services/dummy_service.d.ts.map +1 -1
- package/dest/services/dummy_service.js +6 -0
- package/dest/services/encoding.d.ts +2 -2
- package/dest/services/encoding.d.ts.map +1 -1
- package/dest/services/encoding.js +2 -2
- package/dest/services/gossipsub/index.d.ts +3 -0
- package/dest/services/gossipsub/index.d.ts.map +1 -0
- package/dest/services/gossipsub/index.js +2 -0
- package/dest/services/gossipsub/scoring.d.ts +21 -3
- package/dest/services/gossipsub/scoring.d.ts.map +1 -1
- package/dest/services/gossipsub/scoring.js +24 -7
- package/dest/services/gossipsub/topic_score_params.d.ts +161 -0
- package/dest/services/gossipsub/topic_score_params.d.ts.map +1 -0
- package/dest/services/gossipsub/topic_score_params.js +324 -0
- package/dest/services/libp2p/libp2p_service.d.ts +84 -35
- package/dest/services/libp2p/libp2p_service.d.ts.map +1 -1
- package/dest/services/libp2p/libp2p_service.js +370 -275
- package/dest/services/peer-manager/peer_scoring.d.ts +1 -1
- package/dest/services/peer-manager/peer_scoring.d.ts.map +1 -1
- package/dest/services/peer-manager/peer_scoring.js +25 -2
- package/dest/services/reqresp/batch-tx-requester/batch_tx_requester.d.ts +4 -4
- package/dest/services/reqresp/batch-tx-requester/batch_tx_requester.d.ts.map +1 -1
- package/dest/services/reqresp/batch-tx-requester/batch_tx_requester.js +8 -8
- package/dest/services/reqresp/interface.d.ts +10 -1
- package/dest/services/reqresp/interface.d.ts.map +1 -1
- package/dest/services/reqresp/interface.js +15 -1
- package/dest/services/reqresp/protocols/block_txs/block_txs_handler.d.ts +7 -5
- package/dest/services/reqresp/protocols/block_txs/block_txs_handler.d.ts.map +1 -1
- package/dest/services/reqresp/protocols/block_txs/block_txs_handler.js +16 -11
- package/dest/services/reqresp/protocols/block_txs/block_txs_reqresp.d.ts +21 -10
- package/dest/services/reqresp/protocols/block_txs/block_txs_reqresp.d.ts.map +1 -1
- package/dest/services/reqresp/protocols/block_txs/block_txs_reqresp.js +27 -11
- package/dest/services/reqresp/protocols/tx.d.ts +7 -1
- package/dest/services/reqresp/protocols/tx.d.ts.map +1 -1
- package/dest/services/reqresp/protocols/tx.js +20 -0
- package/dest/services/reqresp/reqresp.d.ts +1 -1
- package/dest/services/reqresp/reqresp.d.ts.map +1 -1
- package/dest/services/reqresp/reqresp.js +11 -4
- package/dest/services/service.d.ts +35 -1
- package/dest/services/service.d.ts.map +1 -1
- package/dest/services/tx_collection/config.d.ts +22 -4
- package/dest/services/tx_collection/config.d.ts.map +1 -1
- package/dest/services/tx_collection/config.js +49 -3
- package/dest/services/tx_collection/fast_tx_collection.d.ts +6 -5
- package/dest/services/tx_collection/fast_tx_collection.d.ts.map +1 -1
- package/dest/services/tx_collection/fast_tx_collection.js +27 -17
- package/dest/services/tx_collection/file_store_tx_collection.d.ts +53 -0
- package/dest/services/tx_collection/file_store_tx_collection.d.ts.map +1 -0
- package/dest/services/tx_collection/file_store_tx_collection.js +165 -0
- package/dest/services/tx_collection/file_store_tx_source.d.ts +28 -0
- package/dest/services/tx_collection/file_store_tx_source.d.ts.map +1 -0
- package/dest/services/tx_collection/file_store_tx_source.js +59 -0
- package/dest/services/tx_collection/index.d.ts +3 -2
- package/dest/services/tx_collection/index.d.ts.map +1 -1
- package/dest/services/tx_collection/index.js +1 -0
- package/dest/services/tx_collection/proposal_tx_collector.d.ts +12 -12
- package/dest/services/tx_collection/proposal_tx_collector.d.ts.map +1 -1
- package/dest/services/tx_collection/proposal_tx_collector.js +4 -5
- package/dest/services/tx_collection/slow_tx_collection.d.ts +6 -2
- package/dest/services/tx_collection/slow_tx_collection.d.ts.map +1 -1
- package/dest/services/tx_collection/slow_tx_collection.js +55 -23
- package/dest/services/tx_collection/tx_collection.d.ts +19 -7
- package/dest/services/tx_collection/tx_collection.d.ts.map +1 -1
- package/dest/services/tx_collection/tx_collection.js +75 -3
- package/dest/services/tx_collection/tx_collection_sink.d.ts +15 -6
- package/dest/services/tx_collection/tx_collection_sink.d.ts.map +1 -1
- package/dest/services/tx_collection/tx_collection_sink.js +13 -7
- package/dest/services/tx_file_store/config.d.ts +1 -3
- package/dest/services/tx_file_store/config.d.ts.map +1 -1
- package/dest/services/tx_file_store/config.js +0 -4
- package/dest/services/tx_file_store/tx_file_store.d.ts +4 -3
- package/dest/services/tx_file_store/tx_file_store.d.ts.map +1 -1
- package/dest/services/tx_file_store/tx_file_store.js +8 -5
- package/dest/services/tx_provider.d.ts +3 -3
- package/dest/services/tx_provider.d.ts.map +1 -1
- package/dest/services/tx_provider.js +5 -4
- package/dest/test-helpers/make-test-p2p-clients.d.ts +3 -3
- package/dest/test-helpers/make-test-p2p-clients.d.ts.map +1 -1
- package/dest/test-helpers/mock-pubsub.d.ts +27 -1
- package/dest/test-helpers/mock-pubsub.d.ts.map +1 -1
- package/dest/test-helpers/mock-pubsub.js +97 -2
- package/dest/test-helpers/reqresp-nodes.d.ts +1 -1
- package/dest/test-helpers/reqresp-nodes.d.ts.map +1 -1
- package/dest/test-helpers/reqresp-nodes.js +2 -1
- package/dest/test-helpers/testbench-utils.d.ts +40 -38
- package/dest/test-helpers/testbench-utils.d.ts.map +1 -1
- package/dest/test-helpers/testbench-utils.js +128 -59
- package/dest/testbench/p2p_client_testbench_worker.js +4 -4
- package/package.json +14 -14
- package/src/client/factory.ts +81 -13
- package/src/client/interface.ts +45 -14
- package/src/client/p2p_client.ts +151 -161
- package/src/client/test/tx_proposal_collector/proposal_tx_collector_worker.ts +1 -1
- package/src/config.ts +34 -2
- package/src/index.ts +1 -0
- package/src/mem_pools/attestation_pool/attestation_pool.ts +496 -91
- package/src/mem_pools/attestation_pool/attestation_pool_test_suite.ts +442 -102
- package/src/mem_pools/attestation_pool/index.ts +9 -2
- package/src/mem_pools/attestation_pool/mocks.ts +2 -1
- package/src/mem_pools/index.ts +4 -1
- package/src/mem_pools/interface.ts +4 -4
- package/src/mem_pools/tx_pool/README.md +1 -1
- package/src/mem_pools/tx_pool/eviction/invalid_txs_after_mining_rule.ts +3 -3
- package/src/mem_pools/tx_pool_v2/README.md +275 -0
- package/src/mem_pools/tx_pool_v2/archive/index.ts +1 -0
- package/src/mem_pools/tx_pool_v2/archive/tx_archive.ts +120 -0
- package/src/mem_pools/tx_pool_v2/deleted_pool.ts +310 -0
- package/src/mem_pools/tx_pool_v2/eviction/eviction_manager.ts +147 -0
- package/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_eviction_rule.ts +121 -0
- package/src/mem_pools/tx_pool_v2/eviction/fee_payer_balance_pre_add_rule.ts +111 -0
- package/src/mem_pools/tx_pool_v2/eviction/index.ts +23 -0
- package/src/mem_pools/tx_pool_v2/eviction/interfaces.ts +164 -0
- package/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_mining_rule.ts +74 -0
- package/src/mem_pools/tx_pool_v2/eviction/invalid_txs_after_reorg_rule.ts +101 -0
- package/src/mem_pools/tx_pool_v2/eviction/low_priority_eviction_rule.ts +88 -0
- package/src/mem_pools/tx_pool_v2/eviction/low_priority_pre_add_rule.ts +72 -0
- package/src/mem_pools/tx_pool_v2/eviction/nullifier_conflict_rule.ts +31 -0
- package/src/mem_pools/tx_pool_v2/index.ts +12 -0
- package/src/mem_pools/tx_pool_v2/interfaces.ts +233 -0
- package/src/mem_pools/tx_pool_v2/tx_metadata.ts +211 -0
- package/src/mem_pools/tx_pool_v2/tx_pool_bench_metrics.ts +77 -0
- package/src/mem_pools/tx_pool_v2/tx_pool_indices.ts +433 -0
- package/src/mem_pools/tx_pool_v2/tx_pool_v2.ts +218 -0
- package/src/mem_pools/tx_pool_v2/tx_pool_v2_impl.ts +924 -0
- package/src/msg_validators/attestation_validator/fisherman_attestation_validator.ts +2 -2
- package/src/msg_validators/tx_validator/aggregate_tx_validator.ts +2 -2
- package/src/msg_validators/tx_validator/block_header_validator.ts +15 -3
- package/src/msg_validators/tx_validator/double_spend_validator.ts +11 -6
- package/src/msg_validators/tx_validator/timestamp_validator.ts +23 -18
- package/src/services/dummy_service.ts +12 -0
- package/src/services/encoding.ts +2 -2
- package/src/services/gossipsub/README.md +626 -0
- package/src/services/gossipsub/index.ts +2 -0
- package/src/services/gossipsub/scoring.ts +29 -5
- package/src/services/gossipsub/topic_score_params.ts +451 -0
- package/src/services/libp2p/libp2p_service.ts +372 -277
- package/src/services/peer-manager/peer_scoring.ts +25 -0
- package/src/services/reqresp/batch-tx-requester/README.md +7 -7
- package/src/services/reqresp/batch-tx-requester/batch_tx_requester.ts +11 -11
- package/src/services/reqresp/interface.ts +26 -1
- package/src/services/reqresp/protocols/block_txs/block_txs_handler.ts +23 -14
- package/src/services/reqresp/protocols/block_txs/block_txs_reqresp.ts +38 -15
- package/src/services/reqresp/protocols/tx.ts +22 -0
- package/src/services/reqresp/reqresp.ts +13 -3
- package/src/services/service.ts +40 -0
- package/src/services/tx_collection/config.ts +74 -6
- package/src/services/tx_collection/fast_tx_collection.ts +28 -26
- package/src/services/tx_collection/file_store_tx_collection.ts +198 -0
- package/src/services/tx_collection/file_store_tx_source.ts +73 -0
- package/src/services/tx_collection/index.ts +2 -1
- package/src/services/tx_collection/proposal_tx_collector.ts +12 -14
- package/src/services/tx_collection/slow_tx_collection.ts +64 -30
- package/src/services/tx_collection/tx_collection.ts +109 -13
- package/src/services/tx_collection/tx_collection_sink.ts +17 -7
- package/src/services/tx_file_store/config.ts +0 -6
- package/src/services/tx_file_store/tx_file_store.ts +9 -7
- package/src/services/tx_provider.ts +8 -7
- package/src/test-helpers/make-test-p2p-clients.ts +3 -3
- package/src/test-helpers/mock-pubsub.ts +133 -3
- package/src/test-helpers/reqresp-nodes.ts +2 -1
- package/src/test-helpers/testbench-utils.ts +127 -71
- package/src/testbench/p2p_client_testbench_worker.ts +4 -4
- package/dest/mem_pools/attestation_pool/kv_attestation_pool.d.ts +0 -40
- package/dest/mem_pools/attestation_pool/kv_attestation_pool.d.ts.map +0 -1
- package/dest/mem_pools/attestation_pool/kv_attestation_pool.js +0 -218
- package/dest/mem_pools/attestation_pool/memory_attestation_pool.d.ts +0 -31
- package/dest/mem_pools/attestation_pool/memory_attestation_pool.d.ts.map +0 -1
- package/dest/mem_pools/attestation_pool/memory_attestation_pool.js +0 -180
- package/src/mem_pools/attestation_pool/kv_attestation_pool.ts +0 -320
- package/src/mem_pools/attestation_pool/memory_attestation_pool.ts +0 -264
|
@@ -2,7 +2,6 @@ import { BlockNumber } from '@aztec/foundation/branded-types';
|
|
|
2
2
|
import { times } from '@aztec/foundation/collection';
|
|
3
3
|
import { AbortError, TimeoutError } from '@aztec/foundation/error';
|
|
4
4
|
import { type Logger, createLogger } from '@aztec/foundation/log';
|
|
5
|
-
import { boundInclusive } from '@aztec/foundation/number';
|
|
6
5
|
import { promiseWithResolvers } from '@aztec/foundation/promise';
|
|
7
6
|
import { sleep } from '@aztec/foundation/sleep';
|
|
8
7
|
import { DateProvider, elapsed } from '@aztec/foundation/timer';
|
|
@@ -14,37 +13,35 @@ import type { PeerId } from '@libp2p/interface';
|
|
|
14
13
|
|
|
15
14
|
import type { BatchTxRequesterConfig } from '../reqresp/batch-tx-requester/config.js';
|
|
16
15
|
import type { BatchTxRequesterLibP2PService } from '../reqresp/batch-tx-requester/interface.js';
|
|
17
|
-
import { ReqRespSubProtocol } from '../reqresp/interface.js';
|
|
18
|
-
import { chunkTxHashesRequest } from '../reqresp/protocols/tx.js';
|
|
19
16
|
import type { TxCollectionConfig } from './config.js';
|
|
20
17
|
import {
|
|
21
18
|
BatchTxRequesterCollector,
|
|
22
|
-
type
|
|
19
|
+
type MissingTxsCollector,
|
|
23
20
|
SendBatchRequestCollector,
|
|
24
21
|
} from './proposal_tx_collector.js';
|
|
25
22
|
import type { FastCollectionRequest, FastCollectionRequestInput } from './tx_collection.js';
|
|
26
|
-
import type { TxCollectionSink } from './tx_collection_sink.js';
|
|
23
|
+
import type { TxAddContext, TxCollectionSink } from './tx_collection_sink.js';
|
|
27
24
|
import type { TxSource } from './tx_source.js';
|
|
28
25
|
|
|
29
26
|
export class FastTxCollection {
|
|
30
27
|
// eslint-disable-next-line aztec-custom/no-non-primitive-in-collections
|
|
31
28
|
protected requests: Set<FastCollectionRequest> = new Set();
|
|
32
|
-
private
|
|
29
|
+
private missingTxsCollector: MissingTxsCollector;
|
|
33
30
|
|
|
34
31
|
constructor(
|
|
35
|
-
|
|
32
|
+
p2pService: BatchTxRequesterLibP2PService,
|
|
36
33
|
private nodes: TxSource[],
|
|
37
34
|
private txCollectionSink: TxCollectionSink,
|
|
38
35
|
private config: TxCollectionConfig,
|
|
39
36
|
private dateProvider: DateProvider = new DateProvider(),
|
|
40
37
|
private log: Logger = createLogger('p2p:tx_collection_service'),
|
|
41
|
-
|
|
38
|
+
missingTxsCollector?: MissingTxsCollector,
|
|
42
39
|
) {
|
|
43
40
|
const batchTxRequesterConfig = this.config as Partial<BatchTxRequesterConfig>;
|
|
44
|
-
const
|
|
45
|
-
this.
|
|
46
|
-
|
|
47
|
-
(
|
|
41
|
+
const missingTxsCollectorType = this.config.txCollectionMissingTxsCollectorType;
|
|
42
|
+
this.missingTxsCollector =
|
|
43
|
+
missingTxsCollector ??
|
|
44
|
+
(missingTxsCollectorType === 'old'
|
|
48
45
|
? new SendBatchRequestCollector(p2pService)
|
|
49
46
|
: new BatchTxRequesterCollector(p2pService, log, dateProvider, undefined, batchTxRequesterConfig));
|
|
50
47
|
}
|
|
@@ -80,7 +77,7 @@ export class FastTxCollection {
|
|
|
80
77
|
// This promise is used to await for the collection to finish during the main collectFast method.
|
|
81
78
|
// It gets resolved in `foundTxs` when all txs have been collected, or rejected if the request is aborted or hits the deadline.
|
|
82
79
|
const promise = promiseWithResolvers<void>();
|
|
83
|
-
setTimeout(() => promise.reject(new TimeoutError(`Timed out while collecting txs`)), timeout);
|
|
80
|
+
const timeoutTimer = setTimeout(() => promise.reject(new TimeoutError(`Timed out while collecting txs`)), timeout);
|
|
84
81
|
|
|
85
82
|
const request: FastCollectionRequest = {
|
|
86
83
|
...input,
|
|
@@ -92,6 +89,7 @@ export class FastTxCollection {
|
|
|
92
89
|
};
|
|
93
90
|
|
|
94
91
|
const [duration] = await elapsed(() => this.collectFast(request, { ...opts }));
|
|
92
|
+
clearTimeout(timeoutTimer);
|
|
95
93
|
|
|
96
94
|
this.log.verbose(
|
|
97
95
|
`Collected ${request.foundTxs.size} txs out of ${txHashes.length} for ${input.type} at slot ${blockInfo.slotNumber}`,
|
|
@@ -237,6 +235,7 @@ export class FastTxCollection {
|
|
|
237
235
|
method: 'fast-node-rpc',
|
|
238
236
|
...request.blockInfo,
|
|
239
237
|
},
|
|
238
|
+
this.getAddContext(request),
|
|
240
239
|
);
|
|
241
240
|
|
|
242
241
|
// Clear from the active requests the txs we just requested
|
|
@@ -258,8 +257,6 @@ export class FastTxCollection {
|
|
|
258
257
|
private async collectFastViaReqResp(request: FastCollectionRequest, opts: { pinnedPeer?: PeerId }) {
|
|
259
258
|
const timeoutMs = +request.deadline - this.dateProvider.now();
|
|
260
259
|
const pinnedPeer = opts.pinnedPeer;
|
|
261
|
-
const maxPeers = boundInclusive(Math.ceil(request.missingTxHashes.size / 2), 8, 32);
|
|
262
|
-
const maxRetryAttempts = 5;
|
|
263
260
|
const blockInfo = request.blockInfo;
|
|
264
261
|
const slotNumber = blockInfo.slotNumber;
|
|
265
262
|
if (timeoutMs < 100) {
|
|
@@ -279,24 +276,20 @@ export class FastTxCollection {
|
|
|
279
276
|
await this.txCollectionSink.collect(
|
|
280
277
|
async txHashes => {
|
|
281
278
|
if (request.type === 'proposal') {
|
|
282
|
-
return await this.
|
|
279
|
+
return await this.missingTxsCollector.collectTxs(txHashes, request.blockProposal, pinnedPeer, timeoutMs);
|
|
283
280
|
} else if (request.type === 'block') {
|
|
284
|
-
const
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
maxPeers,
|
|
290
|
-
maxRetryAttempts,
|
|
291
|
-
);
|
|
292
|
-
|
|
293
|
-
return txs.flat();
|
|
281
|
+
const blockTxsSource = {
|
|
282
|
+
txHashes: request.block.body.txEffects.map(e => e.txHash),
|
|
283
|
+
archive: request.block.archive.root,
|
|
284
|
+
};
|
|
285
|
+
return await this.missingTxsCollector.collectTxs(txHashes, blockTxsSource, pinnedPeer, timeoutMs);
|
|
294
286
|
} else {
|
|
295
287
|
throw new Error(`Unknown request type: ${(request as any).type}`);
|
|
296
288
|
}
|
|
297
289
|
},
|
|
298
290
|
Array.from(request.missingTxHashes).map(txHash => TxHash.fromString(txHash)),
|
|
299
291
|
{ description: `reqresp for slot ${slotNumber}`, method: 'fast-req-resp', ...opts, ...request.blockInfo },
|
|
292
|
+
this.getAddContext(request),
|
|
300
293
|
);
|
|
301
294
|
} catch (err) {
|
|
302
295
|
this.log.error(`Error sending fast reqresp request for txs`, err, {
|
|
@@ -306,6 +299,15 @@ export class FastTxCollection {
|
|
|
306
299
|
}
|
|
307
300
|
}
|
|
308
301
|
|
|
302
|
+
/** Returns the TxAddContext for the given request, used by the sink to add txs to the pool correctly. */
|
|
303
|
+
private getAddContext(request: FastCollectionRequest): TxAddContext {
|
|
304
|
+
if (request.type === 'proposal') {
|
|
305
|
+
return { type: 'proposal', blockHeader: request.blockProposal.blockHeader };
|
|
306
|
+
} else {
|
|
307
|
+
return { type: 'mined', block: request.block };
|
|
308
|
+
}
|
|
309
|
+
}
|
|
310
|
+
|
|
309
311
|
/**
|
|
310
312
|
* Handle txs by marking them as found for the requests that are waiting for them, and resolves the request if all its txs have been found.
|
|
311
313
|
* Called internally and from the main tx collection manager whenever the tx pool emits a tx-added event.
|
|
@@ -0,0 +1,198 @@
|
|
|
1
|
+
import { type Logger, createLogger } from '@aztec/foundation/log';
|
|
2
|
+
import { type PromiseWithResolvers, promiseWithResolvers } from '@aztec/foundation/promise';
|
|
3
|
+
import { sleep } from '@aztec/foundation/sleep';
|
|
4
|
+
import { DateProvider } from '@aztec/foundation/timer';
|
|
5
|
+
import { Tx, TxHash } from '@aztec/stdlib/tx';
|
|
6
|
+
|
|
7
|
+
import type { FileStoreTxSource } from './file_store_tx_source.js';
|
|
8
|
+
import type { TxAddContext, TxCollectionSink } from './tx_collection_sink.js';
|
|
9
|
+
|
|
10
|
+
/** Configuration for a FileStoreTxCollection instance. */
|
|
11
|
+
export type FileStoreCollectionConfig = {
|
|
12
|
+
workerCount: number;
|
|
13
|
+
backoffBaseMs: number;
|
|
14
|
+
backoffMaxMs: number;
|
|
15
|
+
};
|
|
16
|
+
|
|
17
|
+
type FileStoreTxEntry = {
|
|
18
|
+
txHash: string;
|
|
19
|
+
context: TxAddContext;
|
|
20
|
+
deadline: Date;
|
|
21
|
+
attempts: number;
|
|
22
|
+
lastAttemptTime: number;
|
|
23
|
+
nextSourceIndex: number;
|
|
24
|
+
};
|
|
25
|
+
|
|
26
|
+
/**
|
|
27
|
+
* Collects txs from file stores as a fallback after P2P methods have been tried.
|
|
28
|
+
* Uses a shared worker pool that pulls entries with priority (fewest attempts first),
|
|
29
|
+
* retries with round-robin across sources, and applies exponential backoff between
|
|
30
|
+
* full cycles through all sources.
|
|
31
|
+
*/
|
|
32
|
+
export class FileStoreTxCollection {
|
|
33
|
+
/** Map from tx hash string to entry for all pending downloads. */
|
|
34
|
+
private entries = new Map<string, FileStoreTxEntry>();
|
|
35
|
+
|
|
36
|
+
/** Worker promises for the shared worker pool. */
|
|
37
|
+
private workers: Promise<void>[] = [];
|
|
38
|
+
|
|
39
|
+
/** Whether the worker pool is running. */
|
|
40
|
+
private running = false;
|
|
41
|
+
|
|
42
|
+
/** Signal used to wake sleeping workers when new entries arrive or stop is called. */
|
|
43
|
+
private wakeSignal: PromiseWithResolvers<void>;
|
|
44
|
+
|
|
45
|
+
constructor(
|
|
46
|
+
private readonly sources: FileStoreTxSource[],
|
|
47
|
+
private readonly txCollectionSink: TxCollectionSink,
|
|
48
|
+
private readonly config: FileStoreCollectionConfig,
|
|
49
|
+
private readonly dateProvider: DateProvider = new DateProvider(),
|
|
50
|
+
private readonly log: Logger = createLogger('p2p:file_store_tx_collection'),
|
|
51
|
+
) {
|
|
52
|
+
this.wakeSignal = promiseWithResolvers<void>();
|
|
53
|
+
}
|
|
54
|
+
|
|
55
|
+
/** Starts the shared worker pool. */
|
|
56
|
+
public start(): void {
|
|
57
|
+
if (this.sources.length === 0) {
|
|
58
|
+
this.log.debug('No file store sources configured');
|
|
59
|
+
return;
|
|
60
|
+
}
|
|
61
|
+
this.running = true;
|
|
62
|
+
for (let i = 0; i < this.config.workerCount; i++) {
|
|
63
|
+
this.workers.push(this.workerLoop());
|
|
64
|
+
}
|
|
65
|
+
}
|
|
66
|
+
|
|
67
|
+
/** Stops all workers and clears state. */
|
|
68
|
+
public async stop(): Promise<void> {
|
|
69
|
+
this.running = false;
|
|
70
|
+
this.wake();
|
|
71
|
+
await Promise.all(this.workers);
|
|
72
|
+
this.workers = [];
|
|
73
|
+
this.entries.clear();
|
|
74
|
+
}
|
|
75
|
+
|
|
76
|
+
/** Adds entries to the shared map and wakes workers. */
|
|
77
|
+
public startCollecting(txHashes: TxHash[], context: TxAddContext, deadline: Date): void {
|
|
78
|
+
if (this.sources.length === 0 || txHashes.length === 0) {
|
|
79
|
+
return;
|
|
80
|
+
}
|
|
81
|
+
if (+deadline <= this.dateProvider.now()) {
|
|
82
|
+
return;
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
for (const txHash of txHashes) {
|
|
86
|
+
const hashStr = txHash.toString();
|
|
87
|
+
if (!this.entries.has(hashStr)) {
|
|
88
|
+
this.entries.set(hashStr, {
|
|
89
|
+
txHash: hashStr,
|
|
90
|
+
context,
|
|
91
|
+
deadline,
|
|
92
|
+
attempts: 0,
|
|
93
|
+
lastAttemptTime: 0,
|
|
94
|
+
nextSourceIndex: Math.floor(Math.random() * this.sources.length),
|
|
95
|
+
});
|
|
96
|
+
}
|
|
97
|
+
}
|
|
98
|
+
this.wake();
|
|
99
|
+
}
|
|
100
|
+
|
|
101
|
+
/** Removes entries for txs that have been found elsewhere. */
|
|
102
|
+
public foundTxs(txs: Tx[]): void {
|
|
103
|
+
for (const tx of txs) {
|
|
104
|
+
this.entries.delete(tx.getTxHash().toString());
|
|
105
|
+
}
|
|
106
|
+
}
|
|
107
|
+
|
|
108
|
+
/** Clears all pending entries. */
|
|
109
|
+
public clearPending(): void {
|
|
110
|
+
this.entries.clear();
|
|
111
|
+
}
|
|
112
|
+
|
|
113
|
+
private async workerLoop(): Promise<void> {
|
|
114
|
+
while (this.running) {
|
|
115
|
+
const action = this.getNextAction();
|
|
116
|
+
if (action.type === 'sleep') {
|
|
117
|
+
await action.promise;
|
|
118
|
+
continue;
|
|
119
|
+
}
|
|
120
|
+
|
|
121
|
+
const entry = action.entry;
|
|
122
|
+
const source = this.sources[entry.nextSourceIndex % this.sources.length];
|
|
123
|
+
entry.nextSourceIndex++;
|
|
124
|
+
entry.attempts++;
|
|
125
|
+
entry.lastAttemptTime = this.dateProvider.now();
|
|
126
|
+
|
|
127
|
+
try {
|
|
128
|
+
const result = await this.txCollectionSink.collect(
|
|
129
|
+
hashes => source.getTxsByHash(hashes),
|
|
130
|
+
[TxHash.fromString(entry.txHash)],
|
|
131
|
+
{ description: `file-store ${source.getInfo()}`, method: 'file-store', fileStore: source.getInfo() },
|
|
132
|
+
entry.context,
|
|
133
|
+
);
|
|
134
|
+
if (result.txs.length > 0) {
|
|
135
|
+
this.entries.delete(entry.txHash);
|
|
136
|
+
}
|
|
137
|
+
} catch (err) {
|
|
138
|
+
this.log.trace(`Error downloading tx ${entry.txHash} from ${source.getInfo()}`, { err });
|
|
139
|
+
}
|
|
140
|
+
}
|
|
141
|
+
}
|
|
142
|
+
|
|
143
|
+
/** Single-pass scan: removes expired entries, finds the best ready entry, or computes sleep time. */
|
|
144
|
+
private getNextAction(): { type: 'process'; entry: FileStoreTxEntry } | { type: 'sleep'; promise: Promise<void> } {
|
|
145
|
+
const now = this.dateProvider.now();
|
|
146
|
+
let best: FileStoreTxEntry | undefined;
|
|
147
|
+
let earliestReadyAt = Infinity;
|
|
148
|
+
|
|
149
|
+
for (const [key, entry] of this.entries) {
|
|
150
|
+
if (+entry.deadline <= now) {
|
|
151
|
+
this.entries.delete(key);
|
|
152
|
+
continue;
|
|
153
|
+
}
|
|
154
|
+
const backoffMs = this.getBackoffMs(entry);
|
|
155
|
+
const readyAt = entry.lastAttemptTime + backoffMs;
|
|
156
|
+
if (readyAt > now) {
|
|
157
|
+
earliestReadyAt = Math.min(earliestReadyAt, readyAt);
|
|
158
|
+
continue;
|
|
159
|
+
}
|
|
160
|
+
if (!best || entry.attempts < best.attempts) {
|
|
161
|
+
best = entry;
|
|
162
|
+
}
|
|
163
|
+
}
|
|
164
|
+
|
|
165
|
+
if (best) {
|
|
166
|
+
return { type: 'process', entry: best };
|
|
167
|
+
}
|
|
168
|
+
if (earliestReadyAt < Infinity) {
|
|
169
|
+
return { type: 'sleep', promise: this.sleepOrWake(earliestReadyAt - now) };
|
|
170
|
+
}
|
|
171
|
+
return { type: 'sleep', promise: this.waitForWake() };
|
|
172
|
+
}
|
|
173
|
+
|
|
174
|
+
/** Computes backoff for an entry. Backoff applies after a full cycle through all sources. */
|
|
175
|
+
private getBackoffMs(entry: FileStoreTxEntry): number {
|
|
176
|
+
const fullCycles = Math.floor(entry.attempts / this.sources.length);
|
|
177
|
+
if (fullCycles === 0) {
|
|
178
|
+
return 0;
|
|
179
|
+
}
|
|
180
|
+
return Math.min(this.config.backoffBaseMs * Math.pow(2, fullCycles - 1), this.config.backoffMaxMs);
|
|
181
|
+
}
|
|
182
|
+
|
|
183
|
+
/** Resolves the current wake signal and creates a new one. */
|
|
184
|
+
private wake(): void {
|
|
185
|
+
this.wakeSignal.resolve();
|
|
186
|
+
this.wakeSignal = promiseWithResolvers<void>();
|
|
187
|
+
}
|
|
188
|
+
|
|
189
|
+
/** Waits until the wake signal is resolved. */
|
|
190
|
+
private async waitForWake(): Promise<void> {
|
|
191
|
+
await this.wakeSignal.promise;
|
|
192
|
+
}
|
|
193
|
+
|
|
194
|
+
/** Sleeps for the given duration or until the wake signal is resolved. */
|
|
195
|
+
private async sleepOrWake(ms: number): Promise<void> {
|
|
196
|
+
await Promise.race([sleep(ms), this.wakeSignal.promise]);
|
|
197
|
+
}
|
|
198
|
+
}
|
|
@@ -0,0 +1,73 @@
|
|
|
1
|
+
import { type Logger, createLogger } from '@aztec/foundation/log';
|
|
2
|
+
import { type ReadOnlyFileStore, createReadOnlyFileStore } from '@aztec/stdlib/file-store';
|
|
3
|
+
import { Tx, type TxHash } from '@aztec/stdlib/tx';
|
|
4
|
+
|
|
5
|
+
import type { TxSource } from './tx_source.js';
|
|
6
|
+
|
|
7
|
+
/** TxSource implementation that downloads txs from a file store. */
|
|
8
|
+
export class FileStoreTxSource implements TxSource {
|
|
9
|
+
private constructor(
|
|
10
|
+
private readonly fileStore: ReadOnlyFileStore,
|
|
11
|
+
private readonly baseUrl: string,
|
|
12
|
+
private readonly basePath: string,
|
|
13
|
+
private readonly log: Logger,
|
|
14
|
+
) {}
|
|
15
|
+
|
|
16
|
+
/**
|
|
17
|
+
* Creates a FileStoreTxSource from a URL.
|
|
18
|
+
* @param url - The file store URL (s3://, gs://, file://, http://, https://).
|
|
19
|
+
* @param log - Optional logger.
|
|
20
|
+
* @returns The FileStoreTxSource instance, or undefined if creation fails.
|
|
21
|
+
*/
|
|
22
|
+
public static async create(
|
|
23
|
+
url: string,
|
|
24
|
+
basePath: string,
|
|
25
|
+
log: Logger = createLogger('p2p:file_store_tx_source'),
|
|
26
|
+
): Promise<FileStoreTxSource | undefined> {
|
|
27
|
+
try {
|
|
28
|
+
const fileStore = await createReadOnlyFileStore(url, log);
|
|
29
|
+
if (!fileStore) {
|
|
30
|
+
log.warn(`Failed to create file store for URL: ${url}`);
|
|
31
|
+
return undefined;
|
|
32
|
+
}
|
|
33
|
+
return new FileStoreTxSource(fileStore, url, basePath, log);
|
|
34
|
+
} catch (err) {
|
|
35
|
+
log.warn(`Error creating file store for URL: ${url}`, { error: err });
|
|
36
|
+
return undefined;
|
|
37
|
+
}
|
|
38
|
+
}
|
|
39
|
+
|
|
40
|
+
public getInfo(): string {
|
|
41
|
+
return `file-store:${this.baseUrl}`;
|
|
42
|
+
}
|
|
43
|
+
|
|
44
|
+
public getTxsByHash(txHashes: TxHash[]): Promise<(Tx | undefined)[]> {
|
|
45
|
+
return Promise.all(
|
|
46
|
+
txHashes.map(async txHash => {
|
|
47
|
+
const path = `${this.basePath}/txs/${txHash.toString()}.bin`;
|
|
48
|
+
try {
|
|
49
|
+
const buffer = await this.fileStore.read(path);
|
|
50
|
+
return Tx.fromBuffer(buffer);
|
|
51
|
+
} catch {
|
|
52
|
+
// Tx not found or error reading - return undefined
|
|
53
|
+
return undefined;
|
|
54
|
+
}
|
|
55
|
+
}),
|
|
56
|
+
);
|
|
57
|
+
}
|
|
58
|
+
}
|
|
59
|
+
|
|
60
|
+
/**
|
|
61
|
+
* Creates FileStoreTxSource instances from URLs.
|
|
62
|
+
* @param urls - Array of file store URLs.
|
|
63
|
+
* @param log - Optional logger.
|
|
64
|
+
* @returns Array of successfully created FileStoreTxSource instances.
|
|
65
|
+
*/
|
|
66
|
+
export async function createFileStoreTxSources(
|
|
67
|
+
urls: string[],
|
|
68
|
+
basePath: string,
|
|
69
|
+
log: Logger = createLogger('p2p:file_store_tx_source'),
|
|
70
|
+
): Promise<FileStoreTxSource[]> {
|
|
71
|
+
const sources = await Promise.all(urls.map(url => FileStoreTxSource.create(url, basePath, log)));
|
|
72
|
+
return sources.filter((s): s is FileStoreTxSource => s !== undefined);
|
|
73
|
+
}
|
|
@@ -1,7 +1,8 @@
|
|
|
1
1
|
export { TxCollection, type FastCollectionRequestInput } from './tx_collection.js';
|
|
2
2
|
export { type TxSource, createNodeRpcTxSources, NodeRpcTxSource } from './tx_source.js';
|
|
3
3
|
export {
|
|
4
|
-
type
|
|
4
|
+
type MissingTxsCollector,
|
|
5
5
|
BatchTxRequesterCollector,
|
|
6
6
|
SendBatchRequestCollector,
|
|
7
7
|
} from './proposal_tx_collector.js';
|
|
8
|
+
export { FileStoreTxSource, createFileStoreTxSources } from './file_store_tx_source.js';
|
|
@@ -1,6 +1,5 @@
|
|
|
1
1
|
import type { Logger } from '@aztec/foundation/log';
|
|
2
2
|
import type { DateProvider } from '@aztec/foundation/timer';
|
|
3
|
-
import type { BlockProposal } from '@aztec/stdlib/p2p';
|
|
4
3
|
import type { Tx, TxHash } from '@aztec/stdlib/tx';
|
|
5
4
|
|
|
6
5
|
import type { PeerId } from '@libp2p/interface';
|
|
@@ -9,25 +8,24 @@ import { BatchTxRequester } from '../reqresp/batch-tx-requester/batch_tx_request
|
|
|
9
8
|
import type { BatchTxRequesterConfig } from '../reqresp/batch-tx-requester/config.js';
|
|
10
9
|
import type { BatchTxRequesterLibP2PService } from '../reqresp/batch-tx-requester/interface.js';
|
|
11
10
|
import type { IBatchRequestTxValidator } from '../reqresp/batch-tx-requester/tx_validator.js';
|
|
12
|
-
import { ReqRespSubProtocol } from '../reqresp/
|
|
13
|
-
import { chunkTxHashesRequest } from '../reqresp/protocols/tx.js';
|
|
11
|
+
import { type BlockTxsSource, ReqRespSubProtocol, chunkTxHashesRequest } from '../reqresp/index.js';
|
|
14
12
|
|
|
15
13
|
/**
|
|
16
|
-
* Strategy interface for collecting transactions for block
|
|
14
|
+
* Strategy interface for collecting missing transactions for a block or proposal.
|
|
17
15
|
* Allows swapping between different tx collection implementations for benchmarking.
|
|
18
16
|
*/
|
|
19
|
-
export interface
|
|
17
|
+
export interface MissingTxsCollector {
|
|
20
18
|
/**
|
|
21
|
-
* Collect transactions for a block proposal.
|
|
19
|
+
* Collect missing transactions for a block or proposal.
|
|
22
20
|
* @param txHashes - The transaction hashes to collect
|
|
23
|
-
* @param
|
|
24
|
-
* @param pinnedPeer - Optional peer
|
|
21
|
+
* @param blockTxsSource - The block or proposal containing the transactions
|
|
22
|
+
* @param pinnedPeer - Optional peer expected to have the transactions
|
|
25
23
|
* @param timeoutMs - Timeout in milliseconds
|
|
26
24
|
* @returns The collected transactions
|
|
27
25
|
*/
|
|
28
26
|
collectTxs(
|
|
29
27
|
txHashes: TxHash[],
|
|
30
|
-
|
|
28
|
+
blockTxsSource: BlockTxsSource,
|
|
31
29
|
pinnedPeer: PeerId | undefined,
|
|
32
30
|
timeoutMs: number,
|
|
33
31
|
): Promise<Tx[]>;
|
|
@@ -37,7 +35,7 @@ export interface ProposalTxCollector {
|
|
|
37
35
|
* Collects transactions using the BatchTxRequester implementation.
|
|
38
36
|
* This uses a smart/dumb peer strategy with parallel workers.
|
|
39
37
|
*/
|
|
40
|
-
export class BatchTxRequesterCollector implements
|
|
38
|
+
export class BatchTxRequesterCollector implements MissingTxsCollector {
|
|
41
39
|
constructor(
|
|
42
40
|
private p2pService: BatchTxRequesterLibP2PService,
|
|
43
41
|
private log: Logger,
|
|
@@ -48,7 +46,7 @@ export class BatchTxRequesterCollector implements ProposalTxCollector {
|
|
|
48
46
|
|
|
49
47
|
async collectTxs(
|
|
50
48
|
txHashes: TxHash[],
|
|
51
|
-
|
|
49
|
+
blockTxsSource: BlockTxsSource,
|
|
52
50
|
pinnedPeer: PeerId | undefined,
|
|
53
51
|
timeoutMs: number,
|
|
54
52
|
): Promise<Tx[]> {
|
|
@@ -61,7 +59,7 @@ export class BatchTxRequesterCollector implements ProposalTxCollector {
|
|
|
61
59
|
|
|
62
60
|
const batchRequester = new BatchTxRequester(
|
|
63
61
|
txHashes,
|
|
64
|
-
|
|
62
|
+
blockTxsSource,
|
|
65
63
|
pinnedPeer,
|
|
66
64
|
timeoutMs,
|
|
67
65
|
this.p2pService,
|
|
@@ -87,7 +85,7 @@ const DEFAULT_MAX_RETRY_ATTEMPTS = 3;
|
|
|
87
85
|
* Collects transactions using the sendBatchRequest implementation from ReqResp.
|
|
88
86
|
* This is the original implementation that balances requests across peers.
|
|
89
87
|
*/
|
|
90
|
-
export class SendBatchRequestCollector implements
|
|
88
|
+
export class SendBatchRequestCollector implements MissingTxsCollector {
|
|
91
89
|
constructor(
|
|
92
90
|
private p2pService: BatchTxRequesterLibP2PService,
|
|
93
91
|
private maxPeers: number = DEFAULT_MAX_PEERS,
|
|
@@ -96,7 +94,7 @@ export class SendBatchRequestCollector implements ProposalTxCollector {
|
|
|
96
94
|
|
|
97
95
|
async collectTxs(
|
|
98
96
|
txHashes: TxHash[],
|
|
99
|
-
|
|
97
|
+
_blockTxsSource: BlockTxsSource,
|
|
100
98
|
pinnedPeer: PeerId | undefined,
|
|
101
99
|
timeoutMs: number,
|
|
102
100
|
): Promise<Tx[]> {
|
|
@@ -91,6 +91,7 @@ export class SlowTxCollection {
|
|
|
91
91
|
|
|
92
92
|
for (const txHash of txHashes) {
|
|
93
93
|
this.missingTxs.set(txHash.toString(), {
|
|
94
|
+
block,
|
|
94
95
|
blockNumber: block.number,
|
|
95
96
|
deadline: this.getDeadlineForSlot(block.header.getSlot()),
|
|
96
97
|
readyForReqResp: this.nodes.length === 0, // If we have no nodes, we can start reqresp immediately
|
|
@@ -109,18 +110,26 @@ export class SlowTxCollection {
|
|
|
109
110
|
|
|
110
111
|
// Gather all missing txs that are not in fast collection and request them from the node
|
|
111
112
|
const missingTxs = this.getMissingTxsForSlowCollection();
|
|
112
|
-
|
|
113
|
-
if (missingTxHashes.length === 0) {
|
|
113
|
+
if (missingTxs.length === 0) {
|
|
114
114
|
return;
|
|
115
115
|
}
|
|
116
116
|
|
|
117
|
-
//
|
|
118
|
-
for (const
|
|
119
|
-
|
|
120
|
-
|
|
121
|
-
|
|
122
|
-
|
|
123
|
-
|
|
117
|
+
// Group by block so we pass the correct mined context to the sink
|
|
118
|
+
for (const entries of this.groupByBlock(missingTxs)) {
|
|
119
|
+
const block = entries[0][1].block;
|
|
120
|
+
const txHashes = entries.map(([txHash]) => TxHash.fromString(txHash));
|
|
121
|
+
for (const batch of chunk(txHashes, this.config.txCollectionNodeRpcMaxBatchSize)) {
|
|
122
|
+
await this.txCollectionSink.collect(
|
|
123
|
+
hashes => node.getTxsByHash(hashes),
|
|
124
|
+
batch,
|
|
125
|
+
{
|
|
126
|
+
description: `node ${node.getInfo()}`,
|
|
127
|
+
node: node.getInfo(),
|
|
128
|
+
method: 'slow-node-rpc',
|
|
129
|
+
},
|
|
130
|
+
{ type: 'mined', block },
|
|
131
|
+
);
|
|
132
|
+
}
|
|
124
133
|
}
|
|
125
134
|
|
|
126
135
|
// Mark every tx that is still missing as ready for reqresp.
|
|
@@ -149,25 +158,30 @@ export class SlowTxCollection {
|
|
|
149
158
|
|
|
150
159
|
const pinnedPeer = undefined;
|
|
151
160
|
const timeoutMs = this.config.txCollectionSlowReqRespTimeoutMs;
|
|
152
|
-
const maxPeers = boundInclusive(Math.ceil(missingTxs.length / 3), 4, 16);
|
|
153
161
|
const maxRetryAttempts = 3;
|
|
154
|
-
// Send a batch request via reqresp for the missing txs
|
|
155
|
-
await this.txCollectionSink.collect(
|
|
156
|
-
async txHashes => {
|
|
157
|
-
const txs = await this.reqResp.sendBatchRequest<ReqRespSubProtocol.TX>(
|
|
158
|
-
ReqRespSubProtocol.TX,
|
|
159
|
-
chunkTxHashesRequest(txHashes),
|
|
160
|
-
pinnedPeer,
|
|
161
|
-
timeoutMs,
|
|
162
|
-
maxPeers,
|
|
163
|
-
maxRetryAttempts,
|
|
164
|
-
);
|
|
165
162
|
|
|
166
|
-
|
|
167
|
-
|
|
168
|
-
|
|
169
|
-
|
|
170
|
-
|
|
163
|
+
// Group by block so we pass the correct mined context to the sink
|
|
164
|
+
for (const entries of this.groupByBlock(missingTxs)) {
|
|
165
|
+
const block = entries[0][1].block;
|
|
166
|
+
const txHashes = entries.map(([txHash]) => TxHash.fromString(txHash));
|
|
167
|
+
const maxPeers = boundInclusive(Math.ceil(txHashes.length / 3), 4, 16);
|
|
168
|
+
await this.txCollectionSink.collect(
|
|
169
|
+
async hashes => {
|
|
170
|
+
const txs = await this.reqResp.sendBatchRequest<ReqRespSubProtocol.TX>(
|
|
171
|
+
ReqRespSubProtocol.TX,
|
|
172
|
+
chunkTxHashesRequest(hashes),
|
|
173
|
+
pinnedPeer,
|
|
174
|
+
timeoutMs,
|
|
175
|
+
maxPeers,
|
|
176
|
+
maxRetryAttempts,
|
|
177
|
+
);
|
|
178
|
+
return txs.flat();
|
|
179
|
+
},
|
|
180
|
+
txHashes,
|
|
181
|
+
{ description: 'slow reqresp', timeoutMs, method: 'slow-req-resp' },
|
|
182
|
+
{ type: 'mined', block },
|
|
183
|
+
);
|
|
184
|
+
}
|
|
171
185
|
}
|
|
172
186
|
|
|
173
187
|
/** Retrieves all missing txs for the slow collection process. This is, all missing txs that are not part of a fast request. */
|
|
@@ -223,11 +237,31 @@ export class SlowTxCollection {
|
|
|
223
237
|
}
|
|
224
238
|
}
|
|
225
239
|
|
|
240
|
+
/** Groups missing tx entries by block number. */
|
|
241
|
+
private groupByBlock(entries: [string, MissingTxInfo][]): [string, MissingTxInfo][][] {
|
|
242
|
+
const groups = new Map<number, [string, MissingTxInfo][]>();
|
|
243
|
+
for (const entry of entries) {
|
|
244
|
+
const bn = +entry[1].blockNumber;
|
|
245
|
+
let group = groups.get(bn);
|
|
246
|
+
if (!group) {
|
|
247
|
+
group = [];
|
|
248
|
+
groups.set(bn, group);
|
|
249
|
+
}
|
|
250
|
+
group.push(entry);
|
|
251
|
+
}
|
|
252
|
+
return [...groups.values()];
|
|
253
|
+
}
|
|
254
|
+
|
|
226
255
|
  /** Computes the proof submission deadline for a given slot, a tx mined in this slot is no longer interesting after this deadline */
  private getDeadlineForSlot(slotNumber: SlotNumber): Date {
    // Delegates to the exported helper so other modules can share the same deadline computation.
    return getProofDeadlineForSlot(slotNumber, this.constants);
  }
|
|
233
259
|
}
|
|
260
|
+
|
|
261
|
+
/** Computes the proof submission deadline for a given slot. A tx mined in this slot is no longer interesting after this deadline. */
|
|
262
|
+
export function getProofDeadlineForSlot(slotNumber: SlotNumber, constants: L1RollupConstants): Date {
|
|
263
|
+
const epoch = getEpochAtSlot(slotNumber, constants);
|
|
264
|
+
const submissionEndEpoch = EpochNumber(epoch + constants.proofSubmissionEpochs);
|
|
265
|
+
const submissionEndTimestamp = getTimestampRangeForEpoch(submissionEndEpoch, constants)[1];
|
|
266
|
+
return new Date(Number(submissionEndTimestamp) * 1000);
|
|
267
|
+
}
|