chia-blockchain 2.5.8rc1__py3-none-any.whl → 2.6.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. chia/_tests/blockchain/test_blockchain_transactions.py +5 -2
  2. chia/_tests/conftest.py +8 -2
  3. chia/_tests/core/full_node/test_full_node.py +48 -8
  4. chia/_tests/core/full_node/test_hard_fork_utils.py +92 -0
  5. chia/_tests/core/full_node/test_prev_tx_block.py +6 -6
  6. chia/_tests/core/full_node/test_tx_processing_queue.py +92 -8
  7. chia/_tests/core/mempool/test_mempool.py +27 -15
  8. chia/_tests/core/mempool/test_mempool_manager.py +25 -15
  9. chia/_tests/util/test_replace_str_to_bytes.py +1 -0
  10. chia/_tests/util/test_testnet_overrides.py +3 -3
  11. chia/_tests/wallet/sync/test_wallet_sync.py +20 -13
  12. chia/_tests/wallet/test_new_wallet_protocol.py +14 -12
  13. chia/_tests/wallet/test_wallet_node.py +2 -1
  14. chia/apis/full_node_stub.py +4 -2
  15. chia/consensus/block_header_validation.py +1 -1
  16. chia/consensus/blockchain.py +2 -2
  17. chia/consensus/default_constants.py +3 -0
  18. chia/consensus/get_block_challenge.py +14 -6
  19. chia/consensus/multiprocess_validation.py +2 -2
  20. chia/full_node/full_node.py +3 -1
  21. chia/full_node/full_node_api.py +16 -14
  22. chia/full_node/full_node_rpc_api.py +6 -26
  23. chia/full_node/hard_fork_utils.py +44 -0
  24. chia/full_node/tx_processing_queue.py +101 -38
  25. chia/full_node/weight_proof.py +1 -1
  26. chia/simulator/block_tools.py +3 -3
  27. chia/util/initial-config.yaml +1 -0
  28. chia/util/streamable.py +2 -2
  29. chia/wallet/conditions.py +22 -1
  30. {chia_blockchain-2.5.8rc1.dist-info → chia_blockchain-2.6.0rc3.dist-info}/METADATA +2 -2
  31. {chia_blockchain-2.5.8rc1.dist-info → chia_blockchain-2.6.0rc3.dist-info}/RECORD +34 -32
  32. {chia_blockchain-2.5.8rc1.dist-info → chia_blockchain-2.6.0rc3.dist-info}/WHEEL +1 -1
  33. {chia_blockchain-2.5.8rc1.dist-info → chia_blockchain-2.6.0rc3.dist-info}/entry_points.txt +0 -0
  34. {chia_blockchain-2.5.8rc1.dist-info → chia_blockchain-2.6.0rc3.dist-info}/licenses/LICENSE +0 -0
chia/_tests/blockchain/test_blockchain_transactions.py CHANGED
@@ -8,6 +8,7 @@ from chia_rs.sized_bytes import bytes32
  from chia_rs.sized_ints import uint32, uint64

  from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block
+ from chia._tests.connection_utils import add_dummy_connection
  from chia._tests.util.generator_tools_testing import run_and_get_removals_and_additions
  from chia.consensus.blockchain import AddBlockResult
  from chia.full_node.full_node_api import FullNodeAPI
@@ -34,7 +35,7 @@ log = logging.getLogger(__name__)
  class TestBlockchainTransactions:
  @pytest.mark.anyio
  async def test_basic_blockchain_tx(
- self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools]
+ self, two_nodes: tuple[FullNodeAPI, FullNodeAPI, ChiaServer, ChiaServer, BlockTools], self_hostname: str
  ) -> None:
  num_blocks = 10
  wallet_a = WALLET_A
@@ -60,7 +61,9 @@ class TestBlockchainTransactions:
  assert spend_bundle is not None
  tx: wallet_protocol.SendTransaction = wallet_protocol.SendTransaction(spend_bundle)

- await full_node_api_1.send_transaction(tx)
+ _, dummy_node_id = await add_dummy_connection(full_node_api_1.server, self_hostname, 12312)
+ dummy_peer = full_node_api_1.server.all_connections[dummy_node_id]
+ await full_node_api_1.send_transaction(tx, dummy_peer)

  sb = full_node_1.mempool_manager.get_spendbundle(spend_bundle.name())
  assert sb == spend_bundle
chia/_tests/conftest.py CHANGED
@@ -208,13 +208,14 @@ def get_keychain():
  class ConsensusMode(ComparableEnum):
  PLAIN = 0
  HARD_FORK_2_0 = 1
- HARD_FORK_3_0 = 2
+ SOFT_FORK_2_6 = 2
+ HARD_FORK_3_0 = 3


  @pytest.fixture(
  scope="session",
  # TODO: todo_v2_plots add HARD_FORK_3_0 mode as well as after phase-out
- params=[ConsensusMode.PLAIN, ConsensusMode.HARD_FORK_2_0],
+ params=[ConsensusMode.PLAIN, ConsensusMode.HARD_FORK_2_0, ConsensusMode.SOFT_FORK_2_6],
  )
  def consensus_mode(request):
  return request.param
@@ -231,6 +232,11 @@ def blockchain_constants(consensus_mode: ConsensusMode) -> ConsensusConstants:
  PLOT_FILTER_32_HEIGHT=uint32(20),
  )

+ if consensus_mode >= ConsensusMode.SOFT_FORK_2_6:
+ ret = ret.replace(
+ SOFT_FORK8_HEIGHT=uint32(2),
+ )
+
  if consensus_mode >= ConsensusMode.HARD_FORK_3_0:
  ret = ret.replace(
  HARD_FORK_HEIGHT=uint32(2),
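
Since ConsensusMode is a ComparableEnum, ordered comparisons against the new SOFT_FORK_2_6 member work the same way the blockchain_constants fixture above uses them. A minimal illustration of gating a test on the new mode (the test name and skip reason here are hypothetical, not part of this diff):

import pytest

from chia._tests.conftest import ConsensusMode


@pytest.mark.anyio
async def test_requires_soft_fork_2_6(consensus_mode: ConsensusMode) -> None:
    # Hypothetical example: only run under sessions parametrized with a mode
    # at or beyond SOFT_FORK_2_6 (PLAIN and HARD_FORK_2_0 get skipped).
    if consensus_mode < ConsensusMode.SOFT_FORK_2_6:
        pytest.skip("needs soft-fork 2.6 consensus rules")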
chia/_tests/core/full_node/test_full_node.py CHANGED
@@ -36,7 +36,7 @@ from packaging.version import Version

  from chia._tests.blockchain.blockchain_test_utils import _validate_and_add_block, _validate_and_add_block_no_error
  from chia._tests.conftest import ConsensusMode
- from chia._tests.connection_utils import add_dummy_connection, connect_and_get_peer
+ from chia._tests.connection_utils import add_dummy_connection, add_dummy_connection_wsc, connect_and_get_peer
  from chia._tests.core.full_node.stores.test_coin_store import get_future_reward_coins
  from chia._tests.core.make_block_generator import make_spend_bundle
  from chia._tests.core.node_height import node_height_at_least
@@ -1032,7 +1032,7 @@ async def test_new_transaction_and_mempool(
  assert estimate_fees(spend_bundle) == fee
  respond_transaction = wallet_protocol.SendTransaction(spend_bundle)

- await full_node_1.send_transaction(respond_transaction)
+ await full_node_1.send_transaction(respond_transaction, fake_peer)

  request = fnp.RequestTransaction(spend_bundle.get_hash())
  req = await full_node_1.request_transaction(request)
@@ -1083,7 +1083,7 @@ async def test_new_transaction_and_mempool(
  assert err is None

  # Resubmission through wallet is also fine
- response_msg = await full_node_1.send_transaction(SendTransaction(successful_bundle), test=True)
+ response_msg = await full_node_1.send_transaction(SendTransaction(successful_bundle), fake_peer, test=True)
  assert response_msg is not None
  assert TransactionAck.from_bytes(response_msg.data).status == MempoolInclusionStatus.SUCCESS.value

@@ -2950,7 +2950,13 @@ async def test_declare_proof_of_space_no_overflow(
  assert full_node_api.full_node.blockchain.get_peak_height() == blocks[-1].height
  for i in range(10, 100):
  sb = await add_tx_to_mempool(
- full_node_api, wallet, blocks[-8], coinbase_puzzlehash, bytes32(i.to_bytes(32, "big")), uint64(i)
+ full_node_api,
+ dummy_peer,
+ wallet,
+ blocks[-8],
+ coinbase_puzzlehash,
+ bytes32(i.to_bytes(32, "big")),
+ uint64(i),
  )
  blocks = bt.get_consecutive_blocks(
  block_list_input=blocks,
@@ -2994,7 +3000,13 @@ async def test_declare_proof_of_space_overflow(
  assert full_node_api.full_node.blockchain.get_peak_height() == blocks[-1].height
  for i in range(10, 100):
  sb = await add_tx_to_mempool(
- full_node_api, wallet, blocks[-8], coinbase_puzzlehash, bytes32(i.to_bytes(32, "big")), uint64(i)
+ full_node_api,
+ dummy_peer,
+ wallet,
+ blocks[-8],
+ coinbase_puzzlehash,
+ bytes32(i.to_bytes(32, "big")),
+ uint64(i),
  )

  blocks = bt.get_consecutive_blocks(
@@ -3254,6 +3266,7 @@ async def declare_pos_unfinished_block(

  async def add_tx_to_mempool(
  full_node_api: FullNodeAPI,
+ dummy_peer: WSChiaConnection,
  wallet: WalletTool,
  spend_block: FullBlock,
  coinbase_puzzlehash: bytes32,
@@ -3269,7 +3282,7 @@ async def add_tx_to_mempool(
  assert spend_coin is not None
  spend_bundle = wallet.generate_signed_transaction(amount, receiver_puzzlehash, spend_coin)
  assert spend_bundle is not None
- response_msg = await full_node_api.send_transaction(wallet_protocol.SendTransaction(spend_bundle))
+ response_msg = await full_node_api.send_transaction(wallet_protocol.SendTransaction(spend_bundle), dummy_peer)
  assert (
  response_msg is not None
  and TransactionAck.from_bytes(response_msg.data).status == MempoolInclusionStatus.SUCCESS.value
@@ -3312,7 +3325,11 @@ def compare_unfinished_blocks(block1: UnfinishedBlock, block2: UnfinishedBlock)
  ],
  )
  async def test_pending_tx_cache_retry_on_new_peak(
- condition: ConditionOpcode, error: str, blockchain_constants: ConsensusConstants, caplog: pytest.LogCaptureFixture
+ condition: ConditionOpcode,
+ error: str,
+ blockchain_constants: ConsensusConstants,
+ caplog: pytest.LogCaptureFixture,
+ self_hostname: str,
  ) -> None:
  """
  Covers PendingTXCache items that are placed there due to unmet relative or
@@ -3344,7 +3361,9 @@ async def test_pending_tx_cache_retry_on_new_peak(
  sb = wallet.generate_signed_transaction(uint64(42), ph, coin, condition_dic)
  sb_name = sb.name()
  # Send the transaction
- res = await full_node_api.send_transaction(SendTransaction(sb))
+ _, dummy_node_id = await add_dummy_connection(full_node_api.server, self_hostname, 12312)
+ dummy_peer = full_node_api.server.all_connections[dummy_node_id]
+ res = await full_node_api.send_transaction(SendTransaction(sb), dummy_peer)
  assert res is not None
  assert ProtocolMessageTypes(res.type) == ProtocolMessageTypes.transaction_ack
  transaction_ack = TransactionAck.from_bytes(res.data)
@@ -3520,3 +3539,24 @@ async def test_corrupt_blockchain(bt: BlockTools, default_400_blocks: list[FullB
  # but there are coins in the coin store
  async with full_node.manage():
  pass # pragma: no cover
+
+
+ @pytest.mark.anyio
+ async def test_send_transaction_peer_tx_queue_full(
+ one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], self_hostname: str
+ ) -> None:
+ """
+ Covers the case where a peer's transaction queue is full and it sends a
+ `SendTransaction` message. The full node should send the proper
+ `TransactionAck` response with a correct error.
+ """
+ full_node_api, server, _ = one_node_one_block
+ # Set limit to 0 to trigger queue full exception
+ full_node_api.full_node.transaction_queue.peer_size_limit = 0
+ spend_bundle = SpendBundle([], G2Element())
+ dummy_peer, _ = await add_dummy_connection_wsc(server, self_hostname, 1337, NodeType.WALLET)
+ response_msg = await full_node_api.send_transaction(wallet_protocol.SendTransaction(spend_bundle), dummy_peer)
+ assert response_msg is not None
+ response = wallet_protocol.TransactionAck.from_bytes(response_msg.data)
+ assert MempoolInclusionStatus(response.status) == MempoolInclusionStatus.FAILED
+ assert response.error == "Transaction queue full"
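
The recurring change across these test hunks is that FullNodeAPI.send_transaction now takes the originating peer connection as an extra argument. A minimal sketch of that calling pattern, following the dummy-connection approach the tests above use (the helper name and port number here are illustrative only):

from chia._tests.connection_utils import add_dummy_connection
from chia.protocols import wallet_protocol


async def submit_spend_bundle(full_node_api, spend_bundle, self_hostname: str):
    # Illustrative helper: register a dummy connection so there is a peer to
    # attribute the transaction to, then pass it alongside SendTransaction.
    _, dummy_node_id = await add_dummy_connection(full_node_api.server, self_hostname, 12312)
    dummy_peer = full_node_api.server.all_connections[dummy_node_id]
    return await full_node_api.send_transaction(wallet_protocol.SendTransaction(spend_bundle), dummy_peer)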
chia/_tests/core/full_node/test_hard_fork_utils.py ADDED
@@ -0,0 +1,92 @@
+ from __future__ import annotations
+
+ import pytest
+ from chia_rs import BlockRecord, get_flags_for_height_and_constants
+ from chia_rs.sized_bytes import bytes32
+ from chia_rs.sized_ints import uint32
+
+ from chia.consensus.get_block_challenge import pre_sp_tx_block_height
+ from chia.full_node.hard_fork_utils import get_flags
+ from chia.simulator.block_tools import BlockTools, load_block_list, test_constants
+ from chia.util.block_cache import BlockCache
+
+
+ class MockBlocksProtocol(BlockCache):
+ """Mock implementation of BlocksProtocol for testing get_flags."""
+
+ async def get_block_record_from_db(self, header_hash: bytes32) -> BlockRecord | None:
+ return self.try_block_record(header_hash)
+
+ async def lookup_block_generators(self, header_hash: bytes32, generator_refs: set[uint32]) -> dict[uint32, bytes]:
+ return {}
+
+ def add_block_record(self, block_record: BlockRecord) -> None:
+ self.add_block(block_record)
+
+
+ @pytest.mark.anyio
+ async def test_get_flags_outside_transition_period(bt: BlockTools) -> None:
+ """Test get_flags when block is outside the transition period."""
+ block_list = bt.get_consecutive_blocks(
+ 10,
+ block_list_input=[],
+ guarantee_transaction_block=True,
+ )
+ _, _, blocks = load_block_list(block_list, bt.constants)
+ block = block_list[-1]
+ mock_blocks = MockBlocksProtocol(blocks)
+
+ # Before hard fork: block.height < HARD_FORK2_HEIGHT, expects 0
+ constants = test_constants.replace(HARD_FORK2_HEIGHT=uint32(1000))
+ assert block.height < constants.HARD_FORK2_HEIGHT
+ result = await get_flags(constants, mock_blocks, block)
+ assert result == 0
+
+ # After transition period: block.height >= HARD_FORK2_HEIGHT + SUB_EPOCH_BLOCKS
+ constants = test_constants.replace(
+ HARD_FORK2_HEIGHT=uint32(0),
+ SUB_EPOCH_BLOCKS=uint32(min(5, block.height)),
+ )
+ assert block.height >= constants.HARD_FORK2_HEIGHT + constants.SUB_EPOCH_BLOCKS
+ result = await get_flags(constants, mock_blocks, block)
+ assert result == get_flags_for_height_and_constants(block.height, constants)
+
+
+ @pytest.mark.anyio
+ async def test_get_flags_during_transition_period(bt: BlockTools) -> None:
+ """When block.height is in the transition period, get_flags should walk
+ the chain and return flags based on the latest tx block before signage point."""
+ # We need more blocks to ensure we're in the transition period
+ block_list = bt.get_consecutive_blocks(
+ 15,
+ block_list_input=[],
+ guarantee_transaction_block=True,
+ )
+ _, _, blocks = load_block_list(block_list, bt.constants)
+ mock_blocks = MockBlocksProtocol(blocks)
+
+ # Configure constants so that the block is in the transition period
+ # HARD_FORK2_HEIGHT <= block.height < HARD_FORK2_HEIGHT + SUB_EPOCH_BLOCKS
+ block = block_list[-1]
+ # Set HARD_FORK2_HEIGHT to be close to block height but leave room for transition
+ constants = test_constants.replace(
+ HARD_FORK2_HEIGHT=uint32(max(0, block.height - 5)),
+ SUB_EPOCH_BLOCKS=test_constants.SUB_EPOCH_BLOCKS,
+ )
+
+ # Ensure we're in the transition period
+ assert block.height >= constants.HARD_FORK2_HEIGHT
+ assert block.height < constants.HARD_FORK2_HEIGHT + constants.SUB_EPOCH_BLOCKS
+
+ result = await get_flags(constants, mock_blocks, block)
+
+ # The result should be based on the height of the latest tx block before the signage point
+ expected_height = pre_sp_tx_block_height(
+ constants=constants,
+ blocks=mock_blocks,
+ prev_b_hash=block.prev_header_hash,
+ sp_index=block.reward_chain_block.signage_point_index,
+ finished_sub_slots=len(block.finished_sub_slots),
+ )
+ expected = get_flags_for_height_and_constants(expected_height, constants)
+ assert result == expected
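
Taken together, these tests pin down the behavior expected of the new get_flags helper in chia/full_node/hard_fork_utils.py. A rough sketch of that logic, inferred only from the assertions above and not from the shipped implementation:

from chia_rs import get_flags_for_height_and_constants

from chia.consensus.get_block_challenge import pre_sp_tx_block_height


async def get_flags_sketch(constants, blocks, block) -> int:
    # Inferred behaviour only. Before the hard fork height the flags are 0.
    if block.height < constants.HARD_FORK2_HEIGHT:
        return 0
    # Once the transition window (one sub-epoch past the fork) has passed,
    # the flags depend on the block's own height.
    if block.height >= constants.HARD_FORK2_HEIGHT + constants.SUB_EPOCH_BLOCKS:
        return get_flags_for_height_and_constants(block.height, constants)
    # Inside the window, use the height of the latest transaction block
    # before this block's signage point instead.
    prev_tx_height = pre_sp_tx_block_height(
        constants=constants,
        blocks=blocks,
        prev_b_hash=block.prev_header_hash,
        sp_index=block.reward_chain_block.signage_point_index,
        finished_sub_slots=len(block.finished_sub_slots),
    )
    return get_flags_for_height_and_constants(prev_tx_height, constants)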
chia/_tests/core/full_node/test_prev_tx_block.py CHANGED
@@ -15,14 +15,14 @@ def test_prev_tx_block_none() -> None:
  blocks=BlockCache({}),
  prev_b_hash=test_constants.GENESIS_CHALLENGE,
  sp_index=uint8(0),
- first_in_sub_slot=False,
+ finished_sub_slots=0,
  ) == uint32(0)
  assert pre_sp_tx_block_height(
  constants=test_constants,
  blocks=BlockCache({}),
  prev_b_hash=test_constants.GENESIS_CHALLENGE,
  sp_index=uint8(1),
- first_in_sub_slot=True,
+ finished_sub_slots=1,
  ) == uint32(0)


@@ -43,7 +43,7 @@ def test_prev_tx_block_blockrecord_tx(bt: BlockTools) -> None:
  blocks=BlockCache(blocks),
  prev_b_hash=block.prev_header_hash,
  sp_index=block.reward_chain_block.signage_point_index,
- first_in_sub_slot=len(block.finished_sub_slots) > 0,
+ finished_sub_slots=len(block.finished_sub_slots),
  )
  == latest_tx_before_sp.height
  )
@@ -56,7 +56,7 @@ def test_prev_tx_block_blockrecord_tx(bt: BlockTools) -> None:
  blocks=BlockCache(blocks),
  prev_b_hash=block.prev_header_hash,
  sp_index=block.reward_chain_block.signage_point_index,
- first_in_sub_slot=len(block.finished_sub_slots) > 0,
+ finished_sub_slots=len(block.finished_sub_slots),
  )
  == latest_tx_before_sp.height
  )
@@ -69,7 +69,7 @@ def test_prev_tx_block_blockrecord_tx(bt: BlockTools) -> None:
  blocks=BlockCache(blocks),
  prev_b_hash=block.prev_header_hash,
  sp_index=block.reward_chain_block.signage_point_index,
- first_in_sub_slot=len(block.finished_sub_slots) > 0,
+ finished_sub_slots=len(block.finished_sub_slots),
  )
  == latest_tx_before_sp.height
  )
@@ -95,7 +95,7 @@ def test_prev_tx_block_blockrecord_not_tx(bt: BlockTools) -> None:
  blocks=BlockCache(blocks),
  prev_b_hash=block.prev_header_hash,
  sp_index=block.reward_chain_block.signage_point_index,
- first_in_sub_slot=len(block.finished_sub_slots) > 0,
+ finished_sub_slots=len(block.finished_sub_slots),
  ) == uint32(latest_tx_before_sp.height)


chia/_tests/core/full_node/test_tx_processing_queue.py CHANGED
@@ -13,10 +13,13 @@ from chia_rs.sized_bytes import bytes32
  from chia_rs.sized_ints import uint64

  from chia.full_node.tx_processing_queue import PeerWithTx, TransactionQueue, TransactionQueueEntry, TransactionQueueFull
+ from chia.simulator.block_tools import test_constants
  from chia.util.task_referencer import create_referenced_task

  log = logging.getLogger(__name__)

+ TEST_MAX_TX_CLVM_COST = uint64(test_constants.MAX_BLOCK_COST_CLVM // 2)
+

  @dataclass(frozen=True)
  class FakeTransactionQueueEntry:
@@ -35,7 +38,7 @@ def get_transaction_queue_entry(

  @pytest.mark.anyio
  async def test_local_txs(seeded_random: random.Random) -> None:
- transaction_queue = TransactionQueue(1000, log)
+ transaction_queue = TransactionQueue(1000, log, max_tx_clvm_cost=TEST_MAX_TX_CLVM_COST)
  # test 1 tx
  first_tx = get_transaction_queue_entry(None, 0)
  transaction_queue.put(first_tx, None)
@@ -60,7 +63,7 @@ async def test_local_txs(seeded_random: random.Random) -> None:

  @pytest.mark.anyio
  async def test_one_peer_and_await(seeded_random: random.Random) -> None:
- transaction_queue = TransactionQueue(1000, log)
+ transaction_queue = TransactionQueue(1000, log, max_tx_clvm_cost=TEST_MAX_TX_CLVM_COST)
  num_txs = 100
  peer_id = bytes32.random(seeded_random)

@@ -94,7 +97,7 @@ async def test_one_peer_and_await(seeded_random: random.Random) -> None:

  @pytest.mark.anyio
  async def test_lots_of_peers(seeded_random: random.Random) -> None:
- transaction_queue = TransactionQueue(1000, log)
+ transaction_queue = TransactionQueue(1000, log, max_tx_clvm_cost=TEST_MAX_TX_CLVM_COST)
  num_peers = 1000
  num_txs = 100
  total_txs = num_txs * num_peers
@@ -116,7 +119,7 @@ async def test_lots_of_peers(seeded_random: random.Random) -> None:

  @pytest.mark.anyio
  async def test_full_queue(seeded_random: random.Random) -> None:
- transaction_queue = TransactionQueue(1000, log)
+ transaction_queue = TransactionQueue(1000, log, max_tx_clvm_cost=TEST_MAX_TX_CLVM_COST)
  num_peers = 100
  num_txs = 1000
  total_txs = num_txs * num_peers
@@ -138,7 +141,7 @@ async def test_full_queue(seeded_random: random.Random) -> None:

  @pytest.mark.anyio
  async def test_queue_cleanup_and_fairness(seeded_random: random.Random) -> None:
- transaction_queue = TransactionQueue(1000, log)
+ transaction_queue = TransactionQueue(1000, log, max_tx_clvm_cost=TEST_MAX_TX_CLVM_COST)
  peer_a = bytes32.random(seeded_random)
  peer_b = bytes32.random(seeded_random)
  peer_c = bytes32.random(seeded_random)
@@ -201,7 +204,7 @@ async def test_peer_queue_prioritization_fallback() -> None:
  """
  Tests prioritization fallback, when `peer_id` is not in `peers_with_tx`.
  """
- queue = TransactionQueue(42, log)
+ queue = TransactionQueue(42, log, max_tx_clvm_cost=TEST_MAX_TX_CLVM_COST)
  peer1 = bytes32.random()
  peer2 = bytes32.random()
  # We'll be using this peer to test the fallback, so we don't include it in
@@ -226,13 +229,94 @@ async def test_peer_queue_prioritization_fallback() -> None:
  tx2 = get_transaction_queue_entry(peer3, 1, peers_with_tx2)
  queue.put(tx2, peer3)
  # tx2 gets top priority with FPC 1.0
- assert math.isclose(queue._queue_dict[peer3].queue[0][0], -1.0)
+ assert math.isclose(queue._peers_transactions_queues[peer3].priority_queue.queue[0][0], -1.0)
  entry = await queue.pop()
  # NOTE: This whole test file uses `index` as an addition to
  # `TransactionQueueEntry` for easier testing, hence this type ignore here
  # and everywhere else.
  assert entry.index == 1 # type: ignore[attr-defined]
  # tx1 comes next due to lowest priority fallback
- assert math.isinf(queue._queue_dict[peer3].queue[0][0])
+ assert math.isinf(queue._peers_transactions_queues[peer3].priority_queue.queue[0][0])
  entry = await queue.pop()
  assert entry.index == 0 # type: ignore[attr-defined]
+
+
+ @pytest.mark.anyio
+ async def test_normal_queue_deficit_round_robin() -> None:
+ """
+ Covers the deficit round robin behavior of the normal transaction queue where
+ we cycle through peers and pick their top transactions when their deficit
+ counters allow them to afford it, and we ensure that their deficit counters
+ adapt accordingly.
+ This also covers the case where a peer's top transaction does not advertise
+ cost, so that falls back to `max_tx_clvm_cost`.
+ This also covers the cleanup behavior where peers with no remaining
+ transactions are removed periodically (each 100 pop) from the queue.
+ """
+ test_max_tx_clvm_cost = uint64(20)
+ queue = TransactionQueue(42, log, max_tx_clvm_cost=test_max_tx_clvm_cost)
+ peer1 = bytes32.random()
+ peer2 = bytes32.random()
+ peer3 = bytes32.random()
+ peer4 = bytes32.random()
+ test_fee = uint64(42)
+ # We give this one the highest cost
+ tx1 = get_transaction_queue_entry(peer1, 0, {peer1: PeerWithTx(str(peer1), test_fee, uint64(15))})
+ queue.put(tx1, peer1)
+ # And this one the lowest cost
+ tx2 = get_transaction_queue_entry(peer2, 1, {peer2: PeerWithTx(str(peer2), test_fee, uint64(5))})
+ queue.put(tx2, peer2)
+ # And this one a cost in between
+ tx3 = get_transaction_queue_entry(peer3, 2, {peer3: PeerWithTx(str(peer3), test_fee, uint64(10))})
+ queue.put(tx3, peer3)
+ # This one has no cost information so its top transaction's advertised cost
+ # falls back to `test_max_tx_clvm_cost`.
+ tx4 = get_transaction_queue_entry(peer4, 3, {})
+ queue.put(tx4, peer4)
+ # When we try to pop a transaction, none of the peers initially can
+ # afford to send their top transactions, so we add the lowest cost among
+ # transactions (5) to all the peers' deficit counters and try again. This
+ # makes peer2 able to send its transaction tx2.
+ entry = await queue.pop()
+ assert entry.index == 1 # type: ignore[attr-defined]
+ assert queue._list_cursor == 2
+ assert queue._peers_transactions_queues[peer1].deficit == 5
+ assert queue._peers_transactions_queues[peer2].deficit == 0
+ assert queue._peers_transactions_queues[peer3].deficit == 5
+ assert queue._peers_transactions_queues[peer4].deficit == 5
+ # Now peer3, peer4 and peer1 can't afford to send their top transactions so
+ # we add the lowest cost among transactions (10) to their deficit counters
+ # and try again. This makes peer3 able to send its transaction tx3.
+ entry = await queue.pop()
+ assert entry.index == 2 # type: ignore[attr-defined]
+ assert queue._list_cursor == 3
+ assert queue._peers_transactions_queues[peer1].deficit == 15
+ assert queue._peers_transactions_queues[peer2].deficit == 0
+ assert queue._peers_transactions_queues[peer3].deficit == 0
+ assert queue._peers_transactions_queues[peer4].deficit == 15
+ # Let's force cleanup to happen on the next pop
+ queue._cleanup_counter = 99
+ # Now peer4 can't afford to send its top transaction (20) but peer1 can
+ # send tx1 (15) so it does.
+ entry = await queue.pop()
+ assert entry.index == 0 # type: ignore[attr-defined]
+ # This pop triggers cleanup, so peer1, peer2 and peer3 are removed from
+ # the transaction queue (they have nothing left) and only peer4 remains.
+ for peer in [peer1, peer2, peer3]:
+ assert peer not in queue._index_to_peer_map
+ assert peer not in queue._peers_transactions_queues
+ assert len(queue._index_to_peer_map) == 1
+ assert peer4 in queue._index_to_peer_map
+ assert queue._list_cursor == 0
+ # At this point we didn't have to increment deficit counters because peer1
+ # could already afford to send its top transaction, so peer4's deficit
+ # counter stays the same.
+ assert queue._peers_transactions_queues[peer4].deficit == 15
+ # Finally, peer4 is tried but it can't send its top transaction, which has
+ # a fallback cost of `test_max_tx_clvm_cost` (20), so we add that to its
+ # deficit counter, making it 35, and upon retrying now it's able to send
+ # its transaction tx4.
+ entry = await queue.pop()
+ assert entry.index == 3 # type: ignore[attr-defined]
+ assert queue._peers_transactions_queues[peer4].deficit == 0
+ assert queue._list_cursor == 0
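
The new test above spells out the deficit round robin scheme the reworked TransactionQueue uses for per-peer fairness. A stripped-down sketch of that scheme as the test exercises it; the class and field names here are illustrative, not the real TransactionQueue internals, and the periodic cleanup step is omitted:

from dataclasses import dataclass, field


@dataclass
class PeerLane:
    # One peer's pending transactions as (advertised CLVM cost or None, tx id).
    txs: list[tuple[int | None, int]] = field(default_factory=list)
    deficit: int = 0


class DrrSketch:
    """Toy deficit-round-robin scheduler mirroring the behaviour pinned down above."""

    def __init__(self, lanes: list[PeerLane], max_tx_clvm_cost: int) -> None:
        self.lanes = lanes
        self.max_tx_clvm_cost = max_tx_clvm_cost
        self.cursor = 0  # index of the next lane to visit

    def _top_cost(self, lane: PeerLane) -> int:
        # A transaction with no advertised cost is treated as max_tx_clvm_cost.
        cost, _ = lane.txs[0]
        return self.max_tx_clvm_cost if cost is None else cost

    def pop(self) -> int:
        if not any(lane.txs for lane in self.lanes):
            raise IndexError("no pending transactions")
        while True:
            # Visit lanes round-robin, starting where the previous pop stopped.
            for step in range(len(self.lanes)):
                i = (self.cursor + step) % len(self.lanes)
                lane = self.lanes[i]
                if lane.txs and lane.deficit >= self._top_cost(lane):
                    lane.deficit -= self._top_cost(lane)
                    _, tx_id = lane.txs.pop(0)
                    if not lane.txs:
                        lane.deficit = 0  # an emptied lane gives up leftover deficit
                    self.cursor = (i + 1) % len(self.lanes)
                    return tx_id
            # Nobody could afford their top transaction: grow every non-empty
            # lane's deficit by the cheapest pending cost and retry.
            cheapest = min(self._top_cost(lane) for lane in self.lanes if lane.txs)
            for lane in self.lanes:
                if lane.txs:
                    lane.deficit += cheapest

Fed the same four peers as the test (costs 15, 5, 10 and a missing cost that falls back to 20), this sketch reproduces the pop order tx2, tx3, tx1, tx4 and the deficit values asserted above; only the cursor value after the third pop differs, because the sketch does not model the cleanup that removes emptied peers.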
chia/_tests/core/mempool/test_mempool.py CHANGED
@@ -395,15 +395,17 @@ co = ConditionOpcode
  mis = MempoolInclusionStatus


- async def send_sb(node: FullNodeAPI, sb: SpendBundle) -> Message | None:
+ async def send_sb(node: FullNodeAPI, dummy_peer: WSChiaConnection, sb: SpendBundle) -> Message | None:
  tx = wallet_protocol.SendTransaction(sb)
- return await node.send_transaction(tx, test=True)
+ return await node.send_transaction(tx, dummy_peer, test=True)


- async def gen_and_send_sb(node: FullNodeAPI, wallet: WalletTool, coin: Coin, fee: uint64 = uint64(0)) -> SpendBundle:
+ async def gen_and_send_sb(
+ node: FullNodeAPI, dummy_peer: WSChiaConnection, wallet: WalletTool, coin: Coin, fee: uint64 = uint64(0)
+ ) -> SpendBundle:
  sb = generate_test_spend_bundle(wallet=wallet, coin=coin, fee=fee)
  assert sb is not None
- await send_sb(node, sb)
+ await send_sb(node, dummy_peer, sb)
  return sb


@@ -611,7 +613,10 @@ class TestMempoolManager:

  @pytest.mark.anyio
  async def test_double_spend_with_higher_fee(
- self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+ self,
+ one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
+ wallet_a: WalletTool,
+ self_hostname: str,
  ) -> None:
  full_node_1, _, bt = one_node_one_block
  blocks = await full_node_1.get_all_full_blocks()
@@ -633,15 +638,17 @@ class TestMempoolManager:
  coins = iter(blocks[-2].get_included_reward_coins())
  coin3, coin4 = next(coins), next(coins)

- sb1_1 = await gen_and_send_sb(full_node_1, wallet_a, coin1)
- sb1_2 = await gen_and_send_sb(full_node_1, wallet_a, coin1, fee=uint64(1))
+ _, dummy_node_id = await add_dummy_connection(full_node_1.server, self_hostname, 12312)
+ dummy_peer = full_node_1.server.all_connections[dummy_node_id]
+ sb1_1 = await gen_and_send_sb(full_node_1, dummy_peer, wallet_a, coin1)
+ sb1_2 = await gen_and_send_sb(full_node_1, dummy_peer, wallet_a, coin1, fee=uint64(1))

  # Fee increase is insufficient, the old spendbundle must stay
  assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb1_1)
  assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1_2)
  invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool)

- sb1_3 = await gen_and_send_sb(full_node_1, wallet_a, coin1, fee=MEMPOOL_MIN_FEE_INCREASE)
+ sb1_3 = await gen_and_send_sb(full_node_1, dummy_peer, wallet_a, coin1, fee=MEMPOOL_MIN_FEE_INCREASE)

  # Fee increase is sufficiently high, sb1_1 gets replaced with sb1_3
  assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1_1)
@@ -650,7 +657,7 @@ class TestMempoolManager:

  sb2 = generate_test_spend_bundle(wallet_a, coin2, fee=MEMPOOL_MIN_FEE_INCREASE)
  sb12 = SpendBundle.aggregate([sb2, sb1_3])
- await send_sb(full_node_1, sb12)
+ await send_sb(full_node_1, dummy_peer, sb12)

  # Aggregated spendbundle sb12 replaces sb1_3 since it spends a superset
  # of coins spent in sb1_3
@@ -660,7 +667,7 @@ class TestMempoolManager:

  sb3 = generate_test_spend_bundle(wallet_a, coin3, fee=uint64(MEMPOOL_MIN_FEE_INCREASE * 2))
  sb23 = SpendBundle.aggregate([sb2, sb3])
- await send_sb(full_node_1, sb23)
+ await send_sb(full_node_1, dummy_peer, sb23)

  # sb23 must not replace existing sb12 as the former does not spend all
  # coins that are spent in the latter (specifically, coin1)
@@ -668,21 +675,21 @@ class TestMempoolManager:
  assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb23)
  invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool)

- await send_sb(full_node_1, sb3)
+ await send_sb(full_node_1, dummy_peer, sb3)
  # Adding non-conflicting sb3 should succeed
  assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb3)
  invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool)

  sb4_1 = generate_test_spend_bundle(wallet_a, coin4, fee=MEMPOOL_MIN_FEE_INCREASE)
  sb1234_1 = SpendBundle.aggregate([sb12, sb3, sb4_1])
- await send_sb(full_node_1, sb1234_1)
+ await send_sb(full_node_1, dummy_peer, sb1234_1)
  # sb1234_1 should not be in pool as it decreases total fees per cost
  assert_sb_not_in_pool(full_node_1.full_node.mempool_manager, sb1234_1)
  invariant_check_mempool(full_node_1.full_node.mempool_manager.mempool)

  sb4_2 = generate_test_spend_bundle(wallet_a, coin4, fee=uint64(MEMPOOL_MIN_FEE_INCREASE * 2))
  sb1234_2 = SpendBundle.aggregate([sb12, sb3, sb4_2])
- await send_sb(full_node_1, sb1234_2)
+ await send_sb(full_node_1, dummy_peer, sb1234_2)
  # sb1234_2 has a higher fee per cost than its conflicts and should get
  # into mempool
  assert_sb_in_pool(full_node_1.full_node.mempool_manager, sb1234_2)
@@ -692,7 +699,10 @@ class TestMempoolManager:

  @pytest.mark.anyio
  async def test_invalid_signature(
- self, one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools], wallet_a: WalletTool
+ self,
+ one_node_one_block: tuple[FullNodeSimulator, ChiaServer, BlockTools],
+ wallet_a: WalletTool,
+ self_hostname: str,
  ) -> None:
  reward_ph = wallet_a.get_new_puzzlehash()

@@ -714,7 +724,9 @@ class TestMempoolManager:
  sb: SpendBundle = generate_test_spend_bundle(wallet_a, coin1)
  assert sb.aggregated_signature != G2Element.generator()
  sb = sb.replace(aggregated_signature=G2Element.generator())
- res: Message | None = await send_sb(full_node_1, sb)
+ _, dummy_node_id = await add_dummy_connection(full_node_1.server, self_hostname, 12312)
+ dummy_peer = full_node_1.server.all_connections[dummy_node_id]
+ res: Message | None = await send_sb(full_node_1, dummy_peer, sb)
  assert res is not None
  ack: TransactionAck = TransactionAck.from_bytes(res.data)
  assert ack.status == MempoolInclusionStatus.FAILED.value