chia-blockchain 2.5.0rc2__py3-none-any.whl → 2.5.1rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (637)
  1. chia/_tests/README.md +1 -1
  2. chia/_tests/blockchain/blockchain_test_utils.py +24 -26
  3. chia/_tests/blockchain/test_augmented_chain.py +6 -8
  4. chia/_tests/blockchain/test_blockchain.py +409 -307
  5. chia/_tests/blockchain/test_blockchain_transactions.py +56 -75
  6. chia/_tests/blockchain/test_build_chains.py +11 -13
  7. chia/_tests/blockchain/test_get_block_generator.py +8 -8
  8. chia/_tests/blockchain/test_lookup_fork_chain.py +3 -4
  9. chia/_tests/build-init-files.py +3 -4
  10. chia/_tests/build-job-matrix.py +9 -9
  11. chia/_tests/check_sql_statements.py +2 -3
  12. chia/_tests/clvm/benchmark_costs.py +1 -1
  13. chia/_tests/clvm/coin_store.py +7 -5
  14. chia/_tests/clvm/test_chialisp_deserialization.py +8 -8
  15. chia/_tests/clvm/test_condition_codes.py +2 -2
  16. chia/_tests/clvm/test_curry_and_treehash.py +2 -4
  17. chia/_tests/clvm/test_message_conditions.py +184 -0
  18. chia/_tests/clvm/test_puzzle_compression.py +1 -2
  19. chia/_tests/clvm/test_puzzle_drivers.py +3 -3
  20. chia/_tests/clvm/test_puzzles.py +13 -18
  21. chia/_tests/clvm/test_singletons.py +17 -17
  22. chia/_tests/clvm/test_spend_sim.py +7 -7
  23. chia/_tests/cmds/cmd_test_utils.py +42 -45
  24. chia/_tests/cmds/conftest.py +2 -2
  25. chia/_tests/cmds/test_click_types.py +21 -16
  26. chia/_tests/cmds/test_cmd_framework.py +255 -35
  27. chia/_tests/cmds/test_cmds_util.py +2 -2
  28. chia/_tests/cmds/test_daemon.py +3 -3
  29. chia/_tests/cmds/test_dev_gh.py +131 -0
  30. chia/_tests/cmds/test_farm_cmd.py +1 -2
  31. chia/_tests/cmds/test_show.py +6 -6
  32. chia/_tests/cmds/test_tx_config_args.py +2 -1
  33. chia/_tests/cmds/wallet/test_dao.py +23 -23
  34. chia/_tests/cmds/wallet/test_did.py +29 -29
  35. chia/_tests/cmds/wallet/test_nft.py +24 -23
  36. chia/_tests/cmds/wallet/test_notifications.py +8 -8
  37. chia/_tests/cmds/wallet/test_tx_decorators.py +3 -3
  38. chia/_tests/cmds/wallet/test_vcs.py +97 -73
  39. chia/_tests/cmds/wallet/test_wallet.py +74 -75
  40. chia/_tests/cmds/wallet/test_wallet_check.py +5 -7
  41. chia/_tests/conftest.py +153 -38
  42. chia/_tests/connection_utils.py +7 -6
  43. chia/_tests/core/cmds/test_beta.py +3 -3
  44. chia/_tests/core/cmds/test_keys.py +6 -6
  45. chia/_tests/core/cmds/test_wallet.py +3 -3
  46. chia/_tests/core/consensus/test_block_creation.py +3 -5
  47. chia/_tests/core/custom_types/test_coin.py +1 -3
  48. chia/_tests/core/custom_types/test_spend_bundle.py +3 -4
  49. chia/_tests/core/daemon/test_daemon.py +58 -58
  50. chia/_tests/core/daemon/test_keychain_proxy.py +2 -1
  51. chia/_tests/core/data_layer/conftest.py +4 -3
  52. chia/_tests/core/data_layer/test_data_cli.py +1 -2
  53. chia/_tests/core/data_layer/test_data_layer.py +5 -5
  54. chia/_tests/core/data_layer/test_data_layer_util.py +8 -9
  55. chia/_tests/core/data_layer/test_data_rpc.py +75 -93
  56. chia/_tests/core/data_layer/test_data_store.py +38 -37
  57. chia/_tests/core/data_layer/test_data_store_schema.py +11 -11
  58. chia/_tests/core/data_layer/util.py +11 -10
  59. chia/_tests/core/farmer/test_farmer_api.py +6 -4
  60. chia/_tests/core/full_node/full_sync/test_full_sync.py +5 -10
  61. chia/_tests/core/full_node/ram_db.py +2 -2
  62. chia/_tests/core/full_node/stores/test_block_store.py +113 -11
  63. chia/_tests/core/full_node/stores/test_coin_store.py +37 -28
  64. chia/_tests/core/full_node/stores/test_full_node_store.py +34 -30
  65. chia/_tests/core/full_node/stores/test_hint_store.py +3 -4
  66. chia/_tests/core/full_node/test_address_manager.py +2 -2
  67. chia/_tests/core/full_node/test_block_height_map.py +1 -1
  68. chia/_tests/core/full_node/test_conditions.py +10 -12
  69. chia/_tests/core/full_node/test_full_node.py +2077 -1822
  70. chia/_tests/core/full_node/test_generator_tools.py +4 -4
  71. chia/_tests/core/full_node/test_hint_management.py +2 -2
  72. chia/_tests/core/full_node/test_performance.py +2 -5
  73. chia/_tests/core/full_node/test_subscriptions.py +4 -4
  74. chia/_tests/core/full_node/test_tx_processing_queue.py +5 -4
  75. chia/_tests/core/make_block_generator.py +5 -7
  76. chia/_tests/core/mempool/test_mempool.py +205 -208
  77. chia/_tests/core/mempool/test_mempool_fee_protocol.py +5 -5
  78. chia/_tests/core/mempool/test_mempool_item_queries.py +2 -4
  79. chia/_tests/core/mempool/test_mempool_manager.py +109 -80
  80. chia/_tests/core/mempool/test_mempool_performance.py +3 -4
  81. chia/_tests/core/mempool/test_singleton_fast_forward.py +12 -12
  82. chia/_tests/core/server/flood.py +6 -4
  83. chia/_tests/core/server/serve.py +10 -7
  84. chia/_tests/core/server/test_api_protocol.py +21 -0
  85. chia/_tests/core/server/test_capabilities.py +3 -5
  86. chia/_tests/core/server/test_dos.py +15 -16
  87. chia/_tests/core/server/test_loop.py +14 -10
  88. chia/_tests/core/server/test_node_discovery.py +1 -2
  89. chia/_tests/core/server/test_rate_limits.py +156 -44
  90. chia/_tests/core/server/test_server.py +8 -7
  91. chia/_tests/core/services/test_services.py +59 -37
  92. chia/_tests/core/ssl/test_ssl.py +5 -3
  93. chia/_tests/core/test_cost_calculation.py +5 -6
  94. chia/_tests/core/test_crawler.py +2 -2
  95. chia/_tests/core/test_db_conversion.py +5 -4
  96. chia/_tests/core/test_db_validation.py +6 -5
  97. chia/_tests/core/test_farmer_harvester_rpc.py +8 -7
  98. chia/_tests/core/test_filter.py +3 -5
  99. chia/_tests/core/test_full_node_rpc.py +64 -90
  100. chia/_tests/core/test_merkle_set.py +10 -10
  101. chia/_tests/core/test_program.py +2 -4
  102. chia/_tests/core/test_rpc_util.py +1 -2
  103. chia/_tests/core/test_seeder.py +124 -12
  104. chia/_tests/core/util/test_block_cache.py +5 -5
  105. chia/_tests/core/util/test_cached_bls.py +3 -3
  106. chia/_tests/core/util/test_config.py +13 -13
  107. chia/_tests/core/util/test_files.py +2 -2
  108. chia/_tests/core/util/test_jsonify.py +9 -9
  109. chia/_tests/core/util/test_keychain.py +13 -5
  110. chia/_tests/core/util/test_keyring_wrapper.py +6 -5
  111. chia/_tests/core/util/test_log_exceptions.py +3 -3
  112. chia/_tests/core/util/test_streamable.py +38 -38
  113. chia/_tests/db/test_db_wrapper.py +13 -12
  114. chia/_tests/environments/common.py +2 -2
  115. chia/_tests/environments/full_node.py +2 -2
  116. chia/_tests/environments/wallet.py +109 -48
  117. chia/_tests/farmer_harvester/test_farmer.py +35 -35
  118. chia/_tests/farmer_harvester/test_farmer_harvester.py +17 -17
  119. chia/_tests/farmer_harvester/test_filter_prefix_bits.py +6 -5
  120. chia/_tests/farmer_harvester/test_third_party_harvesters.py +73 -46
  121. chia/_tests/fee_estimation/test_fee_estimation_integration.py +8 -8
  122. chia/_tests/fee_estimation/test_fee_estimation_rpc.py +47 -47
  123. chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py +6 -7
  124. chia/_tests/fee_estimation/test_mempoolitem_height_added.py +11 -11
  125. chia/_tests/generator/test_compression.py +13 -30
  126. chia/_tests/generator/test_generator_types.py +3 -3
  127. chia/_tests/generator/test_rom.py +7 -9
  128. chia/_tests/plot_sync/test_delta.py +2 -3
  129. chia/_tests/plot_sync/test_plot_sync.py +25 -24
  130. chia/_tests/plot_sync/test_receiver.py +9 -9
  131. chia/_tests/plot_sync/test_sender.py +1 -1
  132. chia/_tests/plot_sync/test_sync_simulated.py +27 -26
  133. chia/_tests/plot_sync/util.py +2 -1
  134. chia/_tests/plotting/test_plot_manager.py +54 -11
  135. chia/_tests/plotting/util.py +2 -3
  136. chia/_tests/pools/test_pool_cli_parsing.py +128 -0
  137. chia/_tests/pools/test_pool_cmdline.py +993 -15
  138. chia/_tests/pools/test_pool_config.py +3 -5
  139. chia/_tests/pools/test_pool_puzzles_lifecycle.py +10 -11
  140. chia/_tests/pools/test_pool_rpc.py +203 -90
  141. chia/_tests/pools/test_pool_wallet.py +12 -8
  142. chia/_tests/pools/test_wallet_pool_store.py +3 -3
  143. chia/_tests/process_junit.py +16 -17
  144. chia/_tests/rpc/test_rpc_client.py +59 -2
  145. chia/_tests/rpc/test_rpc_server.py +183 -0
  146. chia/_tests/simulation/test_simulation.py +5 -5
  147. chia/_tests/simulation/test_simulator.py +8 -10
  148. chia/_tests/simulation/test_start_simulator.py +5 -4
  149. chia/_tests/timelord/test_new_peak.py +19 -19
  150. chia/_tests/tools/test_run_block.py +1 -2
  151. chia/_tests/tools/test_virtual_project.py +591 -0
  152. chia/_tests/util/benchmark_cost.py +9 -9
  153. chia/_tests/util/benchmarks.py +1 -2
  154. chia/_tests/util/blockchain.py +12 -11
  155. chia/_tests/util/blockchain_mock.py +15 -15
  156. chia/_tests/util/build_network_protocol_files.py +12 -12
  157. chia/_tests/util/db_connection.py +3 -2
  158. chia/_tests/util/full_sync.py +14 -6
  159. chia/_tests/util/gen_ssl_certs.py +4 -5
  160. chia/_tests/util/generator_tools_testing.py +5 -7
  161. chia/_tests/util/get_name_puzzle_conditions.py +52 -0
  162. chia/_tests/util/key_tool.py +2 -3
  163. chia/_tests/util/misc.py +59 -106
  164. chia/_tests/util/network_protocol_data.py +7 -9
  165. chia/_tests/util/protocol_messages_json.py +112 -111
  166. chia/_tests/util/rpc.py +3 -0
  167. chia/_tests/util/run_block.py +16 -16
  168. chia/_tests/util/setup_nodes.py +25 -23
  169. chia/{clvm → _tests/util}/spend_sim.py +59 -55
  170. chia/_tests/util/split_managers.py +12 -9
  171. chia/_tests/util/temp_file.py +1 -1
  172. chia/_tests/util/test_action_scope.py +2 -1
  173. chia/_tests/util/test_async_pool.py +8 -8
  174. chia/_tests/util/test_build_job_matrix.py +2 -3
  175. chia/_tests/util/test_condition_tools.py +4 -6
  176. chia/_tests/util/test_config.py +5 -5
  177. chia/_tests/util/test_dump_keyring.py +1 -1
  178. chia/_tests/util/test_full_block_utils.py +19 -11
  179. chia/_tests/util/test_limited_semaphore.py +4 -3
  180. chia/_tests/util/test_logging_filter.py +2 -3
  181. chia/_tests/util/test_misc.py +29 -28
  182. chia/_tests/util/test_network.py +32 -31
  183. chia/_tests/util/test_network_protocol_files.py +2 -3
  184. chia/_tests/util/test_network_protocol_json.py +1 -0
  185. chia/_tests/util/test_network_protocol_test.py +18 -19
  186. chia/_tests/util/test_paginator.py +3 -4
  187. chia/_tests/util/test_pprint.py +1 -1
  188. chia/_tests/util/test_priority_mutex.py +18 -17
  189. chia/_tests/util/test_recursive_replace.py +2 -2
  190. chia/_tests/util/test_testnet_overrides.py +3 -3
  191. chia/_tests/util/test_timing.py +1 -1
  192. chia/_tests/util/test_trusted_peer.py +2 -2
  193. chia/_tests/util/time_out_assert.py +43 -6
  194. chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py +13 -13
  195. chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py +1 -1
  196. chia/_tests/wallet/cat_wallet/test_cat_wallet.py +117 -29
  197. chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +15 -15
  198. chia/_tests/wallet/cat_wallet/test_trades.py +50 -28
  199. chia/_tests/wallet/clawback/test_clawback_decorator.py +3 -5
  200. chia/_tests/wallet/clawback/test_clawback_lifecycle.py +6 -6
  201. chia/_tests/wallet/clawback/test_clawback_metadata.py +1 -2
  202. chia/_tests/wallet/conftest.py +135 -74
  203. chia/_tests/wallet/dao_wallet/test_dao_clvm.py +25 -17
  204. chia/_tests/wallet/dao_wallet/test_dao_wallets.py +75 -75
  205. chia/_tests/wallet/db_wallet/test_db_graftroot.py +10 -12
  206. chia/_tests/wallet/db_wallet/test_dl_offers.py +6 -6
  207. chia/_tests/wallet/db_wallet/test_dl_wallet.py +18 -18
  208. chia/_tests/wallet/did_wallet/test_did.py +1277 -474
  209. chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +12 -11
  210. chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py +115 -105
  211. chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +6 -7
  212. chia/_tests/wallet/nft_wallet/test_nft_offers.py +16 -16
  213. chia/_tests/wallet/nft_wallet/test_nft_puzzles.py +3 -3
  214. chia/_tests/wallet/nft_wallet/test_nft_wallet.py +38 -12
  215. chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py +1 -1
  216. chia/_tests/wallet/rpc/test_dl_wallet_rpc.py +31 -33
  217. chia/_tests/wallet/rpc/test_wallet_rpc.py +218 -171
  218. chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py +36 -37
  219. chia/_tests/wallet/sync/test_wallet_sync.py +241 -78
  220. chia/_tests/wallet/test_address_type.py +20 -20
  221. chia/_tests/wallet/test_clvm_streamable.py +5 -5
  222. chia/_tests/wallet/test_coin_management.py +354 -0
  223. chia/_tests/wallet/test_coin_selection.py +34 -35
  224. chia/_tests/wallet/test_conditions.py +28 -16
  225. chia/_tests/wallet/test_debug_spend_bundle.py +156 -14
  226. chia/_tests/wallet/test_new_wallet_protocol.py +29 -31
  227. chia/_tests/wallet/test_nft_store.py +1 -2
  228. chia/_tests/wallet/test_notifications.py +2 -2
  229. chia/_tests/wallet/test_offer_parsing_performance.py +1 -1
  230. chia/_tests/wallet/test_puzzle_store.py +2 -3
  231. chia/_tests/wallet/test_sign_coin_spends.py +3 -3
  232. chia/_tests/wallet/test_signer_protocol.py +33 -34
  233. chia/_tests/wallet/test_singleton_lifecycle_fast.py +29 -29
  234. chia/_tests/wallet/test_taproot.py +1 -1
  235. chia/_tests/wallet/test_transaction_store.py +23 -19
  236. chia/_tests/wallet/test_util.py +36 -32
  237. chia/_tests/wallet/test_wallet.py +37 -37
  238. chia/_tests/wallet/test_wallet_action_scope.py +8 -8
  239. chia/_tests/wallet/test_wallet_blockchain.py +4 -6
  240. chia/_tests/wallet/test_wallet_coin_store.py +34 -34
  241. chia/_tests/wallet/test_wallet_node.py +69 -72
  242. chia/_tests/wallet/test_wallet_retry.py +3 -3
  243. chia/_tests/wallet/test_wallet_state_manager.py +12 -5
  244. chia/_tests/wallet/test_wallet_trade_store.py +2 -2
  245. chia/_tests/wallet/test_wallet_utils.py +5 -4
  246. chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py +3 -3
  247. chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +18 -18
  248. chia/_tests/wallet/vc_wallet/test_vc_wallet.py +69 -40
  249. chia/_tests/wallet/wallet_block_tools.py +27 -27
  250. chia/_tests/weight_proof/test_weight_proof.py +30 -30
  251. chia/apis.py +19 -0
  252. chia/cmds/beta.py +8 -7
  253. chia/cmds/beta_funcs.py +15 -11
  254. chia/cmds/check_wallet_db.py +29 -27
  255. chia/cmds/chia.py +17 -9
  256. chia/cmds/cmd_classes.py +87 -79
  257. chia/cmds/cmd_helpers.py +242 -0
  258. chia/cmds/cmds_util.py +56 -66
  259. chia/cmds/coin_funcs.py +168 -153
  260. chia/cmds/coins.py +156 -194
  261. chia/cmds/configure.py +4 -3
  262. chia/cmds/dao.py +89 -33
  263. chia/cmds/dao_funcs.py +55 -33
  264. chia/cmds/data.py +7 -6
  265. chia/cmds/data_funcs.py +26 -21
  266. chia/cmds/db.py +4 -3
  267. chia/cmds/db_backup_func.py +2 -2
  268. chia/cmds/db_upgrade_func.py +3 -3
  269. chia/cmds/db_validate_func.py +2 -2
  270. chia/cmds/dev.py +2 -0
  271. chia/cmds/farm.py +18 -5
  272. chia/cmds/farm_funcs.py +17 -24
  273. chia/cmds/gh.py +275 -0
  274. chia/cmds/init.py +4 -11
  275. chia/cmds/init_funcs.py +9 -9
  276. chia/cmds/installers.py +5 -3
  277. chia/cmds/keys.py +56 -39
  278. chia/cmds/keys_funcs.py +30 -31
  279. chia/cmds/netspace.py +6 -3
  280. chia/cmds/netspace_funcs.py +3 -2
  281. chia/cmds/param_types.py +16 -6
  282. chia/cmds/passphrase.py +8 -7
  283. chia/cmds/passphrase_funcs.py +7 -61
  284. chia/cmds/peer.py +2 -1
  285. chia/cmds/peer_funcs.py +5 -5
  286. chia/cmds/plotnft.py +207 -153
  287. chia/cmds/plotnft_funcs.py +205 -174
  288. chia/cmds/plots.py +14 -6
  289. chia/cmds/plotters.py +2 -1
  290. chia/cmds/rpc.py +48 -28
  291. chia/cmds/show.py +2 -1
  292. chia/cmds/show_funcs.py +7 -6
  293. chia/cmds/signer.py +50 -58
  294. chia/cmds/sim.py +22 -14
  295. chia/cmds/sim_funcs.py +11 -11
  296. chia/cmds/start.py +3 -3
  297. chia/cmds/start_funcs.py +9 -12
  298. chia/cmds/stop.py +4 -3
  299. chia/cmds/units.py +1 -3
  300. chia/cmds/wallet.py +252 -96
  301. chia/cmds/wallet_funcs.py +217 -143
  302. chia/consensus/block_body_validation.py +133 -86
  303. chia/consensus/block_creation.py +42 -21
  304. chia/consensus/block_header_validation.py +32 -37
  305. chia/consensus/block_record.py +1 -2
  306. chia/consensus/blockchain.py +167 -180
  307. chia/consensus/blockchain_interface.py +10 -10
  308. chia/consensus/constants.py +2 -2
  309. chia/consensus/default_constants.py +3 -4
  310. chia/consensus/difficulty_adjustment.py +5 -5
  311. chia/consensus/find_fork_point.py +5 -5
  312. chia/consensus/full_block_to_block_record.py +4 -4
  313. chia/consensus/get_block_challenge.py +2 -2
  314. chia/consensus/get_block_generator.py +4 -3
  315. chia/consensus/multiprocess_validation.py +207 -304
  316. chia/consensus/vdf_info_computation.py +3 -3
  317. chia/daemon/client.py +46 -27
  318. chia/daemon/keychain_proxy.py +10 -9
  319. chia/daemon/keychain_server.py +18 -18
  320. chia/daemon/server.py +103 -113
  321. chia/daemon/windows_signal.py +2 -2
  322. chia/data_layer/data_layer.py +64 -76
  323. chia/data_layer/data_layer_api.py +8 -0
  324. chia/data_layer/data_layer_errors.py +3 -3
  325. chia/data_layer/data_layer_server.py +2 -2
  326. chia/data_layer/data_layer_util.py +71 -71
  327. chia/data_layer/data_layer_wallet.py +63 -67
  328. chia/data_layer/data_store.py +72 -72
  329. chia/data_layer/dl_wallet_store.py +10 -10
  330. chia/data_layer/download_data.py +5 -5
  331. chia/data_layer/s3_plugin_service.py +9 -9
  332. chia/data_layer/util/benchmark.py +0 -1
  333. chia/data_layer/util/plugin.py +2 -3
  334. chia/farmer/farmer.py +46 -43
  335. chia/farmer/farmer_api.py +27 -21
  336. chia/full_node/block_height_map.py +6 -6
  337. chia/full_node/block_store.py +41 -35
  338. chia/full_node/coin_store.py +42 -41
  339. chia/full_node/fee_estimate.py +2 -2
  340. chia/full_node/fee_estimation.py +1 -2
  341. chia/full_node/fee_history.py +5 -6
  342. chia/full_node/fee_tracker.py +24 -24
  343. chia/full_node/full_node.py +574 -300
  344. chia/full_node/full_node_api.py +181 -130
  345. chia/full_node/full_node_store.py +43 -43
  346. chia/full_node/hint_management.py +4 -4
  347. chia/full_node/hint_store.py +9 -10
  348. chia/full_node/mempool.py +25 -19
  349. chia/full_node/mempool_check_conditions.py +11 -42
  350. chia/full_node/mempool_manager.py +48 -53
  351. chia/full_node/pending_tx_cache.py +9 -9
  352. chia/full_node/subscriptions.py +23 -24
  353. chia/full_node/sync_store.py +8 -7
  354. chia/full_node/tx_processing_queue.py +3 -3
  355. chia/full_node/util/__init__.py +0 -0
  356. chia/full_node/weight_proof.py +79 -78
  357. chia/harvester/harvester.py +9 -8
  358. chia/harvester/harvester_api.py +19 -13
  359. chia/introducer/introducer.py +7 -5
  360. chia/introducer/introducer_api.py +9 -3
  361. chia/legacy/keyring.py +6 -5
  362. chia/plot_sync/delta.py +8 -8
  363. chia/plot_sync/receiver.py +12 -11
  364. chia/plot_sync/sender.py +15 -12
  365. chia/plotters/bladebit.py +12 -12
  366. chia/plotters/chiapos.py +2 -2
  367. chia/plotters/madmax.py +8 -8
  368. chia/plotters/plotters.py +6 -6
  369. chia/plotters/plotters_util.py +6 -4
  370. chia/plotting/cache.py +8 -7
  371. chia/plotting/check_plots.py +8 -8
  372. chia/plotting/create_plots.py +6 -6
  373. chia/plotting/manager.py +22 -22
  374. chia/plotting/util.py +31 -19
  375. chia/pools/pool_config.py +7 -7
  376. chia/pools/pool_puzzles.py +16 -16
  377. chia/pools/pool_wallet.py +64 -57
  378. chia/pools/pool_wallet_info.py +3 -3
  379. chia/protocols/full_node_protocol.py +3 -3
  380. chia/protocols/harvester_protocol.py +12 -12
  381. chia/protocols/introducer_protocol.py +1 -2
  382. chia/protocols/protocol_message_types.py +4 -4
  383. chia/protocols/protocol_state_machine.py +2 -2
  384. chia/protocols/protocol_timing.py +1 -0
  385. chia/protocols/shared_protocol.py +3 -3
  386. chia/protocols/timelord_protocol.py +2 -2
  387. chia/protocols/wallet_protocol.py +33 -33
  388. chia/rpc/crawler_rpc_api.py +12 -7
  389. chia/rpc/data_layer_rpc_api.py +49 -44
  390. chia/rpc/data_layer_rpc_client.py +41 -41
  391. chia/rpc/data_layer_rpc_util.py +7 -11
  392. chia/rpc/farmer_rpc_api.py +32 -27
  393. chia/rpc/farmer_rpc_client.py +14 -14
  394. chia/rpc/full_node_rpc_api.py +53 -48
  395. chia/rpc/full_node_rpc_client.py +30 -30
  396. chia/rpc/harvester_rpc_api.py +16 -11
  397. chia/rpc/harvester_rpc_client.py +6 -6
  398. chia/rpc/rpc_client.py +34 -14
  399. chia/rpc/rpc_server.py +117 -43
  400. chia/rpc/timelord_rpc_api.py +9 -4
  401. chia/rpc/util.py +11 -211
  402. chia/rpc/wallet_request_types.py +276 -60
  403. chia/rpc/wallet_rpc_api.py +563 -399
  404. chia/rpc/wallet_rpc_client.py +220 -250
  405. chia/seeder/crawl_store.py +6 -8
  406. chia/seeder/crawler.py +23 -36
  407. chia/seeder/crawler_api.py +28 -22
  408. chia/seeder/dns_server.py +99 -50
  409. chia/seeder/start_crawler.py +13 -9
  410. chia/server/address_manager.py +19 -19
  411. chia/server/address_manager_store.py +17 -17
  412. chia/server/api_protocol.py +106 -1
  413. chia/server/capabilities.py +3 -3
  414. chia/server/chia_policy.py +17 -16
  415. chia/server/introducer_peers.py +3 -3
  416. chia/server/node_discovery.py +34 -38
  417. chia/server/rate_limit_numbers.py +26 -16
  418. chia/server/rate_limits.py +67 -27
  419. chia/server/server.py +52 -31
  420. chia/server/signal_handlers.py +6 -3
  421. chia/server/ssl_context.py +5 -5
  422. chia/server/start_data_layer.py +37 -23
  423. chia/server/start_farmer.py +28 -16
  424. chia/server/start_full_node.py +29 -23
  425. chia/server/start_harvester.py +28 -15
  426. chia/server/start_introducer.py +27 -15
  427. chia/server/start_service.py +17 -29
  428. chia/server/start_timelord.py +25 -18
  429. chia/server/start_wallet.py +22 -18
  430. chia/server/upnp.py +4 -3
  431. chia/server/ws_connection.py +68 -54
  432. chia/simulator/add_blocks_in_batches.py +54 -0
  433. chia/simulator/block_tools.py +65 -64
  434. chia/simulator/full_node_simulator.py +66 -74
  435. chia/simulator/setup_services.py +10 -9
  436. chia/simulator/simulator_full_node_rpc_api.py +12 -14
  437. chia/simulator/simulator_full_node_rpc_client.py +3 -5
  438. chia/simulator/simulator_test_tools.py +8 -7
  439. chia/simulator/socket.py +1 -4
  440. chia/simulator/ssl_certs.py +5 -5
  441. chia/simulator/ssl_certs_1.py +2 -4
  442. chia/simulator/ssl_certs_10.py +2 -4
  443. chia/simulator/ssl_certs_2.py +2 -4
  444. chia/simulator/ssl_certs_3.py +2 -4
  445. chia/simulator/ssl_certs_4.py +2 -4
  446. chia/simulator/ssl_certs_5.py +2 -4
  447. chia/simulator/ssl_certs_6.py +2 -4
  448. chia/simulator/ssl_certs_7.py +2 -4
  449. chia/simulator/ssl_certs_8.py +2 -4
  450. chia/simulator/ssl_certs_9.py +2 -4
  451. chia/simulator/start_simulator.py +14 -6
  452. chia/simulator/wallet_tools.py +21 -20
  453. chia/ssl/create_ssl.py +11 -11
  454. chia/timelord/iters_from_block.py +2 -2
  455. chia/timelord/timelord.py +57 -33
  456. chia/timelord/timelord_api.py +12 -6
  457. chia/timelord/timelord_launcher.py +10 -8
  458. chia/timelord/timelord_state.py +5 -5
  459. chia/types/block_protocol.py +2 -2
  460. chia/types/blockchain_format/coin.py +3 -3
  461. chia/types/blockchain_format/program.py +17 -18
  462. chia/types/blockchain_format/tree_hash.py +9 -9
  463. chia/types/coin_spend.py +8 -8
  464. chia/types/condition_with_args.py +1 -2
  465. chia/types/eligible_coin_spends.py +16 -15
  466. chia/types/generator_types.py +1 -2
  467. chia/types/internal_mempool_item.py +1 -2
  468. chia/types/mempool_item.py +7 -7
  469. chia/types/mempool_submission_status.py +2 -2
  470. chia/types/peer_info.py +1 -1
  471. chia/types/spend_bundle.py +1 -2
  472. chia/types/transaction_queue_entry.py +2 -2
  473. chia/types/unfinished_header_block.py +2 -2
  474. chia/types/validation_state.py +14 -0
  475. chia/types/weight_proof.py +5 -6
  476. chia/util/action_scope.py +8 -8
  477. chia/util/async_pool.py +6 -4
  478. chia/util/augmented_chain.py +13 -9
  479. chia/util/batches.py +5 -2
  480. chia/util/bech32m.py +14 -11
  481. chia/util/beta_metrics.py +5 -4
  482. chia/util/block_cache.py +5 -5
  483. chia/util/byte_types.py +2 -0
  484. chia/util/check_fork_next_block.py +3 -2
  485. chia/util/chia_logging.py +41 -21
  486. chia/util/collection.py +3 -3
  487. chia/util/condition_tools.py +18 -18
  488. chia/util/config.py +26 -25
  489. chia/util/cpu.py +2 -0
  490. chia/util/db_synchronous.py +2 -0
  491. chia/util/db_version.py +2 -0
  492. chia/util/db_wrapper.py +13 -10
  493. chia/util/default_root.py +17 -0
  494. chia/util/dump_keyring.py +6 -6
  495. chia/util/errors.py +5 -3
  496. chia/util/file_keyring.py +22 -33
  497. chia/util/files.py +2 -0
  498. chia/util/full_block_utils.py +31 -7
  499. chia/util/generator_tools.py +18 -8
  500. chia/util/hash.py +3 -1
  501. chia/util/initial-config.yaml +19 -0
  502. chia/util/inline_executor.py +2 -0
  503. chia/util/ip_address.py +39 -0
  504. chia/util/json_util.py +0 -4
  505. chia/util/keychain.py +27 -24
  506. chia/util/keyring_wrapper.py +65 -4
  507. chia/util/limited_semaphore.py +3 -1
  508. chia/util/lock.py +4 -2
  509. chia/util/log_exceptions.py +5 -2
  510. chia/util/logging.py +3 -1
  511. chia/util/lru_cache.py +2 -0
  512. chia/util/math.py +4 -4
  513. chia/util/network.py +15 -73
  514. chia/util/paginator.py +3 -1
  515. chia/util/path.py +2 -0
  516. chia/util/permissions.py +3 -2
  517. chia/util/prev_transaction_block.py +1 -3
  518. chia/util/priority_mutex.py +6 -3
  519. chia/util/profiler.py +7 -4
  520. chia/util/recursive_replace.py +2 -0
  521. chia/util/safe_cancel_task.py +2 -0
  522. chia/util/service_groups.py +2 -2
  523. chia/util/setproctitle.py +2 -0
  524. chia/util/significant_bits.py +2 -0
  525. chia/util/ssl_check.py +11 -11
  526. chia/util/streamable.py +44 -56
  527. chia/util/task_referencer.py +59 -0
  528. chia/util/task_timing.py +22 -18
  529. chia/util/timing.py +4 -1
  530. chia/util/vdf_prover.py +2 -3
  531. chia/util/virtual_project_analysis.py +540 -0
  532. chia/util/ws_message.py +6 -6
  533. chia/wallet/cat_wallet/cat_info.py +3 -3
  534. chia/wallet/cat_wallet/cat_outer_puzzle.py +3 -3
  535. chia/wallet/cat_wallet/cat_utils.py +5 -4
  536. chia/wallet/cat_wallet/cat_wallet.py +56 -70
  537. chia/wallet/cat_wallet/dao_cat_info.py +3 -3
  538. chia/wallet/cat_wallet/dao_cat_wallet.py +18 -18
  539. chia/wallet/cat_wallet/lineage_store.py +2 -2
  540. chia/wallet/coin_selection.py +15 -15
  541. chia/wallet/conditions.py +257 -71
  542. chia/wallet/dao_wallet/dao_info.py +4 -4
  543. chia/wallet/dao_wallet/dao_utils.py +43 -42
  544. chia/wallet/dao_wallet/dao_wallet.py +66 -68
  545. chia/wallet/db_wallet/db_wallet_puzzles.py +12 -8
  546. chia/wallet/derive_keys.py +11 -11
  547. chia/wallet/did_wallet/did_info.py +3 -3
  548. chia/wallet/did_wallet/did_wallet.py +56 -47
  549. chia/wallet/did_wallet/did_wallet_puzzles.py +7 -6
  550. chia/wallet/lineage_proof.py +4 -4
  551. chia/wallet/nft_wallet/metadata_outer_puzzle.py +2 -2
  552. chia/wallet/nft_wallet/nft_info.py +4 -4
  553. chia/wallet/nft_wallet/nft_puzzles.py +16 -16
  554. chia/wallet/nft_wallet/nft_wallet.py +90 -89
  555. chia/wallet/nft_wallet/ownership_outer_puzzle.py +2 -2
  556. chia/wallet/nft_wallet/singleton_outer_puzzle.py +2 -2
  557. chia/wallet/nft_wallet/transfer_program_puzzle.py +2 -2
  558. chia/wallet/nft_wallet/uncurry_nft.py +2 -2
  559. chia/wallet/notification_manager.py +5 -5
  560. chia/wallet/notification_store.py +6 -6
  561. chia/wallet/outer_puzzles.py +2 -2
  562. chia/wallet/payment.py +4 -5
  563. chia/wallet/puzzle_drivers.py +4 -4
  564. chia/wallet/puzzles/clawback/drivers.py +5 -5
  565. chia/wallet/puzzles/clawback/puzzle_decorator.py +7 -7
  566. chia/wallet/puzzles/load_clvm.py +2 -3
  567. chia/wallet/puzzles/p2_conditions.py +1 -2
  568. chia/wallet/puzzles/p2_delegated_conditions.py +1 -2
  569. chia/wallet/puzzles/p2_delegated_puzzle.py +2 -3
  570. chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py +3 -4
  571. chia/wallet/puzzles/p2_m_of_n_delegate_direct.py +1 -2
  572. chia/wallet/puzzles/p2_puzzle_hash.py +1 -2
  573. chia/wallet/puzzles/puzzle_utils.py +7 -7
  574. chia/wallet/puzzles/singleton_top_layer.py +6 -5
  575. chia/wallet/puzzles/singleton_top_layer_v1_1.py +6 -5
  576. chia/wallet/puzzles/tails.py +34 -30
  577. chia/wallet/signer_protocol.py +7 -8
  578. chia/wallet/singleton.py +4 -4
  579. chia/wallet/trade_manager.py +155 -141
  580. chia/wallet/trade_record.py +5 -5
  581. chia/wallet/trading/offer.py +100 -101
  582. chia/wallet/trading/trade_store.py +14 -14
  583. chia/wallet/transaction_record.py +31 -16
  584. chia/wallet/util/address_type.py +4 -4
  585. chia/wallet/util/blind_signer_tl.py +8 -12
  586. chia/wallet/util/clvm_streamable.py +15 -15
  587. chia/wallet/util/compute_hints.py +5 -5
  588. chia/wallet/util/compute_memos.py +4 -6
  589. chia/wallet/util/curry_and_treehash.py +3 -2
  590. chia/wallet/util/debug_spend_bundle.py +6 -8
  591. chia/wallet/util/merkle_tree.py +10 -10
  592. chia/wallet/util/merkle_utils.py +10 -10
  593. chia/wallet/util/new_peak_queue.py +3 -3
  594. chia/wallet/util/peer_request_cache.py +8 -8
  595. chia/{util → wallet/util}/pprint.py +2 -3
  596. chia/wallet/util/puzzle_compression.py +3 -4
  597. chia/wallet/util/puzzle_decorator.py +10 -10
  598. chia/wallet/util/query_filter.py +9 -10
  599. chia/wallet/util/tx_config.py +12 -12
  600. chia/wallet/util/wallet_sync_utils.py +24 -21
  601. chia/wallet/util/wallet_types.py +9 -2
  602. chia/wallet/vc_wallet/cr_cat_drivers.py +28 -27
  603. chia/wallet/vc_wallet/cr_cat_wallet.py +42 -40
  604. chia/wallet/vc_wallet/cr_outer_puzzle.py +4 -4
  605. chia/wallet/vc_wallet/vc_drivers.py +16 -16
  606. chia/wallet/vc_wallet/vc_store.py +9 -9
  607. chia/wallet/vc_wallet/vc_wallet.py +35 -35
  608. chia/wallet/wallet.py +54 -54
  609. chia/wallet/wallet_action_scope.py +14 -13
  610. chia/wallet/wallet_blockchain.py +10 -10
  611. chia/wallet/wallet_coin_record.py +2 -2
  612. chia/wallet/wallet_coin_store.py +10 -10
  613. chia/wallet/wallet_info.py +1 -2
  614. chia/wallet/wallet_interested_store.py +5 -5
  615. chia/wallet/wallet_nft_store.py +6 -6
  616. chia/wallet/wallet_node.py +72 -76
  617. chia/wallet/wallet_node_api.py +33 -27
  618. chia/wallet/wallet_pool_store.py +1 -2
  619. chia/wallet/wallet_protocol.py +15 -15
  620. chia/wallet/wallet_puzzle_store.py +35 -4
  621. chia/wallet/wallet_retry_store.py +2 -2
  622. chia/wallet/wallet_singleton_store.py +10 -9
  623. chia/wallet/wallet_spend_bundle.py +4 -20
  624. chia/wallet/wallet_state_manager.py +223 -224
  625. chia/wallet/wallet_transaction_store.py +44 -18
  626. chia/wallet/wallet_user_store.py +2 -2
  627. chia/wallet/wallet_weight_proof_handler.py +2 -2
  628. {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/LICENSE +1 -1
  629. {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/METADATA +67 -72
  630. chia_blockchain-2.5.1rc2.dist-info/RECORD +1042 -0
  631. {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/WHEEL +1 -1
  632. mozilla-ca/cacert.pem +32 -87
  633. chia/_tests/cmds/wallet/test_coins.py +0 -195
  634. chia/consensus/block_root_validation.py +0 -46
  635. chia/util/api_decorators.py +0 -89
  636. chia_blockchain-2.5.0rc2.dist-info/RECORD +0 -1028
  637. {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/entry_points.txt +0 -0
@@ -2,6 +2,7 @@ from __future__ import annotations
 
 import asyncio
 import contextlib
+import copy
 import dataclasses
 import logging
 import multiprocessing
@@ -9,27 +10,18 @@ import random
 import sqlite3
 import time
 import traceback
+from collections.abc import AsyncIterator, Awaitable, Sequence
 from multiprocessing.context import BaseContext
 from pathlib import Path
-from typing import (
-    TYPE_CHECKING,
-    Any,
-    AsyncIterator,
-    Awaitable,
-    Callable,
-    ClassVar,
-    Dict,
-    List,
-    Optional,
-    Set,
-    TextIO,
-    Tuple,
-    Union,
-    cast,
-    final,
+from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TextIO, Union, cast, final
+
+from chia_rs import (
+    AugSchemeMPL,
+    BLSCache,
+    get_flags_for_height_and_constants,
+    run_block_generator,
+    run_block_generator2,
 )
-
-from chia_rs import AugSchemeMPL, BLSCache
 from packaging.version import Version
 
 from chia.consensus.block_body_validation import ForkInfo
@@ -40,9 +32,8 @@ from chia.consensus.blockchain_interface import BlockchainInterface
 from chia.consensus.constants import ConsensusConstants
 from chia.consensus.cost_calculator import NPCResult
 from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
-from chia.consensus.get_block_generator import get_block_generator
 from chia.consensus.make_sub_epoch_summary import next_sub_epoch_summary
-from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing
+from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_block
 from chia.consensus.pot_iterations import calculate_sp_iters
 from chia.full_node.block_store import BlockStore
 from chia.full_node.coin_store import CoinStore
@@ -76,7 +67,6 @@ from chia.types.blockchain_format.vdf import CompressibleVDFField, VDFInfo, VDFP
 from chia.types.coin_record import CoinRecord
 from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
 from chia.types.full_block import FullBlock
-from chia.types.generator_types import BlockGenerator
 from chia.types.header_block import HeaderBlock
 from chia.types.mempool_inclusion_status import MempoolInclusionStatus
 from chia.types.mempool_item import MempoolItem
@@ -84,7 +74,9 @@ from chia.types.peer_info import PeerInfo
 from chia.types.spend_bundle import SpendBundle
 from chia.types.transaction_queue_entry import TransactionQueueEntry
 from chia.types.unfinished_block import UnfinishedBlock
+from chia.types.validation_state import ValidationState
 from chia.types.weight_proof import WeightProof
+from chia.util.augmented_chain import AugmentedBlockchain
 from chia.util.bech32m import encode_puzzle_hash
 from chia.util.check_fork_next_block import check_fork_next_block
 from chia.util.condition_tools import pkm_pairs
@@ -95,28 +87,29 @@ from chia.util.db_wrapper import DBWrapper2, manage_connection
 from chia.util.errors import ConsensusError, Err, TimestampError, ValidationError
 from chia.util.ints import uint8, uint32, uint64, uint128
 from chia.util.limited_semaphore import LimitedSemaphore
-from chia.util.log_exceptions import log_exceptions
+from chia.util.network import is_localhost
 from chia.util.path import path_from_root
 from chia.util.profiler import enable_profiler, mem_profile_task, profile_task
 from chia.util.safe_cancel_task import cancel_task_safe
+from chia.util.task_referencer import create_referenced_task
 
 
 # This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2
 @dataclasses.dataclass
 class PeakPostProcessingResult:
-    mempool_peak_result: List[NewPeakItem]  # The new items from calling MempoolManager.new_peak
-    mempool_removals: List[MempoolRemoveInfo]  # The removed mempool items from calling MempoolManager.new_peak
+    mempool_peak_result: list[NewPeakItem]  # The new items from calling MempoolManager.new_peak
+    mempool_removals: list[MempoolRemoveInfo]  # The removed mempool items from calling MempoolManager.new_peak
     fns_peak_result: FullNodeStorePeakResult  # The result of calling FullNodeStore.new_peak
-    hints: List[Tuple[bytes32, bytes]]  # The hints added to the DB
-    lookup_coin_ids: List[bytes32]  # The coin IDs that we need to look up to notify wallets of changes
+    hints: list[tuple[bytes32, bytes]]  # The hints added to the DB
+    lookup_coin_ids: list[bytes32]  # The coin IDs that we need to look up to notify wallets of changes
 
 
 @dataclasses.dataclass(frozen=True)
 class WalletUpdate:
     fork_height: uint32
     peak: Peak
-    coin_records: List[CoinRecord]
-    hints: Dict[bytes32, bytes32]
+    coin_records: list[CoinRecord]
+    hints: dict[bytes32, bytes32]
 
 
 @final
@@ -128,32 +121,33 @@ class FullNode:
     _protocol_check: ClassVar[RpcServiceProtocol] = cast("FullNode", None)
 
     root_path: Path
-    config: Dict[str, Any]
+    config: dict[str, Any]
     constants: ConsensusConstants
-    signage_point_times: List[float]
+    signage_point_times: list[float]
     full_node_store: FullNodeStore
     log: logging.Logger
    db_path: Path
     wallet_sync_queue: asyncio.Queue[WalletUpdate]
-    _segment_task: Optional[asyncio.Task[None]] = None
+    _segment_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list)
     initialized: bool = False
     _server: Optional[ChiaServer] = None
     _shut_down: bool = False
-    pow_creation: Dict[bytes32, asyncio.Event] = dataclasses.field(default_factory=dict)
+    pow_creation: dict[bytes32, asyncio.Event] = dataclasses.field(default_factory=dict)
     state_changed_callback: Optional[StateChangedProtocol] = None
     full_node_peers: Optional[FullNodePeers] = None
     sync_store: SyncStore = dataclasses.field(default_factory=SyncStore)
     uncompact_task: Optional[asyncio.Task[None]] = None
-    compact_vdf_requests: Set[bytes32] = dataclasses.field(default_factory=set)
+    compact_vdf_requests: set[bytes32] = dataclasses.field(default_factory=set)
     # TODO: Logging isn't setup yet so the log entries related to parsing the
     # config would end up on stdout if handled here.
     multiprocessing_context: Optional[BaseContext] = None
-    _ui_tasks: Set[asyncio.Task[None]] = dataclasses.field(default_factory=set)
+    _ui_tasks: set[asyncio.Task[None]] = dataclasses.field(default_factory=set)
     subscriptions: PeerSubscriptions = dataclasses.field(default_factory=PeerSubscriptions)
     _transaction_queue_task: Optional[asyncio.Task[None]] = None
     simulator_transaction_callback: Optional[Callable[[bytes32], Awaitable[None]]] = None
-    _sync_task: Optional[asyncio.Task[None]] = None
+    _sync_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list)
     _transaction_queue: Optional[TransactionQueue] = None
+    _tx_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list)
     _compact_vdf_sem: Optional[LimitedSemaphore] = None
     _new_peak_sem: Optional[LimitedSemaphore] = None
     _add_transaction_semaphore: Optional[asyncio.Semaphore] = None
@@ -167,7 +161,7 @@ class FullNode:
     _timelord_lock: Optional[asyncio.Lock] = None
     weight_proof_handler: Optional[WeightProofHandler] = None
     # hashes of peaks that failed long sync on chip13 Validation
-    bad_peak_cache: Dict[bytes32, uint32] = dataclasses.field(default_factory=dict)
+    bad_peak_cache: dict[bytes32, uint32] = dataclasses.field(default_factory=dict)
     wallet_sync_task: Optional[asyncio.Task[None]] = None
     _bls_cache: BLSCache = dataclasses.field(default_factory=lambda: BLSCache(50000))
 
@@ -183,7 +177,7 @@ class FullNode:
     @classmethod
     async def create(
         cls,
-        config: Dict[str, Any],
+        config: dict[str, Any],
         root_path: Path,
         consensus_constants: ConsensusConstants,
         name: str = __name__,
@@ -265,6 +259,7 @@ class FullNode:
         start_time = time.monotonic()
         reserved_cores = self.config.get("reserved_cores", 0)
         single_threaded = self.config.get("single_threaded", False)
+        log_coins = self.config.get("log_coins", False)
         multiprocessing_start_method = process_config_start_method(config=self.config, log=self.log)
         self.multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
         self._blockchain = await Blockchain.create(
@@ -273,8 +268,8 @@ class FullNode:
             consensus_constants=self.constants,
             blockchain_dir=self.db_path.parent,
             reserved_cores=reserved_cores,
-            multiprocessing_context=self.multiprocessing_context,
             single_threaded=single_threaded,
+            log_coins=log_coins,
         )
 
         self._mempool_manager = MempoolManager(
@@ -285,12 +280,12 @@ class FullNode:
 
         # Transactions go into this queue from the server, and get sent to respond_transaction
         self._transaction_queue = TransactionQueue(1000, self.log)
-        self._transaction_queue_task: asyncio.Task[None] = asyncio.create_task(self._handle_transactions())
+        self._transaction_queue_task: asyncio.Task[None] = create_referenced_task(self._handle_transactions())
 
-        self._init_weight_proof = asyncio.create_task(self.initialize_weight_proof())
+        self._init_weight_proof = create_referenced_task(self.initialize_weight_proof())
 
         if self.config.get("enable_profiler", False):
-            asyncio.create_task(profile_task(self.root_path, "node", self.log))
+            create_referenced_task(profile_task(self.root_path, "node", self.log), known_unreferenced=True)
 
         self.profile_block_validation = self.config.get("profile_block_validation", False)
         if self.profile_block_validation:  # pragma: no cover
@@ -300,7 +295,7 @@ class FullNode:
             profile_dir.mkdir(parents=True, exist_ok=True)
 
         if self.config.get("enable_memory_profiler", False):
-            asyncio.create_task(mem_profile_task(self.root_path, "node", self.log))
+            create_referenced_task(mem_profile_task(self.root_path, "node", self.log), known_unreferenced=True)
 
         time_taken = time.monotonic() - start_time
         peak: Optional[BlockRecord] = self.blockchain.get_peak()
@@ -321,21 +316,23 @@ class FullNode:
             )
             async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
                 pending_tx = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), None)
-            assert len(pending_tx.items) == 0  # no pending transactions when starting up
-
-            full_peak: Optional[FullBlock] = await self.blockchain.get_full_peak()
-            assert full_peak is not None
-            state_change_summary = StateChangeSummary(peak, uint32(max(peak.height - 1, 0)), [], [], [], [])
-            ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
-                full_peak, state_change_summary, None
-            )
+                assert len(pending_tx.items) == 0  # no pending transactions when starting up
+
+                full_peak: Optional[FullBlock] = await self.blockchain.get_full_peak()
+                assert full_peak is not None
+                state_change_summary = StateChangeSummary(peak, uint32(max(peak.height - 1, 0)), [], [], [], [])
+                # Must be called under priority_mutex
+                ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
+                    full_peak, state_change_summary, None
+                )
+            # Can be called outside of priority_mutex
             await self.peak_post_processing_2(full_peak, None, state_change_summary, ppp_result)
         if self.config["send_uncompact_interval"] != 0:
             sanitize_weight_proof_only = False
             if "sanitize_weight_proof_only" in self.config:
                 sanitize_weight_proof_only = self.config["sanitize_weight_proof_only"]
             assert self.config["target_uncompact_proofs"] != 0
-            self.uncompact_task = asyncio.create_task(
+            self.uncompact_task = create_referenced_task(
                 self.broadcast_uncompact_blocks(
                     self.config["send_uncompact_interval"],
                     self.config["target_uncompact_proofs"],
@@ -343,11 +340,11 @@ class FullNode:
                 )
             )
         if self.wallet_sync_task is None or self.wallet_sync_task.done():
-            self.wallet_sync_task = asyncio.create_task(self._wallets_sync_task_handler())
+            self.wallet_sync_task = create_referenced_task(self._wallets_sync_task_handler())
 
         self.initialized = True
         if self.full_node_peers is not None:
-            asyncio.create_task(self.full_node_peers.start())
+            create_referenced_task(self.full_node_peers.start(), known_unreferenced=True)
         try:
             yield
         finally:
@@ -363,21 +360,39 @@ class FullNode:
             self.mempool_manager.shut_down()
 
             if self.full_node_peers is not None:
-                asyncio.create_task(self.full_node_peers.close())
+                create_referenced_task(self.full_node_peers.close(), known_unreferenced=True)
             if self.uncompact_task is not None:
                 self.uncompact_task.cancel()
             if self._transaction_queue_task is not None:
                 self._transaction_queue_task.cancel()
             cancel_task_safe(task=self.wallet_sync_task, log=self.log)
-            cancel_task_safe(task=self._sync_task, log=self.log)
-
+            for one_tx_task in self._tx_task_list:
+                if not one_tx_task.done():
+                    cancel_task_safe(task=one_tx_task, log=self.log)
+            for one_sync_task in self._sync_task_list:
+                if not one_sync_task.done():
+                    cancel_task_safe(task=one_sync_task, log=self.log)
+            for segment_task in self._segment_task_list:
+                cancel_task_safe(segment_task, self.log)
             for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()):
                 cancel_task_safe(task, self.log)
             if self._init_weight_proof is not None:
                 await asyncio.wait([self._init_weight_proof])
-            if self._sync_task is not None:
-                with contextlib.suppress(asyncio.CancelledError):
-                    await self._sync_task
+            for one_tx_task in self._tx_task_list:
+                if one_tx_task.done():
+                    self.log.info(f"TX task {one_tx_task.get_name()} done")
+                else:
+                    with contextlib.suppress(asyncio.CancelledError):
+                        self.log.info(f"Awaiting TX task {one_tx_task.get_name()}")
+                        await one_tx_task
+            for one_sync_task in self._sync_task_list:
+                if one_sync_task.done():
+                    self.log.info(f"Long sync task {one_sync_task.get_name()} done")
+                else:
+                    with contextlib.suppress(asyncio.CancelledError):
+                        self.log.info(f"Awaiting long sync task {one_sync_task.get_name()}")
+                        await one_sync_task
+            await asyncio.gather(*self._segment_task_list, return_exceptions=True)
 
     @property
     def block_store(self) -> BlockStore:
@@ -434,9 +449,9 @@ class FullNode:
         assert self._compact_vdf_sem is not None
         return self._compact_vdf_sem
 
-    def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
+    def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]:
         connections = self.server.get_connections(request_node_type)
-        con_info: List[Dict[str, Any]] = []
+        con_info: list[dict[str, Any]] = []
         if self.sync_store is not None:
             peak_store = self.sync_store.peer_to_peak
         else:
@@ -451,7 +466,7 @@ class FullNode:
                 peak_height = None
                 peak_hash = None
                 peak_weight = None
-            con_dict: Dict[str, Any] = {
+            con_dict: dict[str, Any] = {
                 "type": con.connection_type,
                 "local_port": con.local_port,
                 "peer_host": con.peer_info.host,
@@ -494,8 +509,14 @@ class FullNode:
             # We use a semaphore to make sure we don't send more than 200 concurrent calls of respond_transaction.
             # However, doing them one at a time would be slow, because they get sent to other processes.
             await self.add_transaction_semaphore.acquire()
+
+            # Clean up task reference list (used to prevent gc from killing running tasks)
+            for oldtask in self._tx_task_list[:]:
+                if oldtask.done():
+                    self._tx_task_list.remove(oldtask)
+
             item: TransactionQueueEntry = await self.transaction_queue.pop()
-            asyncio.create_task(self._handle_one_transaction(item))
+            self._tx_task_list.append(create_referenced_task(self._handle_one_transaction(item)))
 
     async def initialize_weight_proof(self) -> None:
         self.weight_proof_handler = WeightProofHandler(
@@ -509,7 +530,7 @@ class FullNode:
 
     def set_server(self, server: ChiaServer) -> None:
         self._server = server
-        dns_servers: List[str] = []
+        dns_servers: list[str] = []
         network_name = self.config["selected_network"]
         try:
             default_port = self.config["network_overrides"]["config"][network_name]["default_full_node_port"]
@@ -539,7 +560,7 @@ class FullNode:
             self.log.error(f"Exception in peer discovery: {e}")
             self.log.error(f"Exception Stack: {error_stack}")
 
-    def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None:
+    def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None:
         if self.state_changed_callback is not None:
             self.state_changed_callback(change, change_data)
 
@@ -575,22 +596,29 @@ class FullNode:
                 self.sync_store.batch_syncing.remove(peer.peer_node_id)
                 self.log.error(f"Error short batch syncing, could not fetch block at height {start_height}")
                 return False
-            if not self.blockchain.contains_block(first.block.prev_header_hash):
+            hash = self.blockchain.height_to_hash(first.block.height - 1)
+            assert hash is not None
+            if hash != first.block.prev_header_hash:
                 self.log.info("Batch syncing stopped, this is a deep chain")
                 self.sync_store.batch_syncing.remove(peer.peer_node_id)
                 # First sb not connected to our blockchain, do a long sync instead
                 return False
 
         batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
-        if self._segment_task is not None and (not self._segment_task.done()):
-            try:
-                self._segment_task.cancel()
-            except Exception as e:
-                self.log.warning(f"failed to cancel segment task {e}")
-        self._segment_task = None
+        for task in self._segment_task_list[:]:
+            if task.done():
+                self._segment_task_list.remove(task)
+            else:
+                cancel_task_safe(task=task, log=self.log)
 
         try:
             peer_info = peer.get_peer_logging()
+            if start_height > 0:
+                fork_hash = self.blockchain.height_to_hash(uint32(start_height - 1))
+            else:
+                fork_hash = self.constants.GENESIS_CHALLENGE
+            assert fork_hash
+            fork_info = ForkInfo(start_height - 1, start_height - 1, fork_hash)
             for height in range(start_height, target_height, batch_size):
                 end_height = min(target_height, height + batch_size)
                 request = RequestBlocks(uint32(height), uint32(end_height), True)
@@ -607,8 +635,9 @@ class FullNode:
                     ssi, diff = get_next_sub_slot_iters_and_difficulty(
                         self.constants, new_slot, prev_b, self.blockchain
                     )
-                    success, state_change_summary, ssi, diff, _, _ = await self.add_block_batch(
-                        response.blocks, peer_info, None, ssi, diff
+                    vs = ValidationState(ssi, diff, None)
+                    success, state_change_summary = await self.add_block_batch(
+                        response.blocks, peer_info, fork_info, vs
                     )
                     if not success:
                         raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}")
@@ -621,7 +650,6 @@ class FullNode:
                                 state_change_summary,
                                 peer,
                             )
-                            await self.peak_post_processing_2(peak_fb, peer, state_change_summary, ppp_result)
                         except Exception:
                             # Still do post processing after cancel (or exception)
                             peak_fb = await self.blockchain.get_full_peak()
@@ -630,10 +658,11 @@ class FullNode:
                             raise
                         finally:
                             self.log.info(f"Added blocks {height}-{end_height}")
-        except (asyncio.CancelledError, Exception):
+                if state_change_summary is not None and peak_fb is not None:
+                    # Call outside of priority_mutex to encourage concurrency
+                    await self.peak_post_processing_2(peak_fb, peer, state_change_summary, ppp_result)
+        finally:
             self.sync_store.batch_syncing.remove(peer.peer_node_id)
-            raise
-        self.sync_store.batch_syncing.remove(peer.peer_node_id)
         return True
 
     async def short_sync_backtrack(
@@ -674,15 +703,22 @@ class FullNode:
                         f"Failed to fetch block {curr_height} from {peer.get_peer_logging()}, wrong type {type(curr)}"
                     )
                 blocks.append(curr.block)
-                if self.blockchain.contains_block(curr.block.prev_header_hash) or curr_height == 0:
+                if curr_height == 0:
+                    found_fork_point = True
+                    break
+                hash_at_height = self.blockchain.height_to_hash(curr.block.height - 1)
+                if hash_at_height is not None and hash_at_height == curr.block.prev_header_hash:
                    found_fork_point = True
                     break
                 curr_height -= 1
             if found_fork_point:
+                first_block = blocks[-1]  # blocks are reveresd this is the lowest block to add
+                # we create the fork_info and pass it here so it would be updated on each call to add_block
+                fork_info = ForkInfo(first_block.height - 1, first_block.height - 1, first_block.prev_header_hash)
                 for block in reversed(blocks):
                     # when syncing, we won't share any signatures with the
                     # mempool, so there's no need to pass in the BLS cache.
-                    await self.add_block(block, peer)
+                    await self.add_block(block, peer, fork_info=fork_info)
         except (asyncio.CancelledError, Exception):
             self.sync_store.decrement_backtrack_syncing(node_id=peer.peer_node_id)
             raise
@@ -711,7 +747,7 @@ class FullNode:
             # Updates heights in the UI. Sleeps 1.5s before, so other peers have time to update their peaks as well.
             # Limit to 3 refreshes.
             if not seen_header_hash and len(self._ui_tasks) < 3:
-                self._ui_tasks.add(asyncio.create_task(self._refresh_ui_connections(1.5)))
+                self._ui_tasks.add(create_referenced_task(self._refresh_ui_connections(1.5)))
             # Prune completed connect tasks
             self._ui_tasks = set(filter(lambda t: not t.done(), self._ui_tasks))
         except Exception as e:
@@ -733,7 +769,7 @@ class FullNode:
             # If peer connects while we are syncing, check if they have the block we are syncing towards
             target_peak = self.sync_store.target_peak
             if target_peak is not None and request.header_hash != target_peak.header_hash:
-                peak_peers: Set[bytes32] = self.sync_store.get_peers_that_have_peak([target_peak.header_hash])
+                peak_peers: set[bytes32] = self.sync_store.get_peers_that_have_peak([target_peak.header_hash])
                 # Don't ask if we already know this peer has the peak
                 if peer.peer_node_id not in peak_peers:
                     target_peak_response: Optional[RespondBlock] = await peer.call_api(
@@ -774,9 +810,16 @@ class FullNode:
         if await self.short_sync_batch(peer, uint32(max(curr_peak_height - 6, 0)), request.height):
             return None
 
+        # Clean up task reference list (used to prevent gc from killing running tasks)
+        for oldtask in self._sync_task_list[:]:
+            if oldtask.done():
+                self._sync_task_list.remove(oldtask)
+
         # This is the either the case where we were not able to sync successfully (for example, due to the fork
         # point being in the past), or we are very far behind. Performs a long sync.
-        self._sync_task = asyncio.create_task(self._sync())
+        # Multiple tasks may be created here. If we don't save all handles, a task could enter a sync object
+        # and be cleaned up by the GC, corrupting the sync object and possibly not allowing anything else in.
+        self._sync_task_list.append(create_referenced_task(self._sync()))
 
     async def send_peak_to_timelords(
         self, peak_block: Optional[FullBlock] = None, peer: Optional[WSChiaConnection] = None
@@ -864,7 +907,7 @@ class FullNode:
         self._state_changed("add_connection")
         self._state_changed("sync_mode")
         if self.full_node_peers is not None:
-            asyncio.create_task(self.full_node_peers.on_connect(connection))
+            create_referenced_task(self.full_node_peers.on_connect(connection))
 
         if self.initialized is False:
             return None
@@ -926,6 +969,7 @@ class FullNode:
             - Disconnect peers that provide invalid blocks or don't have the blocks
         """
         # Ensure we are only syncing once and not double calling this method
+        fork_point: Optional[uint32] = None
         if self.sync_store.get_sync_mode():
             return None
 
@@ -941,7 +985,7 @@ class FullNode:
         # Wait until we have 3 peaks or up to a max of 30 seconds
        max_iterations = int(self.config.get("max_sync_wait", 30)) * 10
 
-        self.log.info(f"Waiting to receive peaks from peers. (timeout: {max_iterations/10}s)")
+        self.log.info(f"Waiting to receive peaks from peers. (timeout: {max_iterations / 10}s)")
         peaks = []
         for i in range(max_iterations):
             peaks = [peak.header_hash for peak in self.sync_store.get_peak_of_each_peer().values()]
@@ -988,6 +1032,12 @@ class FullNode:
             # Ensures that the fork point does not change
             async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
                 await self.blockchain.warmup(fork_point)
+                fork_point = await check_fork_next_block(
+                    self.blockchain,
+                    fork_point,
+                    self.get_peers_with_peak(target_peak.header_hash),
+                    node_next_block_check,
+                )
             await self.sync_from_fork_point(fork_point, target_peak.height, target_peak.header_hash, summaries)
         except asyncio.CancelledError:
             self.log.warning("Syncing failed, CancelledError")
@@ -997,16 +1047,20 @@ class FullNode:
         finally:
             if self._shut_down:
                 return None
-            await self._finish_sync()
+            await self._finish_sync(fork_point)
 
     async def request_validate_wp(
         self, peak_header_hash: bytes32, peak_height: uint32, peak_weight: uint128
-    ) -> Tuple[uint32, List[SubEpochSummary]]:
+    ) -> tuple[uint32, list[SubEpochSummary]]:
        if self.weight_proof_handler is None:
            raise RuntimeError("Weight proof handler is None")
         peers_with_peak = self.get_peers_with_peak(peak_header_hash)
         # Request weight proof from a random peer
-        self.log.info(f"Total of {len(peers_with_peak)} peers with peak {peak_height}")
+        peers_with_peak_len = len(peers_with_peak)
+        self.log.info(f"Total of {peers_with_peak_len} peers with peak {peak_height}")
+        # We can't choose from an empty sequence
+        if peers_with_peak_len == 0:
+            raise RuntimeError(f"Not performing sync, no peers with peak {peak_height}")
         weight_proof_peer: WSChiaConnection = random.choice(peers_with_peak)
         self.log.info(
             f"Requesting weight proof from peer {weight_proof_peer.peer_info.host} up to height {peak_height}"
@@ -1058,14 +1112,9 @@ class FullNode:
         fork_point_height: uint32,
         target_peak_sb_height: uint32,
         peak_hash: bytes32,
-        summaries: List[SubEpochSummary],
+        summaries: list[SubEpochSummary],
     ) -> None:
-        buffer_size = 4
         self.log.info(f"Start syncing from fork point at {fork_point_height} up to {target_peak_sb_height}")
-        peers_with_peak: List[WSChiaConnection] = self.get_peers_with_peak(peak_hash)
-        fork_point_height = await check_fork_next_block(
1067
- self.blockchain, fork_point_height, peers_with_peak, node_next_block_check
1068
- )
1069
1118
  batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
1070
1119
  counter = 0
1071
1120
  if fork_point_height != 0:
@@ -1085,12 +1134,60 @@ class FullNode:
1085
1134
  # normally "fork_point" or "fork_height" refers to the first common
1086
1135
  # block between the main chain and the fork. Here "fork_point_height"
1087
1136
  # seems to refer to the first diverging block
1137
+ # in case we're validating a reorg fork (i.e. not extending the
1138
+ # main chain), we need to record the coin set from that fork in
1139
+ # fork_info. Otherwise validation is very expensive, especially
1140
+ # for deep reorgs
1141
+ if fork_point_height > 0:
1142
+ fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
1143
+ assert fork_hash is not None
1144
+ else:
1145
+ fork_hash = self.constants.GENESIS_CHALLENGE
1146
+ fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)
1088
1147
 
1089
- async def fetch_block_batches(
1090
- batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]]
1091
- ) -> None:
1148
+ if fork_point_height == 0:
1149
+ ssi = self.constants.SUB_SLOT_ITERS_STARTING
1150
+ diff = self.constants.DIFFICULTY_STARTING
1151
+ prev_ses_block = None
1152
+ else:
1153
+ prev_b_hash = self.blockchain.height_to_hash(fork_point_height)
1154
+ assert prev_b_hash is not None
1155
+ prev_b = await self.blockchain.get_full_block(prev_b_hash)
1156
+ assert prev_b is not None
1157
+ ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None)
1158
+
1159
+ # we need an augmented blockchain to validate blocks in batches. The
1160
+ # batch must be treated as if it's part of the chain to validate the
1161
+ # blocks in it. We also need them to keep appearing as if they're part
1162
+ # of the chain when pipelining the validation of blocks. We start
1163
+ # validating the next batch while still adding the first batch to the
1164
+ # chain.
1165
+ blockchain = AugmentedBlockchain(self.blockchain)
1166
+ peers_with_peak: list[WSChiaConnection] = self.get_peers_with_peak(peak_hash)
1167
+
1168
+ async def fetch_blocks(output_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]]) -> None:
1169
+ # the rate limit for respond_blocks is 100 messages / 60 seconds.
1170
+ # But the limit is scaled to 30% for outbound messages, so that's 30
1171
+ # messages per 60 seconds.
1172
+ # That's 2 seconds per request.
1173
+ seconds_per_request = 2
1092
1174
  start_height, end_height = 0, 0
1093
- new_peers_with_peak: List[WSChiaConnection] = peers_with_peak[:]
1175
+
1176
+ # the timestamp of when the next request_block message is allowed to
1177
+ # be sent. It's initialized to the current time, and bumped by the
1178
+ # seconds_per_request every time we send a request. This ensures we
1179
+ # won't exceed the 100 requests / 60 seconds rate limit.
1180
+ # Whichever peer has the lowest timestamp is the one we request
1181
+ # from. peers that take more than 5 seconds to respond are pushed to
1182
+ # the end of the queue, to be less likely to request from.
1183
+
1184
+ # This should be cleaned up to not be a hard coded value, and maybe
1185
+ # allow higher request rates (and align the request_blocks and
1186
+ # respond_blocks rate limits).
1187
+ now = time.monotonic()
1188
+ new_peers_with_peak: list[tuple[WSChiaConnection, float]] = [(c, now) for c in peers_with_peak[:]]
1189
+ self.log.info(f"peers with peak: {len(new_peers_with_peak)}")
1190
+ random.shuffle(new_peers_with_peak)
1094
1191
  try:
1095
1192
  # block request ranges are *inclusive*, this requires some
1096
1193
  # gymnastics of this range (+1 to make it exclusive, like normal
@@ -1098,100 +1195,185 @@ class FullNode:
1098
1195
  for start_height in range(fork_point_height, target_peak_sb_height + 1, batch_size):
1099
1196
  end_height = min(target_peak_sb_height, start_height + batch_size - 1)
1100
1197
  request = RequestBlocks(uint32(start_height), uint32(end_height), True)
1198
+ new_peers_with_peak.sort(key=lambda pair: pair[1])
1101
1199
  fetched = False
1102
- for peer in random.sample(new_peers_with_peak, len(new_peers_with_peak)):
1200
+ for idx, (peer, timestamp) in enumerate(new_peers_with_peak):
1103
1201
  if peer.closed:
1104
1202
  continue
1105
- response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=30)
1203
+
1204
+ start = time.monotonic()
1205
+ if start < timestamp:
1206
+ # rate limit ourselves, since we sent a message to
1207
+ # this peer too recently
1208
+ await asyncio.sleep(timestamp - start)
1209
+ start = time.monotonic()
1210
+
1211
+ # update the timestamp, now that we're sending a request
1212
+ # it's OK for the timestamp to fall behind wall-clock
1213
+ # time. It just means we're allowed to send more
1214
+ # requests to catch up
1215
+ if is_localhost(peer.peer_info.host):
1216
+ # we don't apply rate limits to localhost, and our
1217
+ # tests depend on it
1218
+ bump = 0.1
1219
+ else:
1220
+ bump = seconds_per_request
1221
+
1222
+ new_peers_with_peak[idx] = (
1223
+ new_peers_with_peak[idx][0],
1224
+ new_peers_with_peak[idx][1] + bump,
1225
+ )
1226
+ # the fewer peers we have, the more willing we should be
1227
+ # to wait for them.
1228
+ timeout = int(30 + 30 / len(new_peers_with_peak))
1229
+ response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=timeout)
1230
+ end = time.monotonic()
1106
1231
  if response is None:
1232
+ self.log.info(f"peer timed out after {end - start:.1f} s")
1107
1233
  await peer.close()
1108
1234
  elif isinstance(response, RespondBlocks):
1109
- await batch_queue.put((peer, response.blocks))
1235
+ if end - start > 5:
1236
+ self.log.info(f"peer took {end - start:.1f} s to respond to request_blocks")
1237
+ # this isn't a great peer, reduce its priority
1238
+ # to prefer any peers that had to wait for it.
1239
+ # By setting the next allowed timestamp to now,
1240
+ # means that any other peer that has waited for
1241
+ # this will have its next allowed timestamp in
1242
+ # the passed, and be prefered multiple times
1243
+ # over this peer.
1244
+ new_peers_with_peak[idx] = (
1245
+ new_peers_with_peak[idx][0],
1246
+ end,
1247
+ )
1248
+ start = time.monotonic()
1249
+ await output_queue.put((peer, response.blocks))
1250
+ end = time.monotonic()
1251
+ if end - start > 1:
1252
+ self.log.info(
1253
+ f"sync pipeline back-pressure. stalled {end - start:0.2f} "
1254
+ "seconds on prevalidate block"
1255
+ )
1110
1256
  fetched = True
1111
1257
  break
1112
1258
  if fetched is False:
1113
1259
  self.log.error(f"failed fetching {start_height} to {end_height} from peers")
1114
1260
  return
1115
1261
  if self.sync_store.peers_changed.is_set():
1116
- new_peers_with_peak = self.get_peers_with_peak(peak_hash)
1262
+ existing_peers = {id(c): timestamp for c, timestamp in new_peers_with_peak}
1263
+ peers = self.get_peers_with_peak(peak_hash)
1264
+ new_peers_with_peak = [(c, existing_peers.get(id(c), end)) for c in peers]
1265
+ random.shuffle(new_peers_with_peak)
1117
1266
  self.sync_store.peers_changed.clear()
1267
+ self.log.info(f"peers with peak: {len(new_peers_with_peak)}")
1118
1268
  except Exception as e:
1119
1269
  self.log.error(f"Exception fetching {start_height} to {end_height} from peer {e}")
1120
1270
  finally:
1121
1271
  # finished signal with None
1122
- await batch_queue.put(None)
1272
+ await output_queue.put(None)
1273
+
1274
+ async def validate_blocks(
1275
+ input_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]],
1276
+ output_queue: asyncio.Queue[
1277
+ Optional[
1278
+ tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]
1279
+ ]
1280
+ ],
1281
+ ) -> None:
1282
+ nonlocal blockchain
1283
+ nonlocal fork_info
1284
+ first_batch = True
1285
+
1286
+ vs = ValidationState(ssi, diff, prev_ses_block)
1287
+
1288
+ try:
1289
+ while True:
1290
+ res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await input_queue.get()
1291
+ if res is None:
1292
+ self.log.debug("done fetching blocks")
1293
+ return None
1294
+ peer, blocks = res
1295
+
1296
+ # skip_blocks is only relevant at the start of the sync,
1297
+ # to skip blocks we already have in the database (and have
1298
+ # been validated). Once we start validating blocks, we
1299
+ # shouldn't be skipping any.
1300
+ blocks_to_validate = await self.skip_blocks(blockchain, blocks, fork_info, vs)
1301
+ assert first_batch or len(blocks_to_validate) == len(blocks)
1302
+ next_validation_state = copy.copy(vs)
1303
+
1304
+ if len(blocks_to_validate) == 0:
1305
+ continue
1123
1306
 
1124
- async def validate_block_batches(
1125
- inner_batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]]
1307
+ first_batch = False
1308
+
1309
+ futures: list[Awaitable[PreValidationResult]] = []
1310
+ for block in blocks_to_validate:
1311
+ futures.extend(
1312
+ await self.prevalidate_blocks(
1313
+ blockchain,
1314
+ [block],
1315
+ vs,
1316
+ summaries,
1317
+ )
1318
+ )
1319
+ start = time.monotonic()
1320
+ await output_queue.put((peer, next_validation_state, list(futures), blocks_to_validate))
1321
+ end = time.monotonic()
1322
+ if end - start > 1:
1323
+ self.log.info(f"sync pipeline back-pressure. stalled {end - start:0.2f} seconds on add_block()")
1324
+ except Exception:
1325
+ self.log.exception("Exception validating")
1326
+ finally:
1327
+ # finished signal with None
1328
+ await output_queue.put(None)
1329
+
1330
+ async def ingest_blocks(
1331
+ input_queue: asyncio.Queue[
1332
+ Optional[
1333
+ tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]
1334
+ ]
1335
+ ],
1126
1336
  ) -> None:
1127
- fork_info: Optional[ForkInfo] = None
1128
- if fork_point_height == 0:
1129
- ssi = self.constants.SUB_SLOT_ITERS_STARTING
1130
- diff = self.constants.DIFFICULTY_STARTING
1131
- prev_ses_block = None
1132
- else:
1133
- prev_b_hash = self.blockchain.height_to_hash(fork_point_height)
1134
- assert prev_b_hash is not None
1135
- prev_b = await self.blockchain.get_full_block(prev_b_hash)
1136
- assert prev_b is not None
1137
- ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None)
1337
+ nonlocal fork_info
1138
1338
  block_rate = 0
1139
1339
  block_rate_time = time.monotonic()
1140
1340
  block_rate_height = -1
1141
1341
  while True:
1142
- res: Optional[Tuple[WSChiaConnection, List[FullBlock]]] = await inner_batch_queue.get()
1342
+ res = await input_queue.get()
1143
1343
  if res is None:
1144
- self.log.debug("done fetching blocks")
1344
+ self.log.debug("done validating blocks")
1145
1345
  return None
1146
- peer, blocks = res
1346
+ peer, vs, futures, blocks = res
1147
1347
  start_height = blocks[0].height
1148
1348
  end_height = blocks[-1].height
1149
1349
 
1150
1350
  if block_rate_height == -1:
1151
1351
  block_rate_height = start_height
1152
1352
 
1153
- # in case we're validating a reorg fork (i.e. not extending the
1154
- # main chain), we need to record the coin set from that fork in
1155
- # fork_info. Otherwise validation is very expensive, especially
1156
- # for deep reorgs
1157
- peak: Optional[BlockRecord]
1158
- if fork_info is None:
1159
- peak = self.blockchain.get_peak()
1160
- extending_main_chain: bool = peak is None or (
1161
- peak.header_hash == blocks[0].prev_header_hash or peak.header_hash == blocks[0].header_hash
1162
- )
1163
- # if we're simply extending the main chain, it's important
1164
- # *not* to pass in a ForkInfo object, as it can potentially
1165
- # accrue a large state (with no value, since we can validate
1166
- # against the CoinStore)
1167
- if not extending_main_chain:
1168
- if fork_point_height == 0:
1169
- fork_info = ForkInfo(-1, -1, self.constants.GENESIS_CHALLENGE)
1170
- else:
1171
- fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
1172
- assert fork_hash is not None
1173
- fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)
1174
-
1175
- success, state_change_summary, ssi, diff, prev_ses_block, err = await self.add_block_batch(
1353
+ pre_validation_results = list(await asyncio.gather(*futures))
1354
+ # The ValidationState object (vs) is an in-out parameter. The add_prevalidated_blocks()
1355
+ # call below will update it.
1356
+ state_change_summary, err = await self.add_prevalidated_blocks(
1357
+ blockchain,
1176
1358
  blocks,
1177
- peer.get_peer_logging(),
1359
+ pre_validation_results,
1178
1360
  fork_info,
1179
- ssi,
1180
- diff,
1181
- prev_ses_block,
1182
- summaries,
1361
+ peer.peer_info,
1362
+ vs,
1183
1363
  )
1184
- if success is False:
1364
+ if err is not None:
1185
1365
  await peer.close(600)
1186
- raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
1366
+ raise ValueError(f"Failed to validate block batch {start_height} to {end_height}: {err}")
1187
1367
  if end_height - block_rate_height > 100:
1188
1368
  now = time.monotonic()
1189
1369
  block_rate = int((end_height - block_rate_height) // (now - block_rate_time))
1190
1370
  block_rate_time = now
1191
1371
  block_rate_height = end_height
1192
1372
 
1193
- self.log.info(f"Added blocks {start_height} to {end_height} ({block_rate} blocks/s)")
1194
- peak = self.blockchain.get_peak()
1373
+ self.log.info(
1374
+ f"Added blocks {start_height} to {end_height} ({block_rate} blocks/s) (from: {peer.peer_info.ip})"
1375
+ )
1376
+ peak: Optional[BlockRecord] = self.blockchain.get_peak()
1195
1377
  if state_change_summary is not None:
1196
1378
  assert peak is not None
1197
1379
  # Hints must be added to the DB. The other post-processing tasks are not required when syncing
@@ -1208,20 +1390,35 @@ class FullNode:
1208
1390
  # height, in that case.
1209
1391
  self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE)
1210
1392
 
1211
- batch_queue_input: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] = asyncio.Queue(
1212
- maxsize=buffer_size
1213
- )
1214
- fetch_task = asyncio.Task(fetch_block_batches(batch_queue_input))
1215
- validate_task = asyncio.Task(validate_block_batches(batch_queue_input))
1393
+ block_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue(maxsize=10)
1394
+ validation_queue: asyncio.Queue[
1395
+ Optional[tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]]
1396
+ ] = asyncio.Queue(maxsize=10)
1397
+
1398
+ fetch_task = create_referenced_task(fetch_blocks(block_queue))
1399
+ validate_task = create_referenced_task(validate_blocks(block_queue, validation_queue))
1400
+ ingest_task = create_referenced_task(ingest_blocks(validation_queue))
1216
1401
  try:
1217
- with log_exceptions(log=self.log, message="sync from fork point failed"):
1218
- await asyncio.gather(fetch_task, validate_task)
1402
+ await asyncio.gather(fetch_task, validate_task, ingest_task)
1219
1403
  except Exception:
1220
- assert validate_task.done()
1221
- fetch_task.cancel() # no need to cancel validate_task, if we end up here validate_task is already done
1404
+ self.log.exception("sync from fork point failed")
1405
+ finally:
1406
+ cancel_task_safe(validate_task, self.log)
1407
+ cancel_task_safe(fetch_task)
1408
+ cancel_task_safe(ingest_task)
1409
+
1410
+ # we still need to await all the pending futures of the
1411
+ # prevalidation steps posted to the thread pool
1412
+ while not validation_queue.empty():
1413
+ result = validation_queue.get_nowait()
1414
+ if result is None:
1415
+ continue
1416
+
1417
+ _, _, futures, _ = result
1418
+ await asyncio.gather(*futures)
1222
1419
 
1223
- def get_peers_with_peak(self, peak_hash: bytes32) -> List[WSChiaConnection]:
1224
- peer_ids: Set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash])
1420
+ def get_peers_with_peak(self, peak_hash: bytes32) -> list[WSChiaConnection]:
1421
+ peer_ids: set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash])
1225
1422
  if len(peer_ids) == 0:
1226
1423
  self.log.warning(f"Not syncing, no peers with header_hash {peak_hash} ")
1227
1424
  return []
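
sync_from_fork_point() now wires three coroutines (fetch, validate, ingest) together through bounded asyncio queues, with None as the end-of-stream sentinel and back-pressure coming from the queues' maxsize. A stripped-down sketch of the same pipeline shape, with placeholder stage bodies rather than the real block handling:

import asyncio
from typing import Optional

async def fetch(out_q: asyncio.Queue[Optional[int]]) -> None:
    try:
        for batch in range(5):
            await out_q.put(batch)  # blocks when the queue is full (back-pressure)
    finally:
        await out_q.put(None)  # end-of-stream sentinel

async def validate(in_q: asyncio.Queue[Optional[int]], out_q: asyncio.Queue[Optional[str]]) -> None:
    try:
        while (batch := await in_q.get()) is not None:
            await out_q.put(f"validated-{batch}")
    finally:
        await out_q.put(None)

async def ingest(in_q: asyncio.Queue[Optional[str]]) -> None:
    while (item := await in_q.get()) is not None:
        print("added", item)

async def main() -> None:
    block_q: asyncio.Queue[Optional[int]] = asyncio.Queue(maxsize=2)
    validated_q: asyncio.Queue[Optional[str]] = asyncio.Queue(maxsize=2)
    await asyncio.gather(fetch(block_q), validate(block_q, validated_q), ingest(validated_q))

asyncio.run(main())
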
@@ -1240,7 +1437,7 @@ class FullNode:
1240
1437
  self.log.debug(
1241
1438
  f"update_wallets - fork_height: {wallet_update.fork_height}, peak_height: {wallet_update.peak.height}"
1242
1439
  )
1243
- changes_for_peer: Dict[bytes32, Set[CoinState]] = {}
1440
+ changes_for_peer: dict[bytes32, set[CoinState]] = {}
1244
1441
  for coin_record in wallet_update.coin_records:
1245
1442
  coin_id = coin_record.name
1246
1443
  subscribed_peers = self.subscriptions.peers_for_coin_id(coin_id)
@@ -1276,44 +1473,75 @@ class FullNode:
1276
1473
 
1277
1474
  async def add_block_batch(
1278
1475
  self,
1279
- all_blocks: List[FullBlock],
1476
+ all_blocks: list[FullBlock],
1280
1477
  peer_info: PeerInfo,
1281
- fork_info: Optional[ForkInfo],
1282
- current_ssi: uint64,
1283
- current_difficulty: uint64,
1284
- prev_ses_block: Optional[BlockRecord] = None,
1285
- wp_summaries: Optional[List[SubEpochSummary]] = None,
1286
- ) -> Tuple[bool, Optional[StateChangeSummary], uint64, uint64, Optional[BlockRecord], Optional[Err]]:
1478
+ fork_info: ForkInfo,
1479
+ vs: ValidationState, # in-out parameter
1480
+ wp_summaries: Optional[list[SubEpochSummary]] = None,
1481
+ ) -> tuple[bool, Optional[StateChangeSummary]]:
1287
1482
  # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i
1288
1483
  # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced
1289
1484
 
1290
- blocks_to_validate: List[FullBlock] = []
1485
+ pre_validate_start = time.monotonic()
1486
+ blockchain = AugmentedBlockchain(self.blockchain)
1487
+ blocks_to_validate = await self.skip_blocks(blockchain, all_blocks, fork_info, vs)
1488
+
1489
+ if len(blocks_to_validate) == 0:
1490
+ return True, None
1491
+
1492
+ futures = await self.prevalidate_blocks(
1493
+ blockchain,
1494
+ blocks_to_validate,
1495
+ copy.copy(vs),
1496
+ wp_summaries,
1497
+ )
1498
+ pre_validation_results = list(await asyncio.gather(*futures))
1499
+
1500
+ agg_state_change_summary, err = await self.add_prevalidated_blocks(
1501
+ blockchain,
1502
+ blocks_to_validate,
1503
+ pre_validation_results,
1504
+ fork_info,
1505
+ peer_info,
1506
+ vs,
1507
+ )
1508
+
1509
+ if agg_state_change_summary is not None:
1510
+ self._state_changed("new_peak")
1511
+ self.log.debug(
1512
+ f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, "
1513
+ f"advanced: True"
1514
+ )
1515
+ return err is None, agg_state_change_summary
1516
+
1517
+ async def skip_blocks(
1518
+ self,
1519
+ blockchain: AugmentedBlockchain,
1520
+ all_blocks: list[FullBlock],
1521
+ fork_info: ForkInfo,
1522
+ vs: ValidationState, # in-out parameter
1523
+ ) -> list[FullBlock]:
1524
+ blocks_to_validate: list[FullBlock] = []
1291
1525
  for i, block in enumerate(all_blocks):
1292
1526
  header_hash = block.header_hash
1293
- block_rec = await self.blockchain.get_block_record_from_db(header_hash)
1527
+ block_rec = await blockchain.get_block_record_from_db(header_hash)
1294
1528
  if block_rec is None:
1295
1529
  blocks_to_validate = all_blocks[i:]
1296
1530
  break
1297
1531
  else:
1298
- self.blockchain.add_block_record(block_rec)
1532
+ blockchain.add_block_record(block_rec)
1299
1533
  if block_rec.sub_epoch_summary_included:
1300
1534
  # already validated block, update sub slot iters, difficulty and prev sub epoch summary
1301
- prev_ses_block = block_rec
1535
+ vs.prev_ses_block = block_rec
1302
1536
  if block_rec.sub_epoch_summary_included.new_sub_slot_iters is not None:
1303
- current_ssi = block_rec.sub_epoch_summary_included.new_sub_slot_iters
1537
+ vs.ssi = block_rec.sub_epoch_summary_included.new_sub_slot_iters
1304
1538
  if block_rec.sub_epoch_summary_included.new_difficulty is not None:
1305
- current_difficulty = block_rec.sub_epoch_summary_included.new_difficulty
1539
+ vs.difficulty = block_rec.sub_epoch_summary_included.new_difficulty
1306
1540
 
1307
- if fork_info is None:
1308
- continue
1309
1541
  # the below section updates the fork_info object, if
1310
1542
  # there is one.
1311
-
1312
- # TODO: it seems unnecessary to request overlapping block ranges
1313
- # when syncing
1314
1543
  if block.height <= fork_info.peak_height:
1315
1544
  continue
1316
-
1317
1545
  # we have already validated this block once, no need to do it again.
1318
1546
  # however, if this block is not part of the main chain, we need to
1319
1547
  # update the fork context with its additions and removals
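
add_block_batch() above now threads one ValidationState through skip_blocks(), prevalidate_blocks() and add_prevalidated_blocks(); the prevalidation step is handed a copy (copy.copy(vs)) so it can advance its own view of sub-slot iters and difficulty without disturbing the state the later add step still needs. A small sketch of that in-out-parameter pattern with a hypothetical State class (not the real ValidationState):

import copy
from dataclasses import dataclass

@dataclass
class State:
    # hypothetical stand-in for ValidationState (ssi / difficulty / prev_ses_block)
    difficulty: int

def prevalidate(blocks: list[int], vs: State) -> list[int]:
    # mutates its own copy of the state while scanning ahead
    results = []
    for b in blocks:
        results.append(b * vs.difficulty)
        vs.difficulty += 1
    return results

def add_blocks(blocks: list[int], results: list[int], vs: State) -> None:
    # in-out parameter: updated as each block is actually added
    for b, r in zip(blocks, results):
        assert r == b * vs.difficulty
        vs.difficulty += 1

vs = State(difficulty=7)
blocks = [1, 2, 3]
results = prevalidate(blocks, copy.copy(vs))  # copy, so vs is preserved for add_blocks
add_blocks(blocks, results, vs)  # vs now reflects the added batch
assert vs.difficulty == 10
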
@@ -1326,52 +1554,60 @@ class FullNode:
1326
1554
  # removals in fork_info.
1327
1555
  await self.blockchain.advance_fork_info(block, fork_info)
1328
1556
  await self.blockchain.run_single_block(block, fork_info)
1557
+ return blocks_to_validate
1329
1558
 
1330
- if len(blocks_to_validate) == 0:
1331
- return True, None, current_ssi, current_difficulty, prev_ses_block, None
1559
+ async def prevalidate_blocks(
1560
+ self,
1561
+ blockchain: AugmentedBlockchain,
1562
+ blocks_to_validate: list[FullBlock],
1563
+ vs: ValidationState,
1564
+ wp_summaries: Optional[list[SubEpochSummary]] = None,
1565
+ ) -> Sequence[Awaitable[PreValidationResult]]:
1566
+ """
1567
+ This is a thin wrapper over pre_validate_block().
1332
1568
 
1569
+ Args:
1570
+ blockchain: the AugmentedBlockchain the blocks are validated against
1571
+ blocks_to_validate: the batch of blocks to pre-validate
1572
+ vs: The ValidationState for the first block in the batch. This is an in-out
1573
+ parameter. It will be updated to be the validation state for the next
1574
+ batch of blocks.
1575
+ wp_summaries: optional sub-epoch summaries obtained from the weight proof during sync
1576
+ """
1333
1577
  # Validates signatures in multiprocessing since they take a while, and we don't have cached transactions
1334
1578
  # for these blocks (unlike during normal operation where we validate one at a time)
1335
- pre_validate_start = time.monotonic()
1336
- pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing(
1337
- self.blockchain.constants,
1338
- self.blockchain,
1339
- blocks_to_validate,
1340
- self.blockchain.pool,
1341
- {},
1342
- sub_slot_iters=current_ssi,
1343
- difficulty=current_difficulty,
1344
- prev_ses_block=prev_ses_block,
1345
- wp_summaries=wp_summaries,
1346
- validate_signatures=True,
1347
- )
1348
- pre_validate_end = time.monotonic()
1349
- pre_validate_time = pre_validate_end - pre_validate_start
1350
-
1351
- self.log.log(
1352
- logging.WARNING if pre_validate_time > 10 else logging.DEBUG,
1353
- f"Block pre-validation: {pre_validate_end - pre_validate_start:0.2f}s "
1354
- f"CLVM: {sum(pvr.timing/1000.0 for pvr in pre_validation_results):0.2f}s "
1355
- f"({len(blocks_to_validate)} blocks, start height: {blocks_to_validate[0].height})",
1356
- )
1357
-
1358
- for i, block in enumerate(blocks_to_validate):
1359
- if pre_validation_results[i].error is not None:
1360
- self.log.error(
1361
- f"Invalid block from peer: {peer_info} height {block.height} {Err(pre_validation_results[i].error)}"
1362
- )
1363
- return (
1364
- False,
1579
+ # We have to copy the ValidationState object to preserve it for the add_block()
1580
+ # call below. pre_validate_block() will update the
1581
+ # object we pass in.
1582
+ ret: list[Awaitable[PreValidationResult]] = []
1583
+ for block in blocks_to_validate:
1584
+ ret.append(
1585
+ await pre_validate_block(
1586
+ self.constants,
1587
+ blockchain,
1588
+ block,
1589
+ self.blockchain.pool,
1365
1590
  None,
1366
- current_ssi,
1367
- current_difficulty,
1368
- prev_ses_block,
1369
- Err(pre_validation_results[i].error),
1591
+ vs,
1592
+ wp_summaries=wp_summaries,
1370
1593
  )
1594
+ )
1595
+ return ret
1371
1596
 
1597
+ async def add_prevalidated_blocks(
1598
+ self,
1599
+ blockchain: AugmentedBlockchain,
1600
+ blocks_to_validate: list[FullBlock],
1601
+ pre_validation_results: list[PreValidationResult],
1602
+ fork_info: ForkInfo,
1603
+ peer_info: PeerInfo,
1604
+ vs: ValidationState, # in-out parameter
1605
+ ) -> tuple[Optional[StateChangeSummary], Optional[Err]]:
1372
1606
  agg_state_change_summary: Optional[StateChangeSummary] = None
1373
1607
  block_record = await self.blockchain.get_block_record_from_db(blocks_to_validate[0].prev_header_hash)
1374
1608
  for i, block in enumerate(blocks_to_validate):
1609
+ header_hash = block.header_hash
1610
+ assert vs.prev_ses_block is None or vs.prev_ses_block.height < block.height
1375
1611
  assert pre_validation_results[i].required_iters is not None
1376
1612
  state_change_summary: Optional[StateChangeSummary]
1377
1613
  # when adding blocks in batches, we won't have any overlapping
@@ -1382,23 +1618,30 @@ class FullNode:
1382
1618
  cc_sub_slot = block.finished_sub_slots[0].challenge_chain
1383
1619
  if cc_sub_slot.new_sub_slot_iters is not None or cc_sub_slot.new_difficulty is not None:
1384
1620
  expected_sub_slot_iters, expected_difficulty = get_next_sub_slot_iters_and_difficulty(
1385
- self.constants, True, block_record, self.blockchain
1621
+ self.constants, True, block_record, blockchain
1386
1622
  )
1387
1623
  assert cc_sub_slot.new_sub_slot_iters is not None
1388
- current_ssi = cc_sub_slot.new_sub_slot_iters
1624
+ vs.ssi = cc_sub_slot.new_sub_slot_iters
1389
1625
  assert cc_sub_slot.new_difficulty is not None
1390
- current_difficulty = cc_sub_slot.new_difficulty
1391
- assert expected_sub_slot_iters == current_ssi
1392
- assert expected_difficulty == current_difficulty
1626
+ vs.difficulty = cc_sub_slot.new_difficulty
1627
+ assert expected_sub_slot_iters == vs.ssi
1628
+ assert expected_difficulty == vs.difficulty
1629
+ block_rec = blockchain.block_record(block.header_hash)
1393
1630
  result, error, state_change_summary = await self.blockchain.add_block(
1394
- block, pre_validation_results[i], None, current_ssi, fork_info, prev_ses_block=prev_ses_block
1631
+ block,
1632
+ pre_validation_results[i],
1633
+ vs.ssi,
1634
+ fork_info,
1635
+ prev_ses_block=vs.prev_ses_block,
1636
+ block_record=block_rec,
1395
1637
  )
1638
+ if error is None:
1639
+ blockchain.remove_extra_block(header_hash)
1396
1640
 
1397
1641
  if result == AddBlockResult.NEW_PEAK:
1398
1642
  # since this block just added a new peak, we've don't need any
1399
1643
  # fork history from fork_info anymore
1400
- if fork_info is not None:
1401
- fork_info.reset(block.height, block.header_hash)
1644
+ fork_info.reset(block.height, header_hash)
1402
1645
  assert state_change_summary is not None
1403
1646
  # Since all blocks are contiguous, we can simply append the rollback changes and npc results
1404
1647
  if agg_state_change_summary is None:
@@ -1414,27 +1657,23 @@ class FullNode:
1414
1657
  agg_state_change_summary.additions + state_change_summary.additions,
1415
1658
  agg_state_change_summary.new_rewards + state_change_summary.new_rewards,
1416
1659
  )
1417
- elif result == AddBlockResult.INVALID_BLOCK or result == AddBlockResult.DISCONNECTED_BLOCK:
1660
+ elif result in {AddBlockResult.INVALID_BLOCK, AddBlockResult.DISCONNECTED_BLOCK}:
1418
1661
  if error is not None:
1419
1662
  self.log.error(f"Error: {error}, Invalid block from peer: {peer_info} ")
1420
- return False, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, error
1421
- block_record = self.blockchain.block_record(block.header_hash)
1663
+ return agg_state_change_summary, error
1664
+ block_record = blockchain.block_record(header_hash)
1422
1665
  assert block_record is not None
1423
1666
  if block_record.sub_epoch_summary_included is not None:
1424
- prev_ses_block = block_record
1667
+ vs.prev_ses_block = block_record
1425
1668
  if self.weight_proof_handler is not None:
1426
1669
  await self.weight_proof_handler.create_prev_sub_epoch_segments()
1427
1670
  if agg_state_change_summary is not None:
1428
1671
  self._state_changed("new_peak")
1429
- self.log.debug(
1430
- f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, "
1431
- f"advanced: True"
1432
- )
1433
- return True, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, None
1672
+ return agg_state_change_summary, None
1434
1673
 
1435
1674
  async def get_sub_slot_iters_difficulty_ses_block(
1436
1675
  self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64]
1437
- ) -> Tuple[uint64, uint64, Optional[BlockRecord]]:
1676
+ ) -> tuple[uint64, uint64, Optional[BlockRecord]]:
1438
1677
  prev_ses_block = None
1439
1678
  if ssi is None or diff is None:
1440
1679
  if block.height == 0:
@@ -1471,7 +1710,7 @@ class FullNode:
1471
1710
  assert diff is not None
1472
1711
  return ssi, diff, prev_ses_block
1473
1712
 
1474
- async def _finish_sync(self) -> None:
1713
+ async def _finish_sync(self, fork_point: Optional[uint32]) -> None:
1475
1714
  """
1476
1715
  Finalize sync by setting sync mode to False, clearing all sync information, and adding any final
1477
1716
  blocks that we have finalized recently.
@@ -1487,12 +1726,17 @@ class FullNode:
1487
1726
  peak: Optional[BlockRecord] = self.blockchain.get_peak()
1488
1727
  peak_fb: Optional[FullBlock] = await self.blockchain.get_full_peak()
1489
1728
  if peak_fb is not None:
1729
+ if fork_point is None:
1730
+ fork_point = uint32(max(peak_fb.height - 1, 0))
1490
1731
  assert peak is not None
1491
- state_change_summary = StateChangeSummary(peak, uint32(max(peak.height - 1, 0)), [], [], [], [])
1732
+ state_change_summary = StateChangeSummary(peak, fork_point, [], [], [], [])
1492
1733
  ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
1493
1734
  peak_fb, state_change_summary, None
1494
1735
  )
1495
- await self.peak_post_processing_2(peak_fb, None, state_change_summary, ppp_result)
1736
+
1737
+ if peak_fb is not None:
1738
+ # Call outside of priority_mutex to encourage concurrency
1739
+ await self.peak_post_processing_2(peak_fb, None, state_change_summary, ppp_result)
1496
1740
 
1497
1741
  if peak is not None and self.weight_proof_handler is not None:
1498
1742
  await self.weight_proof_handler.get_proof_of_weight(peak.header_hash)
@@ -1595,6 +1839,7 @@ class FullNode:
1595
1839
  self.log.info(
1596
1840
  f"🌱 Updated peak to height {record.height}, weight {record.weight}, "
1597
1841
  f"hh {record.header_hash.hex()}, "
1842
+ f"ph {record.prev_hash.hex()}, "
1598
1843
  f"forked at {state_change_summary.fork_height}, rh: {record.reward_infusion_new_challenge.hex()}, "
1599
1844
  f"total iters: {record.total_iters}, "
1600
1845
  f"overflow: {record.overflow}, "
@@ -1668,7 +1913,7 @@ class FullNode:
1668
1913
  )
1669
1914
 
1670
1915
  # Update the mempool (returns successful pending transactions added to the mempool)
1671
- spent_coins: List[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals]
1916
+ spent_coins: list[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals]
1672
1917
  mempool_new_peak_result = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), spent_coins)
1673
1918
 
1674
1919
  return PeakPostProcessingResult(
@@ -1734,7 +1979,7 @@ class FullNode:
1734
1979
  else:
1735
1980
  await self.server.send_to_all([msg], NodeType.FULL_NODE)
1736
1981
 
1737
- coin_hints: Dict[bytes32, bytes32] = {
1982
+ coin_hints: dict[bytes32, bytes32] = {
1738
1983
  coin_id: bytes32(hint) for coin_id, hint in ppp_result.hints if len(hint) == 32
1739
1984
  }
1740
1985
 
@@ -1773,6 +2018,8 @@ class FullNode:
1773
2018
  # Adds the block to seen, and check if it's seen before (which means header is in memory)
1774
2019
  header_hash = block.header_hash
1775
2020
  if self.blockchain.contains_block(header_hash):
2021
+ if fork_info is not None:
2022
+ await self.blockchain.run_single_block(block, fork_info)
1776
2023
  return None
1777
2024
 
1778
2025
  pre_validation_result: Optional[PreValidationResult] = None
@@ -1790,6 +2037,7 @@ class FullNode:
1790
2037
  unf_entry: Optional[UnfinishedBlockEntry] = self.full_node_store.get_unfinished_block_result(
1791
2038
  unfinished_rh, foliage_hash
1792
2039
  )
2040
+ assert unf_entry is None or unf_entry.result is None or unf_entry.result.validated_signature is True
1793
2041
  if (
1794
2042
  unf_entry is not None
1795
2043
  and unf_entry.unfinished_block is not None
@@ -1844,12 +2092,14 @@ class FullNode:
1844
2092
  ):
1845
2093
  # After acquiring the lock, check again, because another asyncio thread might have added it
1846
2094
  if self.blockchain.contains_block(header_hash):
2095
+ if fork_info is not None:
2096
+ await self.blockchain.run_single_block(block, fork_info)
1847
2097
  return None
1848
2098
  validation_start = time.monotonic()
1849
2099
  # Tries to add the block to the blockchain, if we already validated transactions, don't do it again
1850
- block_height_conds_map = {}
2100
+ conds = None
1851
2101
  if pre_validation_result is not None and pre_validation_result.conds is not None:
1852
- block_height_conds_map[block.height] = pre_validation_result.conds
2102
+ conds = pre_validation_result.conds
1853
2103
 
1854
2104
  # Don't validate signatures because we want to validate them in the main thread later, since we have a
1855
2105
  # cache available
@@ -1864,40 +2114,34 @@ class FullNode:
1864
2114
  prev_ses_block = curr
1865
2115
  new_slot = len(block.finished_sub_slots) > 0
1866
2116
  ssi, diff = get_next_sub_slot_iters_and_difficulty(self.constants, new_slot, prev_b, self.blockchain)
1867
- pre_validation_results = await pre_validate_blocks_multiprocessing(
2117
+ future = await pre_validate_block(
1868
2118
  self.blockchain.constants,
1869
- self.blockchain,
1870
- [block],
2119
+ AugmentedBlockchain(self.blockchain),
2120
+ block,
1871
2121
  self.blockchain.pool,
1872
- block_height_conds_map,
1873
- sub_slot_iters=ssi,
1874
- difficulty=diff,
1875
- prev_ses_block=prev_ses_block,
1876
- validate_signatures=False,
2122
+ conds,
2123
+ ValidationState(ssi, diff, prev_ses_block),
1877
2124
  )
2125
+ pre_validation_result = await future
1878
2126
  added: Optional[AddBlockResult] = None
1879
2127
  pre_validation_time = time.monotonic() - validation_start
1880
2128
  try:
1881
- if len(pre_validation_results) < 1:
1882
- raise ValueError(f"Failed to validate block {header_hash} height {block.height}")
1883
- if pre_validation_results[0].error is not None:
1884
- if Err(pre_validation_results[0].error) == Err.INVALID_PREV_BLOCK_HASH:
2129
+ if pre_validation_result.error is not None:
2130
+ if Err(pre_validation_result.error) == Err.INVALID_PREV_BLOCK_HASH:
1885
2131
  added = AddBlockResult.DISCONNECTED_BLOCK
1886
2132
  error_code: Optional[Err] = Err.INVALID_PREV_BLOCK_HASH
1887
- elif Err(pre_validation_results[0].error) == Err.TIMESTAMP_TOO_FAR_IN_FUTURE:
2133
+ elif Err(pre_validation_result.error) == Err.TIMESTAMP_TOO_FAR_IN_FUTURE:
1888
2134
  raise TimestampError()
1889
2135
  else:
1890
2136
  raise ValueError(
1891
2137
  f"Failed to validate block {header_hash} height "
1892
- f"{block.height}: {Err(pre_validation_results[0].error).name}"
2138
+ f"{block.height}: {Err(pre_validation_result.error).name}"
1893
2139
  )
1894
2140
  else:
1895
- result_to_validate = (
1896
- pre_validation_results[0] if pre_validation_result is None else pre_validation_result
1897
- )
1898
- assert result_to_validate.required_iters == pre_validation_results[0].required_iters
2141
+ if fork_info is None:
2142
+ fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash)
1899
2143
  (added, error_code, state_change_summary) = await self.blockchain.add_block(
1900
- block, result_to_validate, bls_cache, ssi, fork_info
2144
+ block, pre_validation_result, ssi, fork_info
1901
2145
  )
1902
2146
  if added == AddBlockResult.ALREADY_HAVE_BLOCK:
1903
2147
  return None
@@ -1911,6 +2155,12 @@ class FullNode:
1911
2155
  raise RuntimeError("Expected block to be added, received disconnected block.")
1912
2156
  return None
1913
2157
  elif added == AddBlockResult.NEW_PEAK:
2158
+ # Evict any related BLS cache entries as we no longer need them
2159
+ if bls_cache is not None and pre_validation_result.conds is not None:
2160
+ pairs_pks, pairs_msgs = pkm_pairs(
2161
+ pre_validation_result.conds, self.constants.AGG_SIG_ME_ADDITIONAL_DATA
2162
+ )
2163
+ bls_cache.evict(pairs_pks, pairs_msgs)
1914
2164
  # Only propagate blocks which extend the blockchain (becomes one of the heads)
1915
2165
  assert state_change_summary is not None
1916
2166
  post_process_time = time.monotonic()
@@ -1952,7 +2202,7 @@ class FullNode:
1952
2202
  logging.WARNING if validation_time > 2 else logging.DEBUG,
1953
2203
  f"Block validation: {validation_time:0.2f}s, "
1954
2204
  f"pre_validation: {pre_validation_time:0.2f}s, "
1955
- f"CLVM: {pre_validation_results[0].timing/1000.0:0.2f}s, "
2205
+ f"CLVM: {pre_validation_result.timing / 1000.0:0.2f}s, "
1956
2206
  f"post-process: {post_process_time:0.2f}s, "
1957
2207
  f"cost: {block.transactions_info.cost if block.transactions_info is not None else 'None'}"
1958
2208
  f"{percent_full_str} header_hash: {header_hash.hex()} height: {block.height}",
@@ -1974,7 +2224,7 @@ class FullNode:
1974
2224
  self.full_node_store.clear_candidate_blocks_below(clear_height)
1975
2225
  self.full_node_store.clear_unfinished_blocks_below(clear_height)
1976
2226
 
1977
- state_changed_data: Dict[str, Any] = {
2227
+ state_changed_data: dict[str, Any] = {
1978
2228
  "transaction_block": False,
1979
2229
  "k_size": block.reward_chain_block.proof_of_space.size,
1980
2230
  "header_hash": block.header_hash,
@@ -2008,8 +2258,12 @@ class FullNode:
2008
2258
 
2009
2259
  record = self.blockchain.block_record(block.header_hash)
2010
2260
  if self.weight_proof_handler is not None and record.sub_epoch_summary_included is not None:
2011
- if self._segment_task is None or self._segment_task.done():
2012
- self._segment_task = asyncio.create_task(self.weight_proof_handler.create_prev_sub_epoch_segments())
2261
+ self._segment_task_list.append(
2262
+ create_referenced_task(self.weight_proof_handler.create_prev_sub_epoch_segments())
2263
+ )
2264
+ for task in self._segment_task_list[:]:
2265
+ if task.done():
2266
+ self._segment_task_list.remove(task)
2013
2267
  return None
2014
2268
 
2015
2269
  async def add_unfinished_block(
@@ -2017,7 +2271,6 @@ class FullNode:
2017
2271
  block: UnfinishedBlock,
2018
2272
  peer: Optional[WSChiaConnection],
2019
2273
  farmed_block: bool = False,
2020
- block_bytes: Optional[bytes] = None,
2021
2274
  ) -> None:
2022
2275
  """
2023
2276
  We have received an unfinished block, either created by us, or from another peer.
@@ -2096,29 +2349,44 @@ class FullNode:
2096
2349
  if block.transactions_generator is not None:
2097
2350
  pre_validation_start = time.monotonic()
2098
2351
  assert block.transactions_info is not None
2099
- try:
2100
- block_generator: Optional[BlockGenerator] = await get_block_generator(
2101
- self.blockchain.lookup_block_generators, block
2352
+ if len(block.transactions_generator_ref_list) > 0:
2353
+ generator_refs = set(block.transactions_generator_ref_list)
2354
+ generators: dict[uint32, bytes] = await self.blockchain.lookup_block_generators(
2355
+ block.prev_header_hash, generator_refs
2102
2356
  )
2103
- except ValueError:
2104
- raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
2105
- if block_generator is None:
2106
- raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
2107
- if block_bytes is None:
2108
- block_bytes = bytes(block)
2357
+ generator_args = [generators[height] for height in block.transactions_generator_ref_list]
2358
+ else:
2359
+ generator_args = []
2109
2360
 
2110
2361
  height = uint32(0) if prev_b is None else uint32(prev_b.height + 1)
2111
- npc_result = await self.blockchain.run_generator(block_bytes, block_generator, height)
2112
- pre_validation_time = time.monotonic() - pre_validation_start
2362
+ flags = get_flags_for_height_and_constants(height, self.constants)
2113
2363
 
2114
- # blockchain.run_generator throws on errors, so npc_result is
2115
- # guaranteed to represent a successful run
2116
- assert npc_result.conds is not None
2117
- pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, self.constants.AGG_SIG_ME_ADDITIONAL_DATA)
2118
- if not self._bls_cache.aggregate_verify(
2119
- pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature
2120
- ):
2121
- raise ConsensusError(Err.BAD_AGGREGATE_SIGNATURE)
2364
+ # on mainnet we won't receive unfinished blocks for heights
2365
+ # below the hard fork activation, but we have tests where we do
2366
+ if height >= self.constants.HARD_FORK_HEIGHT:
2367
+ run_block = run_block_generator2
2368
+ else:
2369
+ run_block = run_block_generator
2370
+
2371
+ # run_block() also validates the signature
2372
+ err, conditions = await asyncio.get_running_loop().run_in_executor(
2373
+ self.blockchain.pool,
2374
+ run_block,
2375
+ bytes(block.transactions_generator),
2376
+ generator_args,
2377
+ min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
2378
+ flags,
2379
+ block.transactions_info.aggregated_signature,
2380
+ self._bls_cache,
2381
+ self.constants,
2382
+ )
2383
+
2384
+ if err is not None:
2385
+ raise ConsensusError(Err(err))
2386
+ assert conditions is not None
2387
+ assert conditions.validated_signature
2388
+ npc_result = NPCResult(None, conditions)
2389
+ pre_validation_time = time.monotonic() - pre_validation_start
2122
2390
 
2123
2391
  async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
2124
2392
  # TODO: pre-validate VDFs outside of lock
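
In the hunk above, the unfinished block's generator is executed with run_in_executor on the blockchain's process pool, keeping the CLVM run and signature check off the event loop. A generic sketch of handing CPU-bound validation work to an executor from async code (plain concurrent.futures with a placeholder worker, not the chia_rs run_block_generator entry points):

import asyncio
from concurrent.futures import ProcessPoolExecutor
from typing import Optional

def run_generator(program: bytes, max_cost: int) -> tuple[Optional[str], int]:
    # placeholder for the CPU-bound work (generator execution + signature check)
    cost = len(program)
    if cost > max_cost:
        return "BLOCK_COST_EXCEEDS_MAX", cost
    return None, cost

async def validate_unfinished(pool: ProcessPoolExecutor, program: bytes) -> int:
    loop = asyncio.get_running_loop()
    # the event loop stays responsive while a worker process does the heavy lifting
    err, cost = await loop.run_in_executor(pool, run_generator, program, 1_000)
    if err is not None:
        raise ValueError(err)
    return cost

async def main() -> None:
    with ProcessPoolExecutor() as pool:
        print(await validate_unfinished(pool, b"\x01" * 100))

if __name__ == "__main__":
    asyncio.run(main())
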
@@ -2128,9 +2396,6 @@ class FullNode:
2128
2396
  raise ConsensusError(Err(validate_result.error))
2129
2397
  validation_time = time.monotonic() - validation_start
2130
2398
 
2131
- # respond_block will later use the cache (validated_signature=True)
2132
- validate_result = dataclasses.replace(validate_result, validated_signature=True)
2133
-
2134
2399
  assert validate_result.required_iters is not None
2135
2400
 
2136
2401
  # Perform another check, in case we have already concurrently added the same unfinished block
@@ -2295,7 +2560,7 @@ class FullNode:
2295
2560
  )
2296
2561
  return None
2297
2562
 
2298
- finished_sub_slots: Optional[List[EndOfSubSlotBundle]] = self.full_node_store.get_finished_sub_slots(
2563
+ finished_sub_slots: Optional[list[EndOfSubSlotBundle]] = self.full_node_store.get_finished_sub_slots(
2299
2564
  self.blockchain,
2300
2565
  prev_b,
2301
2566
  last_slot_cc_hash,
@@ -2355,7 +2620,7 @@ class FullNode:
2355
2620
 
2356
2621
  async def add_end_of_sub_slot(
2357
2622
  self, end_of_slot_bundle: EndOfSubSlotBundle, peer: WSChiaConnection
2358
- ) -> Tuple[Optional[Message], bool]:
2623
+ ) -> tuple[Optional[Message], bool]:
2359
2624
  fetched_ss = self.full_node_store.get_sub_slot(end_of_slot_bundle.challenge_chain.get_hash())
2360
2625
 
2361
2626
  # We are not interested in sub-slots which have the same challenge chain but different reward chain. If there
@@ -2377,7 +2642,7 @@ class FullNode:
2377
2642
  full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
2378
2643
  end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
2379
2644
  uint8(0),
2380
- bytes32([0] * 32),
2645
+ bytes32.zeros,
2381
2646
  )
2382
2647
  return (
2383
2648
  make_msg(ProtocolMessageTypes.request_signage_point_or_end_of_sub_slot, full_node_request),
@@ -2452,7 +2717,7 @@ class FullNode:
2452
2717
 
2453
2718
  async def add_transaction(
2454
2719
  self, transaction: SpendBundle, spend_name: bytes32, peer: Optional[WSChiaConnection] = None, test: bool = False
2455
- ) -> Tuple[MempoolInclusionStatus, Optional[Err]]:
2720
+ ) -> tuple[MempoolInclusionStatus, Optional[Err]]:
2456
2721
  if self.sync_store.get_sync_mode():
2457
2722
  return MempoolInclusionStatus.FAILED, Err.NO_TRANSACTIONS_WHILE_SYNCING
2458
2723
  if not test and not (await self.synced()):
@@ -2483,6 +2748,15 @@ class FullNode:
2483
2748
  self.mempool_manager.remove_seen(spend_name)
2484
2749
  raise
2485
2750
 
2751
+ if self.config.get("log_mempool", False): # pragma: no cover
2752
+ try:
2753
+ mempool_dir = path_from_root(self.root_path, "mempool-log") / f"{self.blockchain.get_peak_height()}"
2754
+ mempool_dir.mkdir(parents=True, exist_ok=True)
2755
+ with open(mempool_dir / f"{spend_name}.bundle", "wb+") as f:
2756
+ f.write(bytes(transaction))
2757
+ except Exception:
2758
+ self.log.exception(f"Failed to log mempool item: {spend_name}")
2759
+
2486
2760
  async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.low):
2487
2761
  if self.mempool_manager.get_spendbundle(spend_name) is not None:
2488
2762
  self.mempool_manager.remove_seen(spend_name)
@@ -2509,7 +2783,7 @@ class FullNode:
2509
2783
  await self.broadcast_added_tx(mempool_item, current_peer=peer)
2510
2784
 
2511
2785
  if self.simulator_transaction_callback is not None: # callback
2512
- await self.simulator_transaction_callback(spend_name) # pylint: disable=E1102
2786
+ await self.simulator_transaction_callback(spend_name)
2513
2787
 
2514
2788
  else:
2515
2789
  self.mempool_manager.remove_seen(spend_name)
@@ -2567,7 +2841,7 @@ class FullNode:
2567
2841
  f"Broadcasting added transaction {mempool_item.name} to {len(peer_ids)} peers took {total_time:.4f}s",
2568
2842
  )
2569
2843
 
2570
- async def broadcast_removed_tx(self, mempool_removals: List[MempoolRemoveInfo]) -> None:
2844
+ async def broadcast_removed_tx(self, mempool_removals: list[MempoolRemoveInfo]) -> None:
2571
2845
  total_removals = sum(len(r.items) for r in mempool_removals)
2572
2846
  if total_removals == 0:
2573
2847
  return
@@ -2585,7 +2859,7 @@ class FullNode:
2585
2859
  if len(all_peers) == 0:
2586
2860
  return
2587
2861
 
2588
- removals_to_send: Dict[bytes32, List[RemovedMempoolItem]] = dict()
2862
+ removals_to_send: dict[bytes32, list[RemovedMempoolItem]] = dict()
2589
2863
 
2590
2864
  for removal_info in mempool_removals:
2591
2865
  for internal_mempool_item in removal_info.items:
@@ -2602,7 +2876,7 @@ class FullNode:
2602
2876
 
2603
2877
  transaction_id = internal_mempool_item.spend_bundle.name()
2604
2878
 
2605
- self.log.debug(f"Broadcasting removed transaction {transaction_id} to " f"wallet peers {peer_ids}")
2879
+ self.log.debug(f"Broadcasting removed transaction {transaction_id} to wallet peers {peer_ids}")
2606
2880
 
2607
2881
  for peer_id in peer_ids:
2608
2882
  peer = self.server.all_connections.get(peer_id)
@@ -2926,7 +3200,7 @@ class FullNode:
2926
3200
  return None
2927
3201
  await asyncio.sleep(30)
2928
3202
 
2929
- broadcast_list: List[timelord_protocol.RequestCompactProofOfTime] = []
3203
+ broadcast_list: list[timelord_protocol.RequestCompactProofOfTime] = []
2930
3204
 
2931
3205
  self.log.info("Getting random heights for bluebox to compact")
2932
3206
 
@@ -2942,7 +3216,7 @@ class FullNode:
2942
3216
 
2943
3217
  for h in heights:
2944
3218
  headers = await self.blockchain.get_header_blocks_in_range(h, h, tx_filter=False)
2945
- records: Dict[bytes32, BlockRecord] = {}
3219
+ records: dict[bytes32, BlockRecord] = {}
2946
3220
  if sanitize_weight_proof_only:
2947
3221
  records = await self.blockchain.get_block_records_in_range(h, h)
2948
3222
  for header in headers.values():
@@ -3010,7 +3284,7 @@ class FullNode:
3010
3284
  )
3011
3285
  )
3012
3286
 
3013
- broadcast_list_chunks: List[List[timelord_protocol.RequestCompactProofOfTime]] = []
3287
+ broadcast_list_chunks: list[list[timelord_protocol.RequestCompactProofOfTime]] = []
3014
3288
  for index in range(0, len(broadcast_list), target_uncompact_proofs):
3015
3289
  broadcast_list_chunks.append(broadcast_list[index : index + target_uncompact_proofs])
3016
3290
  if len(broadcast_list_chunks) == 0: