chia-blockchain 2.5.0rc2__py3-none-any.whl → 2.5.1rc2__py3-none-any.whl
This diff compares the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in those public registries.
- chia/_tests/README.md +1 -1
- chia/_tests/blockchain/blockchain_test_utils.py +24 -26
- chia/_tests/blockchain/test_augmented_chain.py +6 -8
- chia/_tests/blockchain/test_blockchain.py +409 -307
- chia/_tests/blockchain/test_blockchain_transactions.py +56 -75
- chia/_tests/blockchain/test_build_chains.py +11 -13
- chia/_tests/blockchain/test_get_block_generator.py +8 -8
- chia/_tests/blockchain/test_lookup_fork_chain.py +3 -4
- chia/_tests/build-init-files.py +3 -4
- chia/_tests/build-job-matrix.py +9 -9
- chia/_tests/check_sql_statements.py +2 -3
- chia/_tests/clvm/benchmark_costs.py +1 -1
- chia/_tests/clvm/coin_store.py +7 -5
- chia/_tests/clvm/test_chialisp_deserialization.py +8 -8
- chia/_tests/clvm/test_condition_codes.py +2 -2
- chia/_tests/clvm/test_curry_and_treehash.py +2 -4
- chia/_tests/clvm/test_message_conditions.py +184 -0
- chia/_tests/clvm/test_puzzle_compression.py +1 -2
- chia/_tests/clvm/test_puzzle_drivers.py +3 -3
- chia/_tests/clvm/test_puzzles.py +13 -18
- chia/_tests/clvm/test_singletons.py +17 -17
- chia/_tests/clvm/test_spend_sim.py +7 -7
- chia/_tests/cmds/cmd_test_utils.py +42 -45
- chia/_tests/cmds/conftest.py +2 -2
- chia/_tests/cmds/test_click_types.py +21 -16
- chia/_tests/cmds/test_cmd_framework.py +255 -35
- chia/_tests/cmds/test_cmds_util.py +2 -2
- chia/_tests/cmds/test_daemon.py +3 -3
- chia/_tests/cmds/test_dev_gh.py +131 -0
- chia/_tests/cmds/test_farm_cmd.py +1 -2
- chia/_tests/cmds/test_show.py +6 -6
- chia/_tests/cmds/test_tx_config_args.py +2 -1
- chia/_tests/cmds/wallet/test_dao.py +23 -23
- chia/_tests/cmds/wallet/test_did.py +29 -29
- chia/_tests/cmds/wallet/test_nft.py +24 -23
- chia/_tests/cmds/wallet/test_notifications.py +8 -8
- chia/_tests/cmds/wallet/test_tx_decorators.py +3 -3
- chia/_tests/cmds/wallet/test_vcs.py +97 -73
- chia/_tests/cmds/wallet/test_wallet.py +74 -75
- chia/_tests/cmds/wallet/test_wallet_check.py +5 -7
- chia/_tests/conftest.py +153 -38
- chia/_tests/connection_utils.py +7 -6
- chia/_tests/core/cmds/test_beta.py +3 -3
- chia/_tests/core/cmds/test_keys.py +6 -6
- chia/_tests/core/cmds/test_wallet.py +3 -3
- chia/_tests/core/consensus/test_block_creation.py +3 -5
- chia/_tests/core/custom_types/test_coin.py +1 -3
- chia/_tests/core/custom_types/test_spend_bundle.py +3 -4
- chia/_tests/core/daemon/test_daemon.py +58 -58
- chia/_tests/core/daemon/test_keychain_proxy.py +2 -1
- chia/_tests/core/data_layer/conftest.py +4 -3
- chia/_tests/core/data_layer/test_data_cli.py +1 -2
- chia/_tests/core/data_layer/test_data_layer.py +5 -5
- chia/_tests/core/data_layer/test_data_layer_util.py +8 -9
- chia/_tests/core/data_layer/test_data_rpc.py +75 -93
- chia/_tests/core/data_layer/test_data_store.py +38 -37
- chia/_tests/core/data_layer/test_data_store_schema.py +11 -11
- chia/_tests/core/data_layer/util.py +11 -10
- chia/_tests/core/farmer/test_farmer_api.py +6 -4
- chia/_tests/core/full_node/full_sync/test_full_sync.py +5 -10
- chia/_tests/core/full_node/ram_db.py +2 -2
- chia/_tests/core/full_node/stores/test_block_store.py +113 -11
- chia/_tests/core/full_node/stores/test_coin_store.py +37 -28
- chia/_tests/core/full_node/stores/test_full_node_store.py +34 -30
- chia/_tests/core/full_node/stores/test_hint_store.py +3 -4
- chia/_tests/core/full_node/test_address_manager.py +2 -2
- chia/_tests/core/full_node/test_block_height_map.py +1 -1
- chia/_tests/core/full_node/test_conditions.py +10 -12
- chia/_tests/core/full_node/test_full_node.py +2077 -1822
- chia/_tests/core/full_node/test_generator_tools.py +4 -4
- chia/_tests/core/full_node/test_hint_management.py +2 -2
- chia/_tests/core/full_node/test_performance.py +2 -5
- chia/_tests/core/full_node/test_subscriptions.py +4 -4
- chia/_tests/core/full_node/test_tx_processing_queue.py +5 -4
- chia/_tests/core/make_block_generator.py +5 -7
- chia/_tests/core/mempool/test_mempool.py +205 -208
- chia/_tests/core/mempool/test_mempool_fee_protocol.py +5 -5
- chia/_tests/core/mempool/test_mempool_item_queries.py +2 -4
- chia/_tests/core/mempool/test_mempool_manager.py +109 -80
- chia/_tests/core/mempool/test_mempool_performance.py +3 -4
- chia/_tests/core/mempool/test_singleton_fast_forward.py +12 -12
- chia/_tests/core/server/flood.py +6 -4
- chia/_tests/core/server/serve.py +10 -7
- chia/_tests/core/server/test_api_protocol.py +21 -0
- chia/_tests/core/server/test_capabilities.py +3 -5
- chia/_tests/core/server/test_dos.py +15 -16
- chia/_tests/core/server/test_loop.py +14 -10
- chia/_tests/core/server/test_node_discovery.py +1 -2
- chia/_tests/core/server/test_rate_limits.py +156 -44
- chia/_tests/core/server/test_server.py +8 -7
- chia/_tests/core/services/test_services.py +59 -37
- chia/_tests/core/ssl/test_ssl.py +5 -3
- chia/_tests/core/test_cost_calculation.py +5 -6
- chia/_tests/core/test_crawler.py +2 -2
- chia/_tests/core/test_db_conversion.py +5 -4
- chia/_tests/core/test_db_validation.py +6 -5
- chia/_tests/core/test_farmer_harvester_rpc.py +8 -7
- chia/_tests/core/test_filter.py +3 -5
- chia/_tests/core/test_full_node_rpc.py +64 -90
- chia/_tests/core/test_merkle_set.py +10 -10
- chia/_tests/core/test_program.py +2 -4
- chia/_tests/core/test_rpc_util.py +1 -2
- chia/_tests/core/test_seeder.py +124 -12
- chia/_tests/core/util/test_block_cache.py +5 -5
- chia/_tests/core/util/test_cached_bls.py +3 -3
- chia/_tests/core/util/test_config.py +13 -13
- chia/_tests/core/util/test_files.py +2 -2
- chia/_tests/core/util/test_jsonify.py +9 -9
- chia/_tests/core/util/test_keychain.py +13 -5
- chia/_tests/core/util/test_keyring_wrapper.py +6 -5
- chia/_tests/core/util/test_log_exceptions.py +3 -3
- chia/_tests/core/util/test_streamable.py +38 -38
- chia/_tests/db/test_db_wrapper.py +13 -12
- chia/_tests/environments/common.py +2 -2
- chia/_tests/environments/full_node.py +2 -2
- chia/_tests/environments/wallet.py +109 -48
- chia/_tests/farmer_harvester/test_farmer.py +35 -35
- chia/_tests/farmer_harvester/test_farmer_harvester.py +17 -17
- chia/_tests/farmer_harvester/test_filter_prefix_bits.py +6 -5
- chia/_tests/farmer_harvester/test_third_party_harvesters.py +73 -46
- chia/_tests/fee_estimation/test_fee_estimation_integration.py +8 -8
- chia/_tests/fee_estimation/test_fee_estimation_rpc.py +47 -47
- chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py +6 -7
- chia/_tests/fee_estimation/test_mempoolitem_height_added.py +11 -11
- chia/_tests/generator/test_compression.py +13 -30
- chia/_tests/generator/test_generator_types.py +3 -3
- chia/_tests/generator/test_rom.py +7 -9
- chia/_tests/plot_sync/test_delta.py +2 -3
- chia/_tests/plot_sync/test_plot_sync.py +25 -24
- chia/_tests/plot_sync/test_receiver.py +9 -9
- chia/_tests/plot_sync/test_sender.py +1 -1
- chia/_tests/plot_sync/test_sync_simulated.py +27 -26
- chia/_tests/plot_sync/util.py +2 -1
- chia/_tests/plotting/test_plot_manager.py +54 -11
- chia/_tests/plotting/util.py +2 -3
- chia/_tests/pools/test_pool_cli_parsing.py +128 -0
- chia/_tests/pools/test_pool_cmdline.py +993 -15
- chia/_tests/pools/test_pool_config.py +3 -5
- chia/_tests/pools/test_pool_puzzles_lifecycle.py +10 -11
- chia/_tests/pools/test_pool_rpc.py +203 -90
- chia/_tests/pools/test_pool_wallet.py +12 -8
- chia/_tests/pools/test_wallet_pool_store.py +3 -3
- chia/_tests/process_junit.py +16 -17
- chia/_tests/rpc/test_rpc_client.py +59 -2
- chia/_tests/rpc/test_rpc_server.py +183 -0
- chia/_tests/simulation/test_simulation.py +5 -5
- chia/_tests/simulation/test_simulator.py +8 -10
- chia/_tests/simulation/test_start_simulator.py +5 -4
- chia/_tests/timelord/test_new_peak.py +19 -19
- chia/_tests/tools/test_run_block.py +1 -2
- chia/_tests/tools/test_virtual_project.py +591 -0
- chia/_tests/util/benchmark_cost.py +9 -9
- chia/_tests/util/benchmarks.py +1 -2
- chia/_tests/util/blockchain.py +12 -11
- chia/_tests/util/blockchain_mock.py +15 -15
- chia/_tests/util/build_network_protocol_files.py +12 -12
- chia/_tests/util/db_connection.py +3 -2
- chia/_tests/util/full_sync.py +14 -6
- chia/_tests/util/gen_ssl_certs.py +4 -5
- chia/_tests/util/generator_tools_testing.py +5 -7
- chia/_tests/util/get_name_puzzle_conditions.py +52 -0
- chia/_tests/util/key_tool.py +2 -3
- chia/_tests/util/misc.py +59 -106
- chia/_tests/util/network_protocol_data.py +7 -9
- chia/_tests/util/protocol_messages_json.py +112 -111
- chia/_tests/util/rpc.py +3 -0
- chia/_tests/util/run_block.py +16 -16
- chia/_tests/util/setup_nodes.py +25 -23
- chia/{clvm → _tests/util}/spend_sim.py +59 -55
- chia/_tests/util/split_managers.py +12 -9
- chia/_tests/util/temp_file.py +1 -1
- chia/_tests/util/test_action_scope.py +2 -1
- chia/_tests/util/test_async_pool.py +8 -8
- chia/_tests/util/test_build_job_matrix.py +2 -3
- chia/_tests/util/test_condition_tools.py +4 -6
- chia/_tests/util/test_config.py +5 -5
- chia/_tests/util/test_dump_keyring.py +1 -1
- chia/_tests/util/test_full_block_utils.py +19 -11
- chia/_tests/util/test_limited_semaphore.py +4 -3
- chia/_tests/util/test_logging_filter.py +2 -3
- chia/_tests/util/test_misc.py +29 -28
- chia/_tests/util/test_network.py +32 -31
- chia/_tests/util/test_network_protocol_files.py +2 -3
- chia/_tests/util/test_network_protocol_json.py +1 -0
- chia/_tests/util/test_network_protocol_test.py +18 -19
- chia/_tests/util/test_paginator.py +3 -4
- chia/_tests/util/test_pprint.py +1 -1
- chia/_tests/util/test_priority_mutex.py +18 -17
- chia/_tests/util/test_recursive_replace.py +2 -2
- chia/_tests/util/test_testnet_overrides.py +3 -3
- chia/_tests/util/test_timing.py +1 -1
- chia/_tests/util/test_trusted_peer.py +2 -2
- chia/_tests/util/time_out_assert.py +43 -6
- chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py +13 -13
- chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py +1 -1
- chia/_tests/wallet/cat_wallet/test_cat_wallet.py +117 -29
- chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +15 -15
- chia/_tests/wallet/cat_wallet/test_trades.py +50 -28
- chia/_tests/wallet/clawback/test_clawback_decorator.py +3 -5
- chia/_tests/wallet/clawback/test_clawback_lifecycle.py +6 -6
- chia/_tests/wallet/clawback/test_clawback_metadata.py +1 -2
- chia/_tests/wallet/conftest.py +135 -74
- chia/_tests/wallet/dao_wallet/test_dao_clvm.py +25 -17
- chia/_tests/wallet/dao_wallet/test_dao_wallets.py +75 -75
- chia/_tests/wallet/db_wallet/test_db_graftroot.py +10 -12
- chia/_tests/wallet/db_wallet/test_dl_offers.py +6 -6
- chia/_tests/wallet/db_wallet/test_dl_wallet.py +18 -18
- chia/_tests/wallet/did_wallet/test_did.py +1277 -474
- chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +12 -11
- chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py +115 -105
- chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +6 -7
- chia/_tests/wallet/nft_wallet/test_nft_offers.py +16 -16
- chia/_tests/wallet/nft_wallet/test_nft_puzzles.py +3 -3
- chia/_tests/wallet/nft_wallet/test_nft_wallet.py +38 -12
- chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py +1 -1
- chia/_tests/wallet/rpc/test_dl_wallet_rpc.py +31 -33
- chia/_tests/wallet/rpc/test_wallet_rpc.py +218 -171
- chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py +36 -37
- chia/_tests/wallet/sync/test_wallet_sync.py +241 -78
- chia/_tests/wallet/test_address_type.py +20 -20
- chia/_tests/wallet/test_clvm_streamable.py +5 -5
- chia/_tests/wallet/test_coin_management.py +354 -0
- chia/_tests/wallet/test_coin_selection.py +34 -35
- chia/_tests/wallet/test_conditions.py +28 -16
- chia/_tests/wallet/test_debug_spend_bundle.py +156 -14
- chia/_tests/wallet/test_new_wallet_protocol.py +29 -31
- chia/_tests/wallet/test_nft_store.py +1 -2
- chia/_tests/wallet/test_notifications.py +2 -2
- chia/_tests/wallet/test_offer_parsing_performance.py +1 -1
- chia/_tests/wallet/test_puzzle_store.py +2 -3
- chia/_tests/wallet/test_sign_coin_spends.py +3 -3
- chia/_tests/wallet/test_signer_protocol.py +33 -34
- chia/_tests/wallet/test_singleton_lifecycle_fast.py +29 -29
- chia/_tests/wallet/test_taproot.py +1 -1
- chia/_tests/wallet/test_transaction_store.py +23 -19
- chia/_tests/wallet/test_util.py +36 -32
- chia/_tests/wallet/test_wallet.py +37 -37
- chia/_tests/wallet/test_wallet_action_scope.py +8 -8
- chia/_tests/wallet/test_wallet_blockchain.py +4 -6
- chia/_tests/wallet/test_wallet_coin_store.py +34 -34
- chia/_tests/wallet/test_wallet_node.py +69 -72
- chia/_tests/wallet/test_wallet_retry.py +3 -3
- chia/_tests/wallet/test_wallet_state_manager.py +12 -5
- chia/_tests/wallet/test_wallet_trade_store.py +2 -2
- chia/_tests/wallet/test_wallet_utils.py +5 -4
- chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py +3 -3
- chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +18 -18
- chia/_tests/wallet/vc_wallet/test_vc_wallet.py +69 -40
- chia/_tests/wallet/wallet_block_tools.py +27 -27
- chia/_tests/weight_proof/test_weight_proof.py +30 -30
- chia/apis.py +19 -0
- chia/cmds/beta.py +8 -7
- chia/cmds/beta_funcs.py +15 -11
- chia/cmds/check_wallet_db.py +29 -27
- chia/cmds/chia.py +17 -9
- chia/cmds/cmd_classes.py +87 -79
- chia/cmds/cmd_helpers.py +242 -0
- chia/cmds/cmds_util.py +56 -66
- chia/cmds/coin_funcs.py +168 -153
- chia/cmds/coins.py +156 -194
- chia/cmds/configure.py +4 -3
- chia/cmds/dao.py +89 -33
- chia/cmds/dao_funcs.py +55 -33
- chia/cmds/data.py +7 -6
- chia/cmds/data_funcs.py +26 -21
- chia/cmds/db.py +4 -3
- chia/cmds/db_backup_func.py +2 -2
- chia/cmds/db_upgrade_func.py +3 -3
- chia/cmds/db_validate_func.py +2 -2
- chia/cmds/dev.py +2 -0
- chia/cmds/farm.py +18 -5
- chia/cmds/farm_funcs.py +17 -24
- chia/cmds/gh.py +275 -0
- chia/cmds/init.py +4 -11
- chia/cmds/init_funcs.py +9 -9
- chia/cmds/installers.py +5 -3
- chia/cmds/keys.py +56 -39
- chia/cmds/keys_funcs.py +30 -31
- chia/cmds/netspace.py +6 -3
- chia/cmds/netspace_funcs.py +3 -2
- chia/cmds/param_types.py +16 -6
- chia/cmds/passphrase.py +8 -7
- chia/cmds/passphrase_funcs.py +7 -61
- chia/cmds/peer.py +2 -1
- chia/cmds/peer_funcs.py +5 -5
- chia/cmds/plotnft.py +207 -153
- chia/cmds/plotnft_funcs.py +205 -174
- chia/cmds/plots.py +14 -6
- chia/cmds/plotters.py +2 -1
- chia/cmds/rpc.py +48 -28
- chia/cmds/show.py +2 -1
- chia/cmds/show_funcs.py +7 -6
- chia/cmds/signer.py +50 -58
- chia/cmds/sim.py +22 -14
- chia/cmds/sim_funcs.py +11 -11
- chia/cmds/start.py +3 -3
- chia/cmds/start_funcs.py +9 -12
- chia/cmds/stop.py +4 -3
- chia/cmds/units.py +1 -3
- chia/cmds/wallet.py +252 -96
- chia/cmds/wallet_funcs.py +217 -143
- chia/consensus/block_body_validation.py +133 -86
- chia/consensus/block_creation.py +42 -21
- chia/consensus/block_header_validation.py +32 -37
- chia/consensus/block_record.py +1 -2
- chia/consensus/blockchain.py +167 -180
- chia/consensus/blockchain_interface.py +10 -10
- chia/consensus/constants.py +2 -2
- chia/consensus/default_constants.py +3 -4
- chia/consensus/difficulty_adjustment.py +5 -5
- chia/consensus/find_fork_point.py +5 -5
- chia/consensus/full_block_to_block_record.py +4 -4
- chia/consensus/get_block_challenge.py +2 -2
- chia/consensus/get_block_generator.py +4 -3
- chia/consensus/multiprocess_validation.py +207 -304
- chia/consensus/vdf_info_computation.py +3 -3
- chia/daemon/client.py +46 -27
- chia/daemon/keychain_proxy.py +10 -9
- chia/daemon/keychain_server.py +18 -18
- chia/daemon/server.py +103 -113
- chia/daemon/windows_signal.py +2 -2
- chia/data_layer/data_layer.py +64 -76
- chia/data_layer/data_layer_api.py +8 -0
- chia/data_layer/data_layer_errors.py +3 -3
- chia/data_layer/data_layer_server.py +2 -2
- chia/data_layer/data_layer_util.py +71 -71
- chia/data_layer/data_layer_wallet.py +63 -67
- chia/data_layer/data_store.py +72 -72
- chia/data_layer/dl_wallet_store.py +10 -10
- chia/data_layer/download_data.py +5 -5
- chia/data_layer/s3_plugin_service.py +9 -9
- chia/data_layer/util/benchmark.py +0 -1
- chia/data_layer/util/plugin.py +2 -3
- chia/farmer/farmer.py +46 -43
- chia/farmer/farmer_api.py +27 -21
- chia/full_node/block_height_map.py +6 -6
- chia/full_node/block_store.py +41 -35
- chia/full_node/coin_store.py +42 -41
- chia/full_node/fee_estimate.py +2 -2
- chia/full_node/fee_estimation.py +1 -2
- chia/full_node/fee_history.py +5 -6
- chia/full_node/fee_tracker.py +24 -24
- chia/full_node/full_node.py +574 -300
- chia/full_node/full_node_api.py +181 -130
- chia/full_node/full_node_store.py +43 -43
- chia/full_node/hint_management.py +4 -4
- chia/full_node/hint_store.py +9 -10
- chia/full_node/mempool.py +25 -19
- chia/full_node/mempool_check_conditions.py +11 -42
- chia/full_node/mempool_manager.py +48 -53
- chia/full_node/pending_tx_cache.py +9 -9
- chia/full_node/subscriptions.py +23 -24
- chia/full_node/sync_store.py +8 -7
- chia/full_node/tx_processing_queue.py +3 -3
- chia/full_node/util/__init__.py +0 -0
- chia/full_node/weight_proof.py +79 -78
- chia/harvester/harvester.py +9 -8
- chia/harvester/harvester_api.py +19 -13
- chia/introducer/introducer.py +7 -5
- chia/introducer/introducer_api.py +9 -3
- chia/legacy/keyring.py +6 -5
- chia/plot_sync/delta.py +8 -8
- chia/plot_sync/receiver.py +12 -11
- chia/plot_sync/sender.py +15 -12
- chia/plotters/bladebit.py +12 -12
- chia/plotters/chiapos.py +2 -2
- chia/plotters/madmax.py +8 -8
- chia/plotters/plotters.py +6 -6
- chia/plotters/plotters_util.py +6 -4
- chia/plotting/cache.py +8 -7
- chia/plotting/check_plots.py +8 -8
- chia/plotting/create_plots.py +6 -6
- chia/plotting/manager.py +22 -22
- chia/plotting/util.py +31 -19
- chia/pools/pool_config.py +7 -7
- chia/pools/pool_puzzles.py +16 -16
- chia/pools/pool_wallet.py +64 -57
- chia/pools/pool_wallet_info.py +3 -3
- chia/protocols/full_node_protocol.py +3 -3
- chia/protocols/harvester_protocol.py +12 -12
- chia/protocols/introducer_protocol.py +1 -2
- chia/protocols/protocol_message_types.py +4 -4
- chia/protocols/protocol_state_machine.py +2 -2
- chia/protocols/protocol_timing.py +1 -0
- chia/protocols/shared_protocol.py +3 -3
- chia/protocols/timelord_protocol.py +2 -2
- chia/protocols/wallet_protocol.py +33 -33
- chia/rpc/crawler_rpc_api.py +12 -7
- chia/rpc/data_layer_rpc_api.py +49 -44
- chia/rpc/data_layer_rpc_client.py +41 -41
- chia/rpc/data_layer_rpc_util.py +7 -11
- chia/rpc/farmer_rpc_api.py +32 -27
- chia/rpc/farmer_rpc_client.py +14 -14
- chia/rpc/full_node_rpc_api.py +53 -48
- chia/rpc/full_node_rpc_client.py +30 -30
- chia/rpc/harvester_rpc_api.py +16 -11
- chia/rpc/harvester_rpc_client.py +6 -6
- chia/rpc/rpc_client.py +34 -14
- chia/rpc/rpc_server.py +117 -43
- chia/rpc/timelord_rpc_api.py +9 -4
- chia/rpc/util.py +11 -211
- chia/rpc/wallet_request_types.py +276 -60
- chia/rpc/wallet_rpc_api.py +563 -399
- chia/rpc/wallet_rpc_client.py +220 -250
- chia/seeder/crawl_store.py +6 -8
- chia/seeder/crawler.py +23 -36
- chia/seeder/crawler_api.py +28 -22
- chia/seeder/dns_server.py +99 -50
- chia/seeder/start_crawler.py +13 -9
- chia/server/address_manager.py +19 -19
- chia/server/address_manager_store.py +17 -17
- chia/server/api_protocol.py +106 -1
- chia/server/capabilities.py +3 -3
- chia/server/chia_policy.py +17 -16
- chia/server/introducer_peers.py +3 -3
- chia/server/node_discovery.py +34 -38
- chia/server/rate_limit_numbers.py +26 -16
- chia/server/rate_limits.py +67 -27
- chia/server/server.py +52 -31
- chia/server/signal_handlers.py +6 -3
- chia/server/ssl_context.py +5 -5
- chia/server/start_data_layer.py +37 -23
- chia/server/start_farmer.py +28 -16
- chia/server/start_full_node.py +29 -23
- chia/server/start_harvester.py +28 -15
- chia/server/start_introducer.py +27 -15
- chia/server/start_service.py +17 -29
- chia/server/start_timelord.py +25 -18
- chia/server/start_wallet.py +22 -18
- chia/server/upnp.py +4 -3
- chia/server/ws_connection.py +68 -54
- chia/simulator/add_blocks_in_batches.py +54 -0
- chia/simulator/block_tools.py +65 -64
- chia/simulator/full_node_simulator.py +66 -74
- chia/simulator/setup_services.py +10 -9
- chia/simulator/simulator_full_node_rpc_api.py +12 -14
- chia/simulator/simulator_full_node_rpc_client.py +3 -5
- chia/simulator/simulator_test_tools.py +8 -7
- chia/simulator/socket.py +1 -4
- chia/simulator/ssl_certs.py +5 -5
- chia/simulator/ssl_certs_1.py +2 -4
- chia/simulator/ssl_certs_10.py +2 -4
- chia/simulator/ssl_certs_2.py +2 -4
- chia/simulator/ssl_certs_3.py +2 -4
- chia/simulator/ssl_certs_4.py +2 -4
- chia/simulator/ssl_certs_5.py +2 -4
- chia/simulator/ssl_certs_6.py +2 -4
- chia/simulator/ssl_certs_7.py +2 -4
- chia/simulator/ssl_certs_8.py +2 -4
- chia/simulator/ssl_certs_9.py +2 -4
- chia/simulator/start_simulator.py +14 -6
- chia/simulator/wallet_tools.py +21 -20
- chia/ssl/create_ssl.py +11 -11
- chia/timelord/iters_from_block.py +2 -2
- chia/timelord/timelord.py +57 -33
- chia/timelord/timelord_api.py +12 -6
- chia/timelord/timelord_launcher.py +10 -8
- chia/timelord/timelord_state.py +5 -5
- chia/types/block_protocol.py +2 -2
- chia/types/blockchain_format/coin.py +3 -3
- chia/types/blockchain_format/program.py +17 -18
- chia/types/blockchain_format/tree_hash.py +9 -9
- chia/types/coin_spend.py +8 -8
- chia/types/condition_with_args.py +1 -2
- chia/types/eligible_coin_spends.py +16 -15
- chia/types/generator_types.py +1 -2
- chia/types/internal_mempool_item.py +1 -2
- chia/types/mempool_item.py +7 -7
- chia/types/mempool_submission_status.py +2 -2
- chia/types/peer_info.py +1 -1
- chia/types/spend_bundle.py +1 -2
- chia/types/transaction_queue_entry.py +2 -2
- chia/types/unfinished_header_block.py +2 -2
- chia/types/validation_state.py +14 -0
- chia/types/weight_proof.py +5 -6
- chia/util/action_scope.py +8 -8
- chia/util/async_pool.py +6 -4
- chia/util/augmented_chain.py +13 -9
- chia/util/batches.py +5 -2
- chia/util/bech32m.py +14 -11
- chia/util/beta_metrics.py +5 -4
- chia/util/block_cache.py +5 -5
- chia/util/byte_types.py +2 -0
- chia/util/check_fork_next_block.py +3 -2
- chia/util/chia_logging.py +41 -21
- chia/util/collection.py +3 -3
- chia/util/condition_tools.py +18 -18
- chia/util/config.py +26 -25
- chia/util/cpu.py +2 -0
- chia/util/db_synchronous.py +2 -0
- chia/util/db_version.py +2 -0
- chia/util/db_wrapper.py +13 -10
- chia/util/default_root.py +17 -0
- chia/util/dump_keyring.py +6 -6
- chia/util/errors.py +5 -3
- chia/util/file_keyring.py +22 -33
- chia/util/files.py +2 -0
- chia/util/full_block_utils.py +31 -7
- chia/util/generator_tools.py +18 -8
- chia/util/hash.py +3 -1
- chia/util/initial-config.yaml +19 -0
- chia/util/inline_executor.py +2 -0
- chia/util/ip_address.py +39 -0
- chia/util/json_util.py +0 -4
- chia/util/keychain.py +27 -24
- chia/util/keyring_wrapper.py +65 -4
- chia/util/limited_semaphore.py +3 -1
- chia/util/lock.py +4 -2
- chia/util/log_exceptions.py +5 -2
- chia/util/logging.py +3 -1
- chia/util/lru_cache.py +2 -0
- chia/util/math.py +4 -4
- chia/util/network.py +15 -73
- chia/util/paginator.py +3 -1
- chia/util/path.py +2 -0
- chia/util/permissions.py +3 -2
- chia/util/prev_transaction_block.py +1 -3
- chia/util/priority_mutex.py +6 -3
- chia/util/profiler.py +7 -4
- chia/util/recursive_replace.py +2 -0
- chia/util/safe_cancel_task.py +2 -0
- chia/util/service_groups.py +2 -2
- chia/util/setproctitle.py +2 -0
- chia/util/significant_bits.py +2 -0
- chia/util/ssl_check.py +11 -11
- chia/util/streamable.py +44 -56
- chia/util/task_referencer.py +59 -0
- chia/util/task_timing.py +22 -18
- chia/util/timing.py +4 -1
- chia/util/vdf_prover.py +2 -3
- chia/util/virtual_project_analysis.py +540 -0
- chia/util/ws_message.py +6 -6
- chia/wallet/cat_wallet/cat_info.py +3 -3
- chia/wallet/cat_wallet/cat_outer_puzzle.py +3 -3
- chia/wallet/cat_wallet/cat_utils.py +5 -4
- chia/wallet/cat_wallet/cat_wallet.py +56 -70
- chia/wallet/cat_wallet/dao_cat_info.py +3 -3
- chia/wallet/cat_wallet/dao_cat_wallet.py +18 -18
- chia/wallet/cat_wallet/lineage_store.py +2 -2
- chia/wallet/coin_selection.py +15 -15
- chia/wallet/conditions.py +257 -71
- chia/wallet/dao_wallet/dao_info.py +4 -4
- chia/wallet/dao_wallet/dao_utils.py +43 -42
- chia/wallet/dao_wallet/dao_wallet.py +66 -68
- chia/wallet/db_wallet/db_wallet_puzzles.py +12 -8
- chia/wallet/derive_keys.py +11 -11
- chia/wallet/did_wallet/did_info.py +3 -3
- chia/wallet/did_wallet/did_wallet.py +56 -47
- chia/wallet/did_wallet/did_wallet_puzzles.py +7 -6
- chia/wallet/lineage_proof.py +4 -4
- chia/wallet/nft_wallet/metadata_outer_puzzle.py +2 -2
- chia/wallet/nft_wallet/nft_info.py +4 -4
- chia/wallet/nft_wallet/nft_puzzles.py +16 -16
- chia/wallet/nft_wallet/nft_wallet.py +90 -89
- chia/wallet/nft_wallet/ownership_outer_puzzle.py +2 -2
- chia/wallet/nft_wallet/singleton_outer_puzzle.py +2 -2
- chia/wallet/nft_wallet/transfer_program_puzzle.py +2 -2
- chia/wallet/nft_wallet/uncurry_nft.py +2 -2
- chia/wallet/notification_manager.py +5 -5
- chia/wallet/notification_store.py +6 -6
- chia/wallet/outer_puzzles.py +2 -2
- chia/wallet/payment.py +4 -5
- chia/wallet/puzzle_drivers.py +4 -4
- chia/wallet/puzzles/clawback/drivers.py +5 -5
- chia/wallet/puzzles/clawback/puzzle_decorator.py +7 -7
- chia/wallet/puzzles/load_clvm.py +2 -3
- chia/wallet/puzzles/p2_conditions.py +1 -2
- chia/wallet/puzzles/p2_delegated_conditions.py +1 -2
- chia/wallet/puzzles/p2_delegated_puzzle.py +2 -3
- chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py +3 -4
- chia/wallet/puzzles/p2_m_of_n_delegate_direct.py +1 -2
- chia/wallet/puzzles/p2_puzzle_hash.py +1 -2
- chia/wallet/puzzles/puzzle_utils.py +7 -7
- chia/wallet/puzzles/singleton_top_layer.py +6 -5
- chia/wallet/puzzles/singleton_top_layer_v1_1.py +6 -5
- chia/wallet/puzzles/tails.py +34 -30
- chia/wallet/signer_protocol.py +7 -8
- chia/wallet/singleton.py +4 -4
- chia/wallet/trade_manager.py +155 -141
- chia/wallet/trade_record.py +5 -5
- chia/wallet/trading/offer.py +100 -101
- chia/wallet/trading/trade_store.py +14 -14
- chia/wallet/transaction_record.py +31 -16
- chia/wallet/util/address_type.py +4 -4
- chia/wallet/util/blind_signer_tl.py +8 -12
- chia/wallet/util/clvm_streamable.py +15 -15
- chia/wallet/util/compute_hints.py +5 -5
- chia/wallet/util/compute_memos.py +4 -6
- chia/wallet/util/curry_and_treehash.py +3 -2
- chia/wallet/util/debug_spend_bundle.py +6 -8
- chia/wallet/util/merkle_tree.py +10 -10
- chia/wallet/util/merkle_utils.py +10 -10
- chia/wallet/util/new_peak_queue.py +3 -3
- chia/wallet/util/peer_request_cache.py +8 -8
- chia/{util → wallet/util}/pprint.py +2 -3
- chia/wallet/util/puzzle_compression.py +3 -4
- chia/wallet/util/puzzle_decorator.py +10 -10
- chia/wallet/util/query_filter.py +9 -10
- chia/wallet/util/tx_config.py +12 -12
- chia/wallet/util/wallet_sync_utils.py +24 -21
- chia/wallet/util/wallet_types.py +9 -2
- chia/wallet/vc_wallet/cr_cat_drivers.py +28 -27
- chia/wallet/vc_wallet/cr_cat_wallet.py +42 -40
- chia/wallet/vc_wallet/cr_outer_puzzle.py +4 -4
- chia/wallet/vc_wallet/vc_drivers.py +16 -16
- chia/wallet/vc_wallet/vc_store.py +9 -9
- chia/wallet/vc_wallet/vc_wallet.py +35 -35
- chia/wallet/wallet.py +54 -54
- chia/wallet/wallet_action_scope.py +14 -13
- chia/wallet/wallet_blockchain.py +10 -10
- chia/wallet/wallet_coin_record.py +2 -2
- chia/wallet/wallet_coin_store.py +10 -10
- chia/wallet/wallet_info.py +1 -2
- chia/wallet/wallet_interested_store.py +5 -5
- chia/wallet/wallet_nft_store.py +6 -6
- chia/wallet/wallet_node.py +72 -76
- chia/wallet/wallet_node_api.py +33 -27
- chia/wallet/wallet_pool_store.py +1 -2
- chia/wallet/wallet_protocol.py +15 -15
- chia/wallet/wallet_puzzle_store.py +35 -4
- chia/wallet/wallet_retry_store.py +2 -2
- chia/wallet/wallet_singleton_store.py +10 -9
- chia/wallet/wallet_spend_bundle.py +4 -20
- chia/wallet/wallet_state_manager.py +223 -224
- chia/wallet/wallet_transaction_store.py +44 -18
- chia/wallet/wallet_user_store.py +2 -2
- chia/wallet/wallet_weight_proof_handler.py +2 -2
- {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/LICENSE +1 -1
- {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/METADATA +67 -72
- chia_blockchain-2.5.1rc2.dist-info/RECORD +1042 -0
- {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/WHEEL +1 -1
- mozilla-ca/cacert.pem +32 -87
- chia/_tests/cmds/wallet/test_coins.py +0 -195
- chia/consensus/block_root_validation.py +0 -46
- chia/util/api_decorators.py +0 -89
- chia_blockchain-2.5.0rc2.dist-info/RECORD +0 -1028
- {chia_blockchain-2.5.0rc2.dist-info → chia_blockchain-2.5.1rc2.dist-info}/entry_points.txt +0 -0
chia/full_node/full_node.py
CHANGED
|
@@ -2,6 +2,7 @@ from __future__ import annotations
|
|
|
2
2
|
|
|
3
3
|
import asyncio
|
|
4
4
|
import contextlib
|
|
5
|
+
import copy
|
|
5
6
|
import dataclasses
|
|
6
7
|
import logging
|
|
7
8
|
import multiprocessing
|
|
@@ -9,27 +10,18 @@ import random
|
|
|
9
10
|
import sqlite3
|
|
10
11
|
import time
|
|
11
12
|
import traceback
|
|
13
|
+
from collections.abc import AsyncIterator, Awaitable, Sequence
|
|
12
14
|
from multiprocessing.context import BaseContext
|
|
13
15
|
from pathlib import Path
|
|
14
|
-
from typing import
|
|
15
|
-
|
|
16
|
-
|
|
17
|
-
|
|
18
|
-
|
|
19
|
-
|
|
20
|
-
|
|
21
|
-
|
|
22
|
-
List,
|
|
23
|
-
Optional,
|
|
24
|
-
Set,
|
|
25
|
-
TextIO,
|
|
26
|
-
Tuple,
|
|
27
|
-
Union,
|
|
28
|
-
cast,
|
|
29
|
-
final,
|
|
16
|
+
from typing import TYPE_CHECKING, Any, Callable, ClassVar, Optional, TextIO, Union, cast, final
|
|
17
|
+
|
|
18
|
+
from chia_rs import (
|
|
19
|
+
AugSchemeMPL,
|
|
20
|
+
BLSCache,
|
|
21
|
+
get_flags_for_height_and_constants,
|
|
22
|
+
run_block_generator,
|
|
23
|
+
run_block_generator2,
|
|
30
24
|
)
|
|
31
|
-
|
|
32
|
-
from chia_rs import AugSchemeMPL, BLSCache
|
|
33
25
|
from packaging.version import Version
|
|
34
26
|
|
|
35
27
|
from chia.consensus.block_body_validation import ForkInfo
|
|
@@ -40,9 +32,8 @@ from chia.consensus.blockchain_interface import BlockchainInterface
|
|
|
40
32
|
from chia.consensus.constants import ConsensusConstants
|
|
41
33
|
from chia.consensus.cost_calculator import NPCResult
|
|
42
34
|
from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
|
|
43
|
-
from chia.consensus.get_block_generator import get_block_generator
|
|
44
35
|
from chia.consensus.make_sub_epoch_summary import next_sub_epoch_summary
|
|
45
|
-
from chia.consensus.multiprocess_validation import PreValidationResult,
|
|
36
|
+
from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_block
|
|
46
37
|
from chia.consensus.pot_iterations import calculate_sp_iters
|
|
47
38
|
from chia.full_node.block_store import BlockStore
|
|
48
39
|
from chia.full_node.coin_store import CoinStore
|
|
@@ -76,7 +67,6 @@ from chia.types.blockchain_format.vdf import CompressibleVDFField, VDFInfo, VDFP
|
|
|
76
67
|
from chia.types.coin_record import CoinRecord
|
|
77
68
|
from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
|
|
78
69
|
from chia.types.full_block import FullBlock
|
|
79
|
-
from chia.types.generator_types import BlockGenerator
|
|
80
70
|
from chia.types.header_block import HeaderBlock
|
|
81
71
|
from chia.types.mempool_inclusion_status import MempoolInclusionStatus
|
|
82
72
|
from chia.types.mempool_item import MempoolItem
|
|
@@ -84,7 +74,9 @@ from chia.types.peer_info import PeerInfo
|
|
|
84
74
|
from chia.types.spend_bundle import SpendBundle
|
|
85
75
|
from chia.types.transaction_queue_entry import TransactionQueueEntry
|
|
86
76
|
from chia.types.unfinished_block import UnfinishedBlock
|
|
77
|
+
from chia.types.validation_state import ValidationState
|
|
87
78
|
from chia.types.weight_proof import WeightProof
|
|
79
|
+
from chia.util.augmented_chain import AugmentedBlockchain
|
|
88
80
|
from chia.util.bech32m import encode_puzzle_hash
|
|
89
81
|
from chia.util.check_fork_next_block import check_fork_next_block
|
|
90
82
|
from chia.util.condition_tools import pkm_pairs
|
|
@@ -95,28 +87,29 @@ from chia.util.db_wrapper import DBWrapper2, manage_connection
|
|
|
95
87
|
from chia.util.errors import ConsensusError, Err, TimestampError, ValidationError
|
|
96
88
|
from chia.util.ints import uint8, uint32, uint64, uint128
|
|
97
89
|
from chia.util.limited_semaphore import LimitedSemaphore
|
|
98
|
-
from chia.util.
|
|
90
|
+
from chia.util.network import is_localhost
|
|
99
91
|
from chia.util.path import path_from_root
|
|
100
92
|
from chia.util.profiler import enable_profiler, mem_profile_task, profile_task
|
|
101
93
|
from chia.util.safe_cancel_task import cancel_task_safe
|
|
94
|
+
from chia.util.task_referencer import create_referenced_task
|
|
102
95
|
|
|
103
96
|
|
|
104
97
|
# This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2
|
|
105
98
|
@dataclasses.dataclass
|
|
106
99
|
class PeakPostProcessingResult:
|
|
107
|
-
mempool_peak_result:
|
|
108
|
-
mempool_removals:
|
|
100
|
+
mempool_peak_result: list[NewPeakItem] # The new items from calling MempoolManager.new_peak
|
|
101
|
+
mempool_removals: list[MempoolRemoveInfo] # The removed mempool items from calling MempoolManager.new_peak
|
|
109
102
|
fns_peak_result: FullNodeStorePeakResult # The result of calling FullNodeStore.new_peak
|
|
110
|
-
hints:
|
|
111
|
-
lookup_coin_ids:
|
|
103
|
+
hints: list[tuple[bytes32, bytes]] # The hints added to the DB
|
|
104
|
+
lookup_coin_ids: list[bytes32] # The coin IDs that we need to look up to notify wallets of changes
|
|
112
105
|
|
|
113
106
|
|
|
114
107
|
@dataclasses.dataclass(frozen=True)
|
|
115
108
|
class WalletUpdate:
|
|
116
109
|
fork_height: uint32
|
|
117
110
|
peak: Peak
|
|
118
|
-
coin_records:
|
|
119
|
-
hints:
|
|
111
|
+
coin_records: list[CoinRecord]
|
|
112
|
+
hints: dict[bytes32, bytes32]
|
|
120
113
|
|
|
121
114
|
|
|
122
115
|
@final
|
|
@@ -128,32 +121,33 @@ class FullNode:
|
|
|
128
121
|
_protocol_check: ClassVar[RpcServiceProtocol] = cast("FullNode", None)
|
|
129
122
|
|
|
130
123
|
root_path: Path
|
|
131
|
-
config:
|
|
124
|
+
config: dict[str, Any]
|
|
132
125
|
constants: ConsensusConstants
|
|
133
|
-
signage_point_times:
|
|
126
|
+
signage_point_times: list[float]
|
|
134
127
|
full_node_store: FullNodeStore
|
|
135
128
|
log: logging.Logger
|
|
136
129
|
db_path: Path
|
|
137
130
|
wallet_sync_queue: asyncio.Queue[WalletUpdate]
|
|
138
|
-
|
|
131
|
+
_segment_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list)
|
|
139
132
|
initialized: bool = False
|
|
140
133
|
_server: Optional[ChiaServer] = None
|
|
141
134
|
_shut_down: bool = False
|
|
142
|
-
pow_creation:
|
|
135
|
+
pow_creation: dict[bytes32, asyncio.Event] = dataclasses.field(default_factory=dict)
|
|
143
136
|
state_changed_callback: Optional[StateChangedProtocol] = None
|
|
144
137
|
full_node_peers: Optional[FullNodePeers] = None
|
|
145
138
|
sync_store: SyncStore = dataclasses.field(default_factory=SyncStore)
|
|
146
139
|
uncompact_task: Optional[asyncio.Task[None]] = None
|
|
147
|
-
compact_vdf_requests:
|
|
140
|
+
compact_vdf_requests: set[bytes32] = dataclasses.field(default_factory=set)
|
|
148
141
|
# TODO: Logging isn't setup yet so the log entries related to parsing the
|
|
149
142
|
# config would end up on stdout if handled here.
|
|
150
143
|
multiprocessing_context: Optional[BaseContext] = None
|
|
151
|
-
_ui_tasks:
|
|
144
|
+
_ui_tasks: set[asyncio.Task[None]] = dataclasses.field(default_factory=set)
|
|
152
145
|
subscriptions: PeerSubscriptions = dataclasses.field(default_factory=PeerSubscriptions)
|
|
153
146
|
_transaction_queue_task: Optional[asyncio.Task[None]] = None
|
|
154
147
|
simulator_transaction_callback: Optional[Callable[[bytes32], Awaitable[None]]] = None
|
|
155
|
-
|
|
148
|
+
_sync_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list)
|
|
156
149
|
_transaction_queue: Optional[TransactionQueue] = None
|
|
150
|
+
_tx_task_list: list[asyncio.Task[None]] = dataclasses.field(default_factory=list)
|
|
157
151
|
_compact_vdf_sem: Optional[LimitedSemaphore] = None
|
|
158
152
|
_new_peak_sem: Optional[LimitedSemaphore] = None
|
|
159
153
|
_add_transaction_semaphore: Optional[asyncio.Semaphore] = None
|
|
@@ -167,7 +161,7 @@ class FullNode:
|
|
|
167
161
|
_timelord_lock: Optional[asyncio.Lock] = None
|
|
168
162
|
weight_proof_handler: Optional[WeightProofHandler] = None
|
|
169
163
|
# hashes of peaks that failed long sync on chip13 Validation
|
|
170
|
-
bad_peak_cache:
|
|
164
|
+
bad_peak_cache: dict[bytes32, uint32] = dataclasses.field(default_factory=dict)
|
|
171
165
|
wallet_sync_task: Optional[asyncio.Task[None]] = None
|
|
172
166
|
_bls_cache: BLSCache = dataclasses.field(default_factory=lambda: BLSCache(50000))
|
|
173
167
|
|
|
@@ -183,7 +177,7 @@ class FullNode:
|
|
|
183
177
|
@classmethod
|
|
184
178
|
async def create(
|
|
185
179
|
cls,
|
|
186
|
-
config:
|
|
180
|
+
config: dict[str, Any],
|
|
187
181
|
root_path: Path,
|
|
188
182
|
consensus_constants: ConsensusConstants,
|
|
189
183
|
name: str = __name__,
|
|
@@ -265,6 +259,7 @@ class FullNode:
|
|
|
265
259
|
start_time = time.monotonic()
|
|
266
260
|
reserved_cores = self.config.get("reserved_cores", 0)
|
|
267
261
|
single_threaded = self.config.get("single_threaded", False)
|
|
262
|
+
log_coins = self.config.get("log_coins", False)
|
|
268
263
|
multiprocessing_start_method = process_config_start_method(config=self.config, log=self.log)
|
|
269
264
|
self.multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
|
|
270
265
|
self._blockchain = await Blockchain.create(
|
|
@@ -273,8 +268,8 @@ class FullNode:
|
|
|
273
268
|
consensus_constants=self.constants,
|
|
274
269
|
blockchain_dir=self.db_path.parent,
|
|
275
270
|
reserved_cores=reserved_cores,
|
|
276
|
-
multiprocessing_context=self.multiprocessing_context,
|
|
277
271
|
single_threaded=single_threaded,
|
|
272
|
+
log_coins=log_coins,
|
|
278
273
|
)
|
|
279
274
|
|
|
280
275
|
self._mempool_manager = MempoolManager(
|
|
@@ -285,12 +280,12 @@ class FullNode:
|
|
|
285
280
|
|
|
286
281
|
# Transactions go into this queue from the server, and get sent to respond_transaction
|
|
287
282
|
self._transaction_queue = TransactionQueue(1000, self.log)
|
|
288
|
-
self._transaction_queue_task: asyncio.Task[None] =
|
|
283
|
+
self._transaction_queue_task: asyncio.Task[None] = create_referenced_task(self._handle_transactions())
|
|
289
284
|
|
|
290
|
-
self._init_weight_proof =
|
|
285
|
+
self._init_weight_proof = create_referenced_task(self.initialize_weight_proof())
|
|
291
286
|
|
|
292
287
|
if self.config.get("enable_profiler", False):
|
|
293
|
-
|
|
288
|
+
create_referenced_task(profile_task(self.root_path, "node", self.log), known_unreferenced=True)
|
|
294
289
|
|
|
295
290
|
self.profile_block_validation = self.config.get("profile_block_validation", False)
|
|
296
291
|
if self.profile_block_validation: # pragma: no cover
|
|
@@ -300,7 +295,7 @@ class FullNode:
|
|
|
300
295
|
profile_dir.mkdir(parents=True, exist_ok=True)
|
|
301
296
|
|
|
302
297
|
if self.config.get("enable_memory_profiler", False):
|
|
303
|
-
|
|
298
|
+
create_referenced_task(mem_profile_task(self.root_path, "node", self.log), known_unreferenced=True)
|
|
304
299
|
|
|
305
300
|
time_taken = time.monotonic() - start_time
|
|
306
301
|
peak: Optional[BlockRecord] = self.blockchain.get_peak()
|
|
@@ -321,21 +316,23 @@ class FullNode:
|
|
|
321
316
|
)
|
|
322
317
|
async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
|
|
323
318
|
pending_tx = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), None)
|
|
324
|
-
|
|
325
|
-
|
|
326
|
-
|
|
327
|
-
|
|
328
|
-
|
|
329
|
-
|
|
330
|
-
|
|
331
|
-
|
|
319
|
+
assert len(pending_tx.items) == 0 # no pending transactions when starting up
|
|
320
|
+
|
|
321
|
+
full_peak: Optional[FullBlock] = await self.blockchain.get_full_peak()
|
|
322
|
+
assert full_peak is not None
|
|
323
|
+
state_change_summary = StateChangeSummary(peak, uint32(max(peak.height - 1, 0)), [], [], [], [])
|
|
324
|
+
# Must be called under priority_mutex
|
|
325
|
+
ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
|
|
326
|
+
full_peak, state_change_summary, None
|
|
327
|
+
)
|
|
328
|
+
# Can be called outside of priority_mutex
|
|
332
329
|
await self.peak_post_processing_2(full_peak, None, state_change_summary, ppp_result)
|
|
333
330
|
if self.config["send_uncompact_interval"] != 0:
|
|
334
331
|
sanitize_weight_proof_only = False
|
|
335
332
|
if "sanitize_weight_proof_only" in self.config:
|
|
336
333
|
sanitize_weight_proof_only = self.config["sanitize_weight_proof_only"]
|
|
337
334
|
assert self.config["target_uncompact_proofs"] != 0
|
|
338
|
-
self.uncompact_task =
|
|
335
|
+
self.uncompact_task = create_referenced_task(
|
|
339
336
|
self.broadcast_uncompact_blocks(
|
|
340
337
|
self.config["send_uncompact_interval"],
|
|
341
338
|
self.config["target_uncompact_proofs"],
|
|
@@ -343,11 +340,11 @@ class FullNode:
|
|
|
343
340
|
)
|
|
344
341
|
)
|
|
345
342
|
if self.wallet_sync_task is None or self.wallet_sync_task.done():
|
|
346
|
-
self.wallet_sync_task =
|
|
343
|
+
self.wallet_sync_task = create_referenced_task(self._wallets_sync_task_handler())
|
|
347
344
|
|
|
348
345
|
self.initialized = True
|
|
349
346
|
if self.full_node_peers is not None:
|
|
350
|
-
|
|
347
|
+
create_referenced_task(self.full_node_peers.start(), known_unreferenced=True)
|
|
351
348
|
try:
|
|
352
349
|
yield
|
|
353
350
|
finally:
|
|
@@ -363,21 +360,39 @@ class FullNode:
|
|
|
363
360
|
self.mempool_manager.shut_down()
|
|
364
361
|
|
|
365
362
|
if self.full_node_peers is not None:
|
|
366
|
-
|
|
363
|
+
create_referenced_task(self.full_node_peers.close(), known_unreferenced=True)
|
|
367
364
|
if self.uncompact_task is not None:
|
|
368
365
|
self.uncompact_task.cancel()
|
|
369
366
|
if self._transaction_queue_task is not None:
|
|
370
367
|
self._transaction_queue_task.cancel()
|
|
371
368
|
cancel_task_safe(task=self.wallet_sync_task, log=self.log)
|
|
372
|
-
|
|
373
|
-
|
|
369
|
+
for one_tx_task in self._tx_task_list:
|
|
370
|
+
if not one_tx_task.done():
|
|
371
|
+
cancel_task_safe(task=one_tx_task, log=self.log)
|
|
372
|
+
for one_sync_task in self._sync_task_list:
|
|
373
|
+
if not one_sync_task.done():
|
|
374
|
+
cancel_task_safe(task=one_sync_task, log=self.log)
|
|
375
|
+
for segment_task in self._segment_task_list:
|
|
376
|
+
cancel_task_safe(segment_task, self.log)
|
|
374
377
|
for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()):
|
|
375
378
|
cancel_task_safe(task, self.log)
|
|
376
379
|
if self._init_weight_proof is not None:
|
|
377
380
|
await asyncio.wait([self._init_weight_proof])
|
|
378
|
-
|
|
379
|
-
|
|
380
|
-
|
|
381
|
+
for one_tx_task in self._tx_task_list:
|
|
382
|
+
if one_tx_task.done():
|
|
383
|
+
self.log.info(f"TX task {one_tx_task.get_name()} done")
|
|
384
|
+
else:
|
|
385
|
+
with contextlib.suppress(asyncio.CancelledError):
|
|
386
|
+
self.log.info(f"Awaiting TX task {one_tx_task.get_name()}")
|
|
387
|
+
await one_tx_task
|
|
388
|
+
for one_sync_task in self._sync_task_list:
|
|
389
|
+
if one_sync_task.done():
|
|
390
|
+
self.log.info(f"Long sync task {one_sync_task.get_name()} done")
|
|
391
|
+
else:
|
|
392
|
+
with contextlib.suppress(asyncio.CancelledError):
|
|
393
|
+
self.log.info(f"Awaiting long sync task {one_sync_task.get_name()}")
|
|
394
|
+
await one_sync_task
|
|
395
|
+
await asyncio.gather(*self._segment_task_list, return_exceptions=True)
|
|
381
396
|
|
|
382
397
|
@property
|
|
383
398
|
def block_store(self) -> BlockStore:
|
|
@@ -434,9 +449,9 @@ class FullNode:
|
|
|
434
449
|
assert self._compact_vdf_sem is not None
|
|
435
450
|
return self._compact_vdf_sem
|
|
436
451
|
|
|
437
|
-
def get_connections(self, request_node_type: Optional[NodeType]) ->
|
|
452
|
+
def get_connections(self, request_node_type: Optional[NodeType]) -> list[dict[str, Any]]:
|
|
438
453
|
connections = self.server.get_connections(request_node_type)
|
|
439
|
-
con_info:
|
|
454
|
+
con_info: list[dict[str, Any]] = []
|
|
440
455
|
if self.sync_store is not None:
|
|
441
456
|
peak_store = self.sync_store.peer_to_peak
|
|
442
457
|
else:
|
|
@@ -451,7 +466,7 @@ class FullNode:
|
|
|
451
466
|
peak_height = None
|
|
452
467
|
peak_hash = None
|
|
453
468
|
peak_weight = None
|
|
454
|
-
con_dict:
|
|
469
|
+
con_dict: dict[str, Any] = {
|
|
455
470
|
"type": con.connection_type,
|
|
456
471
|
"local_port": con.local_port,
|
|
457
472
|
"peer_host": con.peer_info.host,
|
|
@@ -494,8 +509,14 @@ class FullNode:
|
|
|
494
509
|
# We use a semaphore to make sure we don't send more than 200 concurrent calls of respond_transaction.
|
|
495
510
|
# However, doing them one at a time would be slow, because they get sent to other processes.
|
|
496
511
|
await self.add_transaction_semaphore.acquire()
|
|
512
|
+
|
|
513
|
+
# Clean up task reference list (used to prevent gc from killing running tasks)
|
|
514
|
+
for oldtask in self._tx_task_list[:]:
|
|
515
|
+
if oldtask.done():
|
|
516
|
+
self._tx_task_list.remove(oldtask)
|
|
517
|
+
|
|
497
518
|
item: TransactionQueueEntry = await self.transaction_queue.pop()
|
|
498
|
-
|
|
519
|
+
self._tx_task_list.append(create_referenced_task(self._handle_one_transaction(item)))
|
|
499
520
|
|
|
500
521
|
async def initialize_weight_proof(self) -> None:
|
|
501
522
|
self.weight_proof_handler = WeightProofHandler(
|
|
@@ -509,7 +530,7 @@ class FullNode:
|
|
|
509
530
|
|
|
510
531
|
def set_server(self, server: ChiaServer) -> None:
|
|
511
532
|
self._server = server
|
|
512
|
-
dns_servers:
|
|
533
|
+
dns_servers: list[str] = []
|
|
513
534
|
network_name = self.config["selected_network"]
|
|
514
535
|
try:
|
|
515
536
|
default_port = self.config["network_overrides"]["config"][network_name]["default_full_node_port"]
|
|
@@ -539,7 +560,7 @@ class FullNode:
|
|
|
539
560
|
self.log.error(f"Exception in peer discovery: {e}")
|
|
540
561
|
self.log.error(f"Exception Stack: {error_stack}")
|
|
541
562
|
|
|
542
|
-
def _state_changed(self, change: str, change_data: Optional[
|
|
563
|
+
def _state_changed(self, change: str, change_data: Optional[dict[str, Any]] = None) -> None:
|
|
543
564
|
if self.state_changed_callback is not None:
|
|
544
565
|
self.state_changed_callback(change, change_data)
|
|
545
566
|
|
|
@@ -575,22 +596,29 @@ class FullNode:
|
|
|
575
596
|
self.sync_store.batch_syncing.remove(peer.peer_node_id)
|
|
576
597
|
self.log.error(f"Error short batch syncing, could not fetch block at height {start_height}")
|
|
577
598
|
return False
|
|
578
|
-
|
|
599
|
+
hash = self.blockchain.height_to_hash(first.block.height - 1)
|
|
600
|
+
assert hash is not None
|
|
601
|
+
if hash != first.block.prev_header_hash:
|
|
579
602
|
self.log.info("Batch syncing stopped, this is a deep chain")
|
|
580
603
|
self.sync_store.batch_syncing.remove(peer.peer_node_id)
|
|
581
604
|
# First sb not connected to our blockchain, do a long sync instead
|
|
582
605
|
return False
|
|
583
606
|
|
|
584
607
|
batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
|
|
585
|
-
|
|
586
|
-
|
|
587
|
-
self.
|
|
588
|
-
|
|
589
|
-
self.log
|
|
590
|
-
self._segment_task = None
|
|
608
|
+
for task in self._segment_task_list[:]:
|
|
609
|
+
if task.done():
|
|
610
|
+
self._segment_task_list.remove(task)
|
|
611
|
+
else:
|
|
612
|
+
cancel_task_safe(task=task, log=self.log)
|
|
591
613
|
|
|
592
614
|
try:
|
|
593
615
|
peer_info = peer.get_peer_logging()
|
|
616
|
+
if start_height > 0:
|
|
617
|
+
fork_hash = self.blockchain.height_to_hash(uint32(start_height - 1))
|
|
618
|
+
else:
|
|
619
|
+
fork_hash = self.constants.GENESIS_CHALLENGE
|
|
620
|
+
assert fork_hash
|
|
621
|
+
fork_info = ForkInfo(start_height - 1, start_height - 1, fork_hash)
|
|
594
622
|
for height in range(start_height, target_height, batch_size):
|
|
595
623
|
end_height = min(target_height, height + batch_size)
|
|
596
624
|
request = RequestBlocks(uint32(height), uint32(end_height), True)
|
|
@@ -607,8 +635,9 @@ class FullNode:
|
|
|
607
635
|
ssi, diff = get_next_sub_slot_iters_and_difficulty(
|
|
608
636
|
self.constants, new_slot, prev_b, self.blockchain
|
|
609
637
|
)
|
|
610
|
-
|
|
611
|
-
|
|
638
|
+
vs = ValidationState(ssi, diff, None)
|
|
639
|
+
success, state_change_summary = await self.add_block_batch(
|
|
640
|
+
response.blocks, peer_info, fork_info, vs
|
|
612
641
|
)
|
|
613
642
|
if not success:
|
|
614
643
|
raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}")
|
|
@@ -621,7 +650,6 @@ class FullNode:
|
|
|
621
650
|
state_change_summary,
|
|
622
651
|
peer,
|
|
623
652
|
)
|
|
624
|
-
await self.peak_post_processing_2(peak_fb, peer, state_change_summary, ppp_result)
|
|
625
653
|
except Exception:
|
|
626
654
|
# Still do post processing after cancel (or exception)
|
|
627
655
|
peak_fb = await self.blockchain.get_full_peak()
|
|
@@ -630,10 +658,11 @@ class FullNode:
|
|
|
630
658
|
raise
|
|
631
659
|
finally:
|
|
632
660
|
self.log.info(f"Added blocks {height}-{end_height}")
|
|
633
|
-
|
|
661
|
+
if state_change_summary is not None and peak_fb is not None:
|
|
662
|
+
# Call outside of priority_mutex to encourage concurrency
|
|
663
|
+
await self.peak_post_processing_2(peak_fb, peer, state_change_summary, ppp_result)
|
|
664
|
+
finally:
|
|
634
665
|
self.sync_store.batch_syncing.remove(peer.peer_node_id)
|
|
635
|
-
raise
|
|
636
|
-
self.sync_store.batch_syncing.remove(peer.peer_node_id)
|
|
637
666
|
return True
|
|
638
667
|
|
|
639
668
|
async def short_sync_backtrack(
|
|
@@ -674,15 +703,22 @@ class FullNode:
|
|
|
674
703
|
f"Failed to fetch block {curr_height} from {peer.get_peer_logging()}, wrong type {type(curr)}"
|
|
675
704
|
)
|
|
676
705
|
blocks.append(curr.block)
|
|
677
|
-
if
|
|
706
|
+
if curr_height == 0:
|
|
707
|
+
found_fork_point = True
|
|
708
|
+
break
|
|
709
|
+
hash_at_height = self.blockchain.height_to_hash(curr.block.height - 1)
|
|
710
|
+
if hash_at_height is not None and hash_at_height == curr.block.prev_header_hash:
|
|
678
711
|
found_fork_point = True
|
|
679
712
|
break
|
|
680
713
|
curr_height -= 1
|
|
681
714
|
if found_fork_point:
|
|
715
|
+
first_block = blocks[-1] # blocks are reveresd this is the lowest block to add
|
|
716
|
+
# we create the fork_info and pass it here so it would be updated on each call to add_block
|
|
717
|
+
fork_info = ForkInfo(first_block.height - 1, first_block.height - 1, first_block.prev_header_hash)
|
|
682
718
|
for block in reversed(blocks):
|
|
683
719
|
# when syncing, we won't share any signatures with the
|
|
684
720
|
# mempool, so there's no need to pass in the BLS cache.
|
|
685
|
-
await self.add_block(block, peer)
|
|
721
|
+
await self.add_block(block, peer, fork_info=fork_info)
|
|
686
722
|
except (asyncio.CancelledError, Exception):
|
|
687
723
|
self.sync_store.decrement_backtrack_syncing(node_id=peer.peer_node_id)
|
|
688
724
|
raise
|
|
@@ -711,7 +747,7 @@ class FullNode:
|
|
|
711
747
|
# Updates heights in the UI. Sleeps 1.5s before, so other peers have time to update their peaks as well.
|
|
712
748
|
# Limit to 3 refreshes.
|
|
713
749
|
if not seen_header_hash and len(self._ui_tasks) < 3:
|
|
714
|
-
self._ui_tasks.add(
|
|
750
|
+
self._ui_tasks.add(create_referenced_task(self._refresh_ui_connections(1.5)))
|
|
715
751
|
# Prune completed connect tasks
|
|
716
752
|
self._ui_tasks = set(filter(lambda t: not t.done(), self._ui_tasks))
|
|
717
753
|
except Exception as e:
|
|
@@ -733,7 +769,7 @@ class FullNode:
|
|
|
733
769
|
# If peer connects while we are syncing, check if they have the block we are syncing towards
|
|
734
770
|
target_peak = self.sync_store.target_peak
|
|
735
771
|
if target_peak is not None and request.header_hash != target_peak.header_hash:
|
|
736
|
-
peak_peers:
|
|
772
|
+
peak_peers: set[bytes32] = self.sync_store.get_peers_that_have_peak([target_peak.header_hash])
|
|
737
773
|
# Don't ask if we already know this peer has the peak
|
|
738
774
|
if peer.peer_node_id not in peak_peers:
|
|
739
775
|
target_peak_response: Optional[RespondBlock] = await peer.call_api(
|
|
@@ -774,9 +810,16 @@ class FullNode:
|
|
|
774
810
|
if await self.short_sync_batch(peer, uint32(max(curr_peak_height - 6, 0)), request.height):
|
|
775
811
|
return None
|
|
776
812
|
|
|
813
|
+
# Clean up task reference list (used to prevent gc from killing running tasks)
|
|
814
|
+
for oldtask in self._sync_task_list[:]:
|
|
815
|
+
if oldtask.done():
|
|
816
|
+
self._sync_task_list.remove(oldtask)
|
|
817
|
+
|
|
777
818
|
# This is the either the case where we were not able to sync successfully (for example, due to the fork
|
|
778
819
|
# point being in the past), or we are very far behind. Performs a long sync.
|
|
779
|
-
|
|
820
|
+
# Multiple tasks may be created here. If we don't save all handles, a task could enter a sync object
|
|
821
|
+
# and be cleaned up by the GC, corrupting the sync object and possibly not allowing anything else in.
|
|
822
|
+
self._sync_task_list.append(create_referenced_task(self._sync()))
|
|
780
823
|
|
|
781
824
|
async def send_peak_to_timelords(
|
|
782
825
|
self, peak_block: Optional[FullBlock] = None, peer: Optional[WSChiaConnection] = None
|
|
@@ -864,7 +907,7 @@ class FullNode:
|
|
|
864
907
|
self._state_changed("add_connection")
|
|
865
908
|
self._state_changed("sync_mode")
|
|
866
909
|
if self.full_node_peers is not None:
|
|
867
|
-
|
|
910
|
+
create_referenced_task(self.full_node_peers.on_connect(connection))
|
|
868
911
|
|
|
869
912
|
if self.initialized is False:
|
|
870
913
|
return None
|
|
@@ -926,6 +969,7 @@ class FullNode:
|
|
|
926
969
|
- Disconnect peers that provide invalid blocks or don't have the blocks
|
|
927
970
|
"""
|
|
928
971
|
# Ensure we are only syncing once and not double calling this method
|
|
972
|
+
fork_point: Optional[uint32] = None
|
|
929
973
|
if self.sync_store.get_sync_mode():
|
|
930
974
|
return None
|
|
931
975
|
|
|
@@ -941,7 +985,7 @@ class FullNode:
|
|
|
941
985
|
# Wait until we have 3 peaks or up to a max of 30 seconds
|
|
942
986
|
max_iterations = int(self.config.get("max_sync_wait", 30)) * 10
|
|
943
987
|
|
|
944
|
-
self.log.info(f"Waiting to receive peaks from peers. (timeout: {max_iterations/10}s)")
|
|
988
|
+
self.log.info(f"Waiting to receive peaks from peers. (timeout: {max_iterations / 10}s)")
|
|
945
989
|
peaks = []
|
|
946
990
|
for i in range(max_iterations):
|
|
947
991
|
peaks = [peak.header_hash for peak in self.sync_store.get_peak_of_each_peer().values()]
|
|
@@ -988,6 +1032,12 @@ class FullNode:
|
|
|
988
1032
|
# Ensures that the fork point does not change
|
|
989
1033
|
async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
|
|
990
1034
|
await self.blockchain.warmup(fork_point)
|
|
1035
|
+
fork_point = await check_fork_next_block(
|
|
1036
|
+
self.blockchain,
|
|
1037
|
+
fork_point,
|
|
1038
|
+
self.get_peers_with_peak(target_peak.header_hash),
|
|
1039
|
+
node_next_block_check,
|
|
1040
|
+
)
|
|
991
1041
|
await self.sync_from_fork_point(fork_point, target_peak.height, target_peak.header_hash, summaries)
|
|
992
1042
|
except asyncio.CancelledError:
|
|
993
1043
|
self.log.warning("Syncing failed, CancelledError")
|
|
@@ -997,16 +1047,20 @@ class FullNode:
|
|
|
997
1047
|
finally:
|
|
998
1048
|
if self._shut_down:
|
|
999
1049
|
return None
|
|
1000
|
-
await self._finish_sync()
|
|
1050
|
+
await self._finish_sync(fork_point)
|
|
1001
1051
|
|
|
1002
1052
|
async def request_validate_wp(
|
|
1003
1053
|
self, peak_header_hash: bytes32, peak_height: uint32, peak_weight: uint128
|
|
1004
|
-
) ->
|
|
1054
|
+
) -> tuple[uint32, list[SubEpochSummary]]:
|
|
1005
1055
|
if self.weight_proof_handler is None:
|
|
1006
1056
|
raise RuntimeError("Weight proof handler is None")
|
|
1007
1057
|
peers_with_peak = self.get_peers_with_peak(peak_header_hash)
|
|
1008
1058
|
# Request weight proof from a random peer
|
|
1009
|
-
|
|
1059
|
+
peers_with_peak_len = len(peers_with_peak)
|
|
1060
|
+
self.log.info(f"Total of {peers_with_peak_len} peers with peak {peak_height}")
|
|
1061
|
+
# We can't choose from an empty sequence
|
|
1062
|
+
if peers_with_peak_len == 0:
|
|
1063
|
+
raise RuntimeError(f"Not performing sync, no peers with peak {peak_height}")
|
|
1010
1064
|
weight_proof_peer: WSChiaConnection = random.choice(peers_with_peak)
|
|
1011
1065
|
self.log.info(
|
|
1012
1066
|
f"Requesting weight proof from peer {weight_proof_peer.peer_info.host} up to height {peak_height}"
|
|
@@ -1058,14 +1112,9 @@ class FullNode:
         fork_point_height: uint32,
         target_peak_sb_height: uint32,
         peak_hash: bytes32,
-        summaries:
+        summaries: list[SubEpochSummary],
     ) -> None:
-        buffer_size = 4
         self.log.info(f"Start syncing from fork point at {fork_point_height} up to {target_peak_sb_height}")
-        peers_with_peak: List[WSChiaConnection] = self.get_peers_with_peak(peak_hash)
-        fork_point_height = await check_fork_next_block(
-            self.blockchain, fork_point_height, peers_with_peak, node_next_block_check
-        )
         batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
         counter = 0
         if fork_point_height != 0:
@@ -1085,12 +1134,60 @@ class FullNode:
         # normally "fork_point" or "fork_height" refers to the first common
         # block between the main chain and the fork. Here "fork_point_height"
         # seems to refer to the first diverging block
+        # in case we're validating a reorg fork (i.e. not extending the
+        # main chain), we need to record the coin set from that fork in
+        # fork_info. Otherwise validation is very expensive, especially
+        # for deep reorgs
+        if fork_point_height > 0:
+            fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
+            assert fork_hash is not None
+        else:
+            fork_hash = self.constants.GENESIS_CHALLENGE
+        fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)

-
-
-
+        if fork_point_height == 0:
+            ssi = self.constants.SUB_SLOT_ITERS_STARTING
+            diff = self.constants.DIFFICULTY_STARTING
+            prev_ses_block = None
+        else:
+            prev_b_hash = self.blockchain.height_to_hash(fork_point_height)
+            assert prev_b_hash is not None
+            prev_b = await self.blockchain.get_full_block(prev_b_hash)
+            assert prev_b is not None
+            ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None)
+
+        # we need an augmented blockchain to validate blocks in batches. The
+        # batch must be treated as if it's part of the chain to validate the
+        # blocks in it. We also need them to keep appearing as if they're part
+        # of the chain when pipelining the validation of blocks. We start
+        # validating the next batch while still adding the first batch to the
+        # chain.
+        blockchain = AugmentedBlockchain(self.blockchain)
+        peers_with_peak: list[WSChiaConnection] = self.get_peers_with_peak(peak_hash)
+
+        async def fetch_blocks(output_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]]) -> None:
+            # the rate limit for respond_blocks is 100 messages / 60 seconds.
+            # But the limit is scaled to 30% for outbound messages, so that's 30
+            # messages per 60 seconds.
+            # That's 2 seconds per request.
+            seconds_per_request = 2
             start_height, end_height = 0, 0
-
+
+            # the timestamp of when the next request_block message is allowed to
+            # be sent. It's initialized to the current time, and bumped by the
+            # seconds_per_request every time we send a request. This ensures we
+            # won't exceed the 100 requests / 60 seconds rate limit.
+            # Whichever peer has the lowest timestamp is the one we request
+            # from. peers that take more than 5 seconds to respond are pushed to
+            # the end of the queue, to be less likely to request from.
+
+            # This should be cleaned up to not be a hard coded value, and maybe
+            # allow higher request rates (and align the request_blocks and
+            # respond_blocks rate limits).
+            now = time.monotonic()
+            new_peers_with_peak: list[tuple[WSChiaConnection, float]] = [(c, now) for c in peers_with_peak[:]]
+            self.log.info(f"peers with peak: {len(new_peers_with_peak)}")
+            random.shuffle(new_peers_with_peak)
             try:
                 # block request ranges are *inclusive*, this requires some
                 # gymnastics of this range (+1 to make it exclusive, like normal
@@ -1098,100 +1195,185 @@ class FullNode:
                 for start_height in range(fork_point_height, target_peak_sb_height + 1, batch_size):
                     end_height = min(target_peak_sb_height, start_height + batch_size - 1)
                     request = RequestBlocks(uint32(start_height), uint32(end_height), True)
+                    new_peers_with_peak.sort(key=lambda pair: pair[1])
                     fetched = False
-                    for peer in
+                    for idx, (peer, timestamp) in enumerate(new_peers_with_peak):
                         if peer.closed:
                             continue
-
+
+                        start = time.monotonic()
+                        if start < timestamp:
+                            # rate limit ourselves, since we sent a message to
+                            # this peer too recently
+                            await asyncio.sleep(timestamp - start)
+                            start = time.monotonic()
+
+                        # update the timestamp, now that we're sending a request
+                        # it's OK for the timestamp to fall behind wall-clock
+                        # time. It just means we're allowed to send more
+                        # requests to catch up
+                        if is_localhost(peer.peer_info.host):
+                            # we don't apply rate limits to localhost, and our
+                            # tests depend on it
+                            bump = 0.1
+                        else:
+                            bump = seconds_per_request
+
+                        new_peers_with_peak[idx] = (
+                            new_peers_with_peak[idx][0],
+                            new_peers_with_peak[idx][1] + bump,
+                        )
+                        # the fewer peers we have, the more willing we should be
+                        # to wait for them.
+                        timeout = int(30 + 30 / len(new_peers_with_peak))
+                        response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=timeout)
+                        end = time.monotonic()
                         if response is None:
+                            self.log.info(f"peer timed out after {end - start:.1f} s")
                             await peer.close()
                         elif isinstance(response, RespondBlocks):
-
+                            if end - start > 5:
+                                self.log.info(f"peer took {end - start:.1f} s to respond to request_blocks")
+                                # this isn't a great peer, reduce its priority
+                                # to prefer any peers that had to wait for it.
+                                # By setting the next allowed timestamp to now,
+                                # means that any other peer that has waited for
+                                # this will have its next allowed timestamp in
+                                # the passed, and be prefered multiple times
+                                # over this peer.
+                                new_peers_with_peak[idx] = (
+                                    new_peers_with_peak[idx][0],
+                                    end,
+                                )
+                            start = time.monotonic()
+                            await output_queue.put((peer, response.blocks))
+                            end = time.monotonic()
+                            if end - start > 1:
+                                self.log.info(
+                                    f"sync pipeline back-pressure. stalled {end - start:0.2f} "
+                                    "seconds on prevalidate block"
+                                )
                             fetched = True
                             break
                     if fetched is False:
                         self.log.error(f"failed fetching {start_height} to {end_height} from peers")
                         return
                     if self.sync_store.peers_changed.is_set():
-
+                        existing_peers = {id(c): timestamp for c, timestamp in new_peers_with_peak}
+                        peers = self.get_peers_with_peak(peak_hash)
+                        new_peers_with_peak = [(c, existing_peers.get(id(c), end)) for c in peers]
+                        random.shuffle(new_peers_with_peak)
                         self.sync_store.peers_changed.clear()
+                        self.log.info(f"peers with peak: {len(new_peers_with_peak)}")
             except Exception as e:
                 self.log.error(f"Exception fetching {start_height} to {end_height} from peer {e}")
             finally:
                 # finished signal with None
-                await
+                await output_queue.put(None)
+
+        async def validate_blocks(
+            input_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]],
+            output_queue: asyncio.Queue[
+                Optional[
+                    tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]
+                ]
+            ],
+        ) -> None:
+            nonlocal blockchain
+            nonlocal fork_info
+            first_batch = True
+
+            vs = ValidationState(ssi, diff, prev_ses_block)
+
+            try:
+                while True:
+                    res: Optional[tuple[WSChiaConnection, list[FullBlock]]] = await input_queue.get()
+                    if res is None:
+                        self.log.debug("done fetching blocks")
+                        return None
+                    peer, blocks = res
+
+                    # skip_blocks is only relevant at the start of the sync,
+                    # to skip blocks we already have in the database (and have
+                    # been validated). Once we start validating blocks, we
+                    # shouldn't be skipping any.
+                    blocks_to_validate = await self.skip_blocks(blockchain, blocks, fork_info, vs)
+                    assert first_batch or len(blocks_to_validate) == len(blocks)
+                    next_validation_state = copy.copy(vs)
+
+                    if len(blocks_to_validate) == 0:
+                        continue

-
-
+                    first_batch = False
+
+                    futures: list[Awaitable[PreValidationResult]] = []
+                    for block in blocks_to_validate:
+                        futures.extend(
+                            await self.prevalidate_blocks(
+                                blockchain,
+                                [block],
+                                vs,
+                                summaries,
+                            )
+                        )
+                    start = time.monotonic()
+                    await output_queue.put((peer, next_validation_state, list(futures), blocks_to_validate))
+                    end = time.monotonic()
+                    if end - start > 1:
+                        self.log.info(f"sync pipeline back-pressure. stalled {end - start:0.2f} seconds on add_block()")
+            except Exception:
+                self.log.exception("Exception validating")
+            finally:
+                # finished signal with None
+                await output_queue.put(None)
+
+        async def ingest_blocks(
+            input_queue: asyncio.Queue[
+                Optional[
+                    tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]
+                ]
+            ],
         ) -> None:
-            fork_info
-            if fork_point_height == 0:
-                ssi = self.constants.SUB_SLOT_ITERS_STARTING
-                diff = self.constants.DIFFICULTY_STARTING
-                prev_ses_block = None
-            else:
-                prev_b_hash = self.blockchain.height_to_hash(fork_point_height)
-                assert prev_b_hash is not None
-                prev_b = await self.blockchain.get_full_block(prev_b_hash)
-                assert prev_b is not None
-                ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None)
+            nonlocal fork_info
             block_rate = 0
             block_rate_time = time.monotonic()
             block_rate_height = -1
             while True:
-                res
+                res = await input_queue.get()
                 if res is None:
-                    self.log.debug("done
+                    self.log.debug("done validating blocks")
                     return None
-                peer, blocks = res
+                peer, vs, futures, blocks = res
                 start_height = blocks[0].height
                 end_height = blocks[-1].height

                 if block_rate_height == -1:
                     block_rate_height = start_height

-
-                #
-                #
-
-
-                if fork_info is None:
-                    peak = self.blockchain.get_peak()
-                    extending_main_chain: bool = peak is None or (
-                        peak.header_hash == blocks[0].prev_header_hash or peak.header_hash == blocks[0].header_hash
-                    )
-                    # if we're simply extending the main chain, it's important
-                    # *not* to pass in a ForkInfo object, as it can potentially
-                    # accrue a large state (with no value, since we can validate
-                    # against the CoinStore)
-                    if not extending_main_chain:
-                        if fork_point_height == 0:
-                            fork_info = ForkInfo(-1, -1, self.constants.GENESIS_CHALLENGE)
-                        else:
-                            fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
-                            assert fork_hash is not None
-                            fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)
-
-                success, state_change_summary, ssi, diff, prev_ses_block, err = await self.add_block_batch(
+                pre_validation_results = list(await asyncio.gather(*futures))
+                # The ValidationState object (vs) is an in-out parameter. the add_block_batch()
+                # call will update it
+                state_change_summary, err = await self.add_prevalidated_blocks(
+                    blockchain,
                     blocks,
-
+                    pre_validation_results,
                     fork_info,
-
-
-                    prev_ses_block,
-                    summaries,
+                    peer.peer_info,
+                    vs,
                 )
-                if
+                if err is not None:
                     await peer.close(600)
-                    raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
+                    raise ValueError(f"Failed to validate block batch {start_height} to {end_height}: {err}")
                 if end_height - block_rate_height > 100:
                     now = time.monotonic()
                     block_rate = int((end_height - block_rate_height) // (now - block_rate_time))
                     block_rate_time = now
                     block_rate_height = end_height

-                self.log.info(
-
+                self.log.info(
+                    f"Added blocks {start_height} to {end_height} ({block_rate} blocks/s) (from: {peer.peer_info.ip})"
+                )
+                peak: Optional[BlockRecord] = self.blockchain.get_peak()
                 if state_change_summary is not None:
                     assert peak is not None
                     # Hints must be added to the DB. The other post-processing tasks are not required when syncing
@@ -1208,20 +1390,35 @@ class FullNode:
                     # height, in that case.
                     self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE)

-
-
-
-
-
+        block_queue: asyncio.Queue[Optional[tuple[WSChiaConnection, list[FullBlock]]]] = asyncio.Queue(maxsize=10)
+        validation_queue: asyncio.Queue[
+            Optional[tuple[WSChiaConnection, ValidationState, list[Awaitable[PreValidationResult]], list[FullBlock]]]
+        ] = asyncio.Queue(maxsize=10)
+
+        fetch_task = create_referenced_task(fetch_blocks(block_queue))
+        validate_task = create_referenced_task(validate_blocks(block_queue, validation_queue))
+        ingest_task = create_referenced_task(ingest_blocks(validation_queue))
         try:
-
-            await asyncio.gather(fetch_task, validate_task)
+            await asyncio.gather(fetch_task, validate_task, ingest_task)
         except Exception:
-
-
+            self.log.exception("sync from fork point failed")
+        finally:
+            cancel_task_safe(validate_task, self.log)
+            cancel_task_safe(fetch_task)
+            cancel_task_safe(ingest_task)
+
+            # we still need to await all the pending futures of the
+            # prevalidation steps posted to the thread pool
+            while not validation_queue.empty():
+                result = validation_queue.get_nowait()
+                if result is None:
+                    continue
+
+                _, _, futures, _ = result
+                await asyncio.gather(*futures)

-    def get_peers_with_peak(self, peak_hash: bytes32) ->
-        peer_ids:
+    def get_peers_with_peak(self, peak_hash: bytes32) -> list[WSChiaConnection]:
+        peer_ids: set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash])
         if len(peer_ids) == 0:
             self.log.warning(f"Not syncing, no peers with header_hash {peak_hash} ")
             return []
@@ -1240,7 +1437,7 @@ class FullNode:
         self.log.debug(
             f"update_wallets - fork_height: {wallet_update.fork_height}, peak_height: {wallet_update.peak.height}"
         )
-        changes_for_peer:
+        changes_for_peer: dict[bytes32, set[CoinState]] = {}
         for coin_record in wallet_update.coin_records:
             coin_id = coin_record.name
             subscribed_peers = self.subscriptions.peers_for_coin_id(coin_id)
@@ -1276,44 +1473,75 @@ class FullNode:

     async def add_block_batch(
         self,
-        all_blocks:
+        all_blocks: list[FullBlock],
         peer_info: PeerInfo,
-        fork_info:
-
-
-
-        wp_summaries: Optional[List[SubEpochSummary]] = None,
-    ) -> Tuple[bool, Optional[StateChangeSummary], uint64, uint64, Optional[BlockRecord], Optional[Err]]:
+        fork_info: ForkInfo,
+        vs: ValidationState,  # in-out parameter
+        wp_summaries: Optional[list[SubEpochSummary]] = None,
+    ) -> tuple[bool, Optional[StateChangeSummary]]:
         # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i
         # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced

-
+        pre_validate_start = time.monotonic()
+        blockchain = AugmentedBlockchain(self.blockchain)
+        blocks_to_validate = await self.skip_blocks(blockchain, all_blocks, fork_info, vs)
+
+        if len(blocks_to_validate) == 0:
+            return True, None
+
+        futures = await self.prevalidate_blocks(
+            blockchain,
+            blocks_to_validate,
+            copy.copy(vs),
+            wp_summaries,
+        )
+        pre_validation_results = list(await asyncio.gather(*futures))
+
+        agg_state_change_summary, err = await self.add_prevalidated_blocks(
+            blockchain,
+            blocks_to_validate,
+            pre_validation_results,
+            fork_info,
+            peer_info,
+            vs,
+        )
+
+        if agg_state_change_summary is not None:
+            self._state_changed("new_peak")
+            self.log.debug(
+                f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, "
+                f"advanced: True"
+            )
+        return err is None, agg_state_change_summary
+
+    async def skip_blocks(
+        self,
+        blockchain: AugmentedBlockchain,
+        all_blocks: list[FullBlock],
+        fork_info: ForkInfo,
+        vs: ValidationState,  # in-out parameter
+    ) -> list[FullBlock]:
+        blocks_to_validate: list[FullBlock] = []
         for i, block in enumerate(all_blocks):
             header_hash = block.header_hash
-            block_rec = await
+            block_rec = await blockchain.get_block_record_from_db(header_hash)
             if block_rec is None:
                 blocks_to_validate = all_blocks[i:]
                 break
             else:
-
+                blockchain.add_block_record(block_rec)
                 if block_rec.sub_epoch_summary_included:
                     # already validated block, update sub slot iters, difficulty and prev sub epoch summary
-                    prev_ses_block = block_rec
+                    vs.prev_ses_block = block_rec
                     if block_rec.sub_epoch_summary_included.new_sub_slot_iters is not None:
-
+                        vs.ssi = block_rec.sub_epoch_summary_included.new_sub_slot_iters
                     if block_rec.sub_epoch_summary_included.new_difficulty is not None:
-
+                        vs.difficulty = block_rec.sub_epoch_summary_included.new_difficulty

-                if fork_info is None:
-                    continue
                 # the below section updates the fork_info object, if
                 # there is one.
-
-                # TODO: it seems unnecessary to request overlapping block ranges
-                # when syncing
                 if block.height <= fork_info.peak_height:
                     continue
-
                 # we have already validated this block once, no need to do it again.
                 # however, if this block is not part of the main chain, we need to
                 # update the fork context with its additions and removals
@@ -1326,52 +1554,60 @@ class FullNode:
                 # removals in fork_info.
                 await self.blockchain.advance_fork_info(block, fork_info)
                 await self.blockchain.run_single_block(block, fork_info)
+        return blocks_to_validate

-
-
+    async def prevalidate_blocks(
+        self,
+        blockchain: AugmentedBlockchain,
+        blocks_to_validate: list[FullBlock],
+        vs: ValidationState,
+        wp_summaries: Optional[list[SubEpochSummary]] = None,
+    ) -> Sequence[Awaitable[PreValidationResult]]:
+        """
+        This is a thin wrapper over pre_validate_block().

+        Args:
+            blockchain:
+            blocks_to_validate:
+            vs: The ValidationState for the first block in the batch. This is an in-out
+                parameter. It will be updated to be the validation state for the next
+                batch of blocks.
+            wp_summaries:
+        """
         # Validates signatures in multiprocessing since they take a while, and we don't have cached transactions
         # for these blocks (unlike during normal operation where we validate one at a time)
-
-
-
-
-
-
-
-
-
-
-
-            validate_signatures=True,
-        )
-        pre_validate_end = time.monotonic()
-        pre_validate_time = pre_validate_end - pre_validate_start
-
-        self.log.log(
-            logging.WARNING if pre_validate_time > 10 else logging.DEBUG,
-            f"Block pre-validation: {pre_validate_end - pre_validate_start:0.2f}s "
-            f"CLVM: {sum(pvr.timing/1000.0 for pvr in pre_validation_results):0.2f}s "
-            f"({len(blocks_to_validate)} blocks, start height: {blocks_to_validate[0].height})",
-        )
-
-        for i, block in enumerate(blocks_to_validate):
-            if pre_validation_results[i].error is not None:
-                self.log.error(
-                    f"Invalid block from peer: {peer_info} height {block.height} {Err(pre_validation_results[i].error)}"
-                )
-                return (
-                    False,
+        # We have to copy the ValidationState object to preserve it for the add_block()
+        # call below. pre_validate_block() will update the
+        # object we pass in.
+        ret: list[Awaitable[PreValidationResult]] = []
+        for block in blocks_to_validate:
+            ret.append(
+                await pre_validate_block(
+                    self.constants,
+                    blockchain,
+                    block,
+                    self.blockchain.pool,
                     None,
-
-
-                    prev_ses_block,
-                    Err(pre_validation_results[i].error),
+                    vs,
+                    wp_summaries=wp_summaries,
                 )
+            )
+        return ret

+    async def add_prevalidated_blocks(
+        self,
+        blockchain: AugmentedBlockchain,
+        blocks_to_validate: list[FullBlock],
+        pre_validation_results: list[PreValidationResult],
+        fork_info: ForkInfo,
+        peer_info: PeerInfo,
+        vs: ValidationState,  # in-out parameter
+    ) -> tuple[Optional[StateChangeSummary], Optional[Err]]:
         agg_state_change_summary: Optional[StateChangeSummary] = None
         block_record = await self.blockchain.get_block_record_from_db(blocks_to_validate[0].prev_header_hash)
         for i, block in enumerate(blocks_to_validate):
+            header_hash = block.header_hash
+            assert vs.prev_ses_block is None or vs.prev_ses_block.height < block.height
             assert pre_validation_results[i].required_iters is not None
             state_change_summary: Optional[StateChangeSummary]
             # when adding blocks in batches, we won't have any overlapping
@@ -1382,23 +1618,30 @@ class FullNode:
                 cc_sub_slot = block.finished_sub_slots[0].challenge_chain
                 if cc_sub_slot.new_sub_slot_iters is not None or cc_sub_slot.new_difficulty is not None:
                     expected_sub_slot_iters, expected_difficulty = get_next_sub_slot_iters_and_difficulty(
-                        self.constants, True, block_record,
+                        self.constants, True, block_record, blockchain
                     )
                     assert cc_sub_slot.new_sub_slot_iters is not None
-
+                    vs.ssi = cc_sub_slot.new_sub_slot_iters
                     assert cc_sub_slot.new_difficulty is not None
-
-                    assert expected_sub_slot_iters ==
-                    assert expected_difficulty ==
+                    vs.difficulty = cc_sub_slot.new_difficulty
+                    assert expected_sub_slot_iters == vs.ssi
+                    assert expected_difficulty == vs.difficulty
+            block_rec = blockchain.block_record(block.header_hash)
             result, error, state_change_summary = await self.blockchain.add_block(
-                block,
+                block,
+                pre_validation_results[i],
+                vs.ssi,
+                fork_info,
+                prev_ses_block=vs.prev_ses_block,
+                block_record=block_rec,
             )
+            if error is None:
+                blockchain.remove_extra_block(header_hash)

             if result == AddBlockResult.NEW_PEAK:
                 # since this block just added a new peak, we've don't need any
                 # fork history from fork_info anymore
-
-                fork_info.reset(block.height, block.header_hash)
+                fork_info.reset(block.height, header_hash)
                 assert state_change_summary is not None
                 # Since all blocks are contiguous, we can simply append the rollback changes and npc results
                 if agg_state_change_summary is None:
@@ -1414,27 +1657,23 @@ class FullNode:
                         agg_state_change_summary.additions + state_change_summary.additions,
                         agg_state_change_summary.new_rewards + state_change_summary.new_rewards,
                     )
-            elif result
+            elif result in {AddBlockResult.INVALID_BLOCK, AddBlockResult.DISCONNECTED_BLOCK}:
                 if error is not None:
                     self.log.error(f"Error: {error}, Invalid block from peer: {peer_info} ")
-                return
-                block_record =
+                return agg_state_change_summary, error
+            block_record = blockchain.block_record(header_hash)
             assert block_record is not None
             if block_record.sub_epoch_summary_included is not None:
-                prev_ses_block = block_record
+                vs.prev_ses_block = block_record
                 if self.weight_proof_handler is not None:
                     await self.weight_proof_handler.create_prev_sub_epoch_segments()
         if agg_state_change_summary is not None:
             self._state_changed("new_peak")
-
-                f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, "
-                f"advanced: True"
-            )
-            return True, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, None
+        return agg_state_change_summary, None

     async def get_sub_slot_iters_difficulty_ses_block(
         self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64]
-    ) ->
+    ) -> tuple[uint64, uint64, Optional[BlockRecord]]:
@@ -1471,7 +1710,7 @@ class FullNode:
         assert diff is not None
         return ssi, diff, prev_ses_block

-    async def _finish_sync(self) -> None:
+    async def _finish_sync(self, fork_point: Optional[uint32]) -> None:
         """
         Finalize sync by setting sync mode to False, clearing all sync information, and adding any final
         blocks that we have finalized recently.
@@ -1487,12 +1726,17 @@ class FullNode:
             peak: Optional[BlockRecord] = self.blockchain.get_peak()
             peak_fb: Optional[FullBlock] = await self.blockchain.get_full_peak()
             if peak_fb is not None:
+                if fork_point is None:
+                    fork_point = uint32(max(peak_fb.height - 1, 0))
                 assert peak is not None
-                state_change_summary = StateChangeSummary(peak,
+                state_change_summary = StateChangeSummary(peak, fork_point, [], [], [], [])
                 ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
                     peak_fb, state_change_summary, None
                 )
-
+
+        if peak_fb is not None:
+            # Call outside of priority_mutex to encourage concurrency
+            await self.peak_post_processing_2(peak_fb, None, state_change_summary, ppp_result)

         if peak is not None and self.weight_proof_handler is not None:
             await self.weight_proof_handler.get_proof_of_weight(peak.header_hash)
@@ -1595,6 +1839,7 @@ class FullNode:
             self.log.info(
                 f"🌱 Updated peak to height {record.height}, weight {record.weight}, "
                 f"hh {record.header_hash.hex()}, "
+                f"ph {record.prev_hash.hex()}, "
                 f"forked at {state_change_summary.fork_height}, rh: {record.reward_infusion_new_challenge.hex()}, "
                 f"total iters: {record.total_iters}, "
                 f"overflow: {record.overflow}, "
@@ -1668,7 +1913,7 @@ class FullNode:
         )

         # Update the mempool (returns successful pending transactions added to the mempool)
-        spent_coins:
+        spent_coins: list[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals]
         mempool_new_peak_result = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), spent_coins)

         return PeakPostProcessingResult(
@@ -1734,7 +1979,7 @@ class FullNode:
         else:
             await self.server.send_to_all([msg], NodeType.FULL_NODE)

-        coin_hints:
+        coin_hints: dict[bytes32, bytes32] = {
             coin_id: bytes32(hint) for coin_id, hint in ppp_result.hints if len(hint) == 32
         }

@@ -1773,6 +2018,8 @@ class FullNode:
         # Adds the block to seen, and check if it's seen before (which means header is in memory)
         header_hash = block.header_hash
         if self.blockchain.contains_block(header_hash):
+            if fork_info is not None:
+                await self.blockchain.run_single_block(block, fork_info)
             return None

         pre_validation_result: Optional[PreValidationResult] = None
@@ -1790,6 +2037,7 @@ class FullNode:
             unf_entry: Optional[UnfinishedBlockEntry] = self.full_node_store.get_unfinished_block_result(
                 unfinished_rh, foliage_hash
             )
+            assert unf_entry is None or unf_entry.result is None or unf_entry.result.validated_signature is True
             if (
                 unf_entry is not None
                 and unf_entry.unfinished_block is not None
@@ -1844,12 +2092,14 @@ class FullNode:
         ):
             # After acquiring the lock, check again, because another asyncio thread might have added it
             if self.blockchain.contains_block(header_hash):
+                if fork_info is not None:
+                    await self.blockchain.run_single_block(block, fork_info)
                 return None
             validation_start = time.monotonic()
             # Tries to add the block to the blockchain, if we already validated transactions, don't do it again
-
+            conds = None
             if pre_validation_result is not None and pre_validation_result.conds is not None:
-
+                conds = pre_validation_result.conds

             # Don't validate signatures because we want to validate them in the main thread later, since we have a
             # cache available
@@ -1864,40 +2114,34 @@ class FullNode:
                 prev_ses_block = curr
             new_slot = len(block.finished_sub_slots) > 0
             ssi, diff = get_next_sub_slot_iters_and_difficulty(self.constants, new_slot, prev_b, self.blockchain)
-
+            future = await pre_validate_block(
                 self.blockchain.constants,
-                self.blockchain,
-
+                AugmentedBlockchain(self.blockchain),
+                block,
                 self.blockchain.pool,
-
-
-                difficulty=diff,
-                prev_ses_block=prev_ses_block,
-                validate_signatures=False,
+                conds,
+                ValidationState(ssi, diff, prev_ses_block),
             )
+            pre_validation_result = await future
             added: Optional[AddBlockResult] = None
             pre_validation_time = time.monotonic() - validation_start
             try:
-                if
-
-                if pre_validation_results[0].error is not None:
-                    if Err(pre_validation_results[0].error) == Err.INVALID_PREV_BLOCK_HASH:
+                if pre_validation_result.error is not None:
+                    if Err(pre_validation_result.error) == Err.INVALID_PREV_BLOCK_HASH:
                         added = AddBlockResult.DISCONNECTED_BLOCK
                         error_code: Optional[Err] = Err.INVALID_PREV_BLOCK_HASH
-                    elif Err(
+                    elif Err(pre_validation_result.error) == Err.TIMESTAMP_TOO_FAR_IN_FUTURE:
                         raise TimestampError()
                     else:
                         raise ValueError(
                             f"Failed to validate block {header_hash} height "
-                            f"{block.height}: {Err(
+                            f"{block.height}: {Err(pre_validation_result.error).name}"
                         )
                 else:
-
-
-                    )
-                    assert result_to_validate.required_iters == pre_validation_results[0].required_iters
+                    if fork_info is None:
+                        fork_info = ForkInfo(block.height - 1, block.height - 1, block.prev_header_hash)
                     (added, error_code, state_change_summary) = await self.blockchain.add_block(
-                        block,
+                        block, pre_validation_result, ssi, fork_info
                     )
                     if added == AddBlockResult.ALREADY_HAVE_BLOCK:
                         return None
@@ -1911,6 +2155,12 @@ class FullNode:
                             raise RuntimeError("Expected block to be added, received disconnected block.")
                         return None
                     elif added == AddBlockResult.NEW_PEAK:
+                        # Evict any related BLS cache entries as we no longer need them
+                        if bls_cache is not None and pre_validation_result.conds is not None:
+                            pairs_pks, pairs_msgs = pkm_pairs(
+                                pre_validation_result.conds, self.constants.AGG_SIG_ME_ADDITIONAL_DATA
+                            )
+                            bls_cache.evict(pairs_pks, pairs_msgs)
                         # Only propagate blocks which extend the blockchain (becomes one of the heads)
                         assert state_change_summary is not None
                         post_process_time = time.monotonic()
@@ -1952,7 +2202,7 @@ class FullNode:
             logging.WARNING if validation_time > 2 else logging.DEBUG,
             f"Block validation: {validation_time:0.2f}s, "
             f"pre_validation: {pre_validation_time:0.2f}s, "
-            f"CLVM: {
+            f"CLVM: {pre_validation_result.timing / 1000.0:0.2f}s, "
             f"post-process: {post_process_time:0.2f}s, "
             f"cost: {block.transactions_info.cost if block.transactions_info is not None else 'None'}"
             f"{percent_full_str} header_hash: {header_hash.hex()} height: {block.height}",
@@ -1974,7 +2224,7 @@ class FullNode:
             self.full_node_store.clear_candidate_blocks_below(clear_height)
             self.full_node_store.clear_unfinished_blocks_below(clear_height)

-        state_changed_data:
+        state_changed_data: dict[str, Any] = {
             "transaction_block": False,
             "k_size": block.reward_chain_block.proof_of_space.size,
             "header_hash": block.header_hash,
@@ -2008,8 +2258,12 @@ class FullNode:

         record = self.blockchain.block_record(block.header_hash)
         if self.weight_proof_handler is not None and record.sub_epoch_summary_included is not None:
-
-
+            self._segment_task_list.append(
+                create_referenced_task(self.weight_proof_handler.create_prev_sub_epoch_segments())
+            )
+        for task in self._segment_task_list[:]:
+            if task.done():
+                self._segment_task_list.remove(task)
         return None

     async def add_unfinished_block(
@@ -2017,7 +2271,6 @@ class FullNode:
         block: UnfinishedBlock,
         peer: Optional[WSChiaConnection],
         farmed_block: bool = False,
-        block_bytes: Optional[bytes] = None,
     ) -> None:
         """
         We have received an unfinished block, either created by us, or from another peer.
@@ -2096,29 +2349,44 @@ class FullNode:
         if block.transactions_generator is not None:
             pre_validation_start = time.monotonic()
             assert block.transactions_info is not None
-
-
-
+            if len(block.transactions_generator_ref_list) > 0:
+                generator_refs = set(block.transactions_generator_ref_list)
+                generators: dict[uint32, bytes] = await self.blockchain.lookup_block_generators(
+                    block.prev_header_hash, generator_refs
                 )
-
-
-
-                raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
-            if block_bytes is None:
-                block_bytes = bytes(block)
+                generator_args = [generators[height] for height in block.transactions_generator_ref_list]
+            else:
+                generator_args = []

             height = uint32(0) if prev_b is None else uint32(prev_b.height + 1)
-
-            pre_validation_time = time.monotonic() - pre_validation_start
+            flags = get_flags_for_height_and_constants(height, self.constants)

-            #
-            #
-
-
-
-
-
-
+            # on mainnet we won't receive unfinished blocks for heights
+            # below the hard fork activation, but we have tests where we do
+            if height >= self.constants.HARD_FORK_HEIGHT:
+                run_block = run_block_generator2
+            else:
+                run_block = run_block_generator
+
+            # run_block() also validates the signature
+            err, conditions = await asyncio.get_running_loop().run_in_executor(
+                self.blockchain.pool,
+                run_block,
+                bytes(block.transactions_generator),
+                generator_args,
+                min(self.constants.MAX_BLOCK_COST_CLVM, block.transactions_info.cost),
+                flags,
+                block.transactions_info.aggregated_signature,
+                self._bls_cache,
+                self.constants,
+            )
+
+            if err is not None:
+                raise ConsensusError(Err(err))
+            assert conditions is not None
+            assert conditions.validated_signature
+            npc_result = NPCResult(None, conditions)
+            pre_validation_time = time.monotonic() - pre_validation_start

         async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
             # TODO: pre-validate VDFs outside of lock
@@ -2128,9 +2396,6 @@ class FullNode:
                 raise ConsensusError(Err(validate_result.error))
             validation_time = time.monotonic() - validation_start

-            # respond_block will later use the cache (validated_signature=True)
-            validate_result = dataclasses.replace(validate_result, validated_signature=True)
-
             assert validate_result.required_iters is not None

             # Perform another check, in case we have already concurrently added the same unfinished block
@@ -2295,7 +2560,7 @@ class FullNode:
             )
             return None

-        finished_sub_slots: Optional[
+        finished_sub_slots: Optional[list[EndOfSubSlotBundle]] = self.full_node_store.get_finished_sub_slots(
             self.blockchain,
             prev_b,
             last_slot_cc_hash,
@@ -2355,7 +2620,7 @@ class FullNode:

     async def add_end_of_sub_slot(
         self, end_of_slot_bundle: EndOfSubSlotBundle, peer: WSChiaConnection
-    ) ->
+    ) -> tuple[Optional[Message], bool]:
         fetched_ss = self.full_node_store.get_sub_slot(end_of_slot_bundle.challenge_chain.get_hash())

         # We are not interested in sub-slots which have the same challenge chain but different reward chain. If there
@@ -2377,7 +2642,7 @@ class FullNode:
             full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
                 end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
                 uint8(0),
-                bytes32
+                bytes32.zeros,
             )
             return (
                 make_msg(ProtocolMessageTypes.request_signage_point_or_end_of_sub_slot, full_node_request),
@@ -2452,7 +2717,7 @@ class FullNode:

     async def add_transaction(
         self, transaction: SpendBundle, spend_name: bytes32, peer: Optional[WSChiaConnection] = None, test: bool = False
-    ) ->
+    ) -> tuple[MempoolInclusionStatus, Optional[Err]]:
         if self.sync_store.get_sync_mode():
             return MempoolInclusionStatus.FAILED, Err.NO_TRANSACTIONS_WHILE_SYNCING
         if not test and not (await self.synced()):
@@ -2483,6 +2748,15 @@ class FullNode:
             self.mempool_manager.remove_seen(spend_name)
             raise

+        if self.config.get("log_mempool", False):  # pragma: no cover
+            try:
+                mempool_dir = path_from_root(self.root_path, "mempool-log") / f"{self.blockchain.get_peak_height()}"
+                mempool_dir.mkdir(parents=True, exist_ok=True)
+                with open(mempool_dir / f"{spend_name}.bundle", "wb+") as f:
+                    f.write(bytes(transaction))
+            except Exception:
+                self.log.exception(f"Failed to log mempool item: {spend_name}")
+
         async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.low):
             if self.mempool_manager.get_spendbundle(spend_name) is not None:
                 self.mempool_manager.remove_seen(spend_name)
@@ -2509,7 +2783,7 @@ class FullNode:
                 await self.broadcast_added_tx(mempool_item, current_peer=peer)

                 if self.simulator_transaction_callback is not None:  # callback
-                    await self.simulator_transaction_callback(spend_name)
+                    await self.simulator_transaction_callback(spend_name)

             else:
                 self.mempool_manager.remove_seen(spend_name)
@@ -2567,7 +2841,7 @@ class FullNode:
                 f"Broadcasting added transaction {mempool_item.name} to {len(peer_ids)} peers took {total_time:.4f}s",
             )

-    async def broadcast_removed_tx(self, mempool_removals:
+    async def broadcast_removed_tx(self, mempool_removals: list[MempoolRemoveInfo]) -> None:
         total_removals = sum(len(r.items) for r in mempool_removals)
         if total_removals == 0:
             return
@@ -2585,7 +2859,7 @@ class FullNode:
         if len(all_peers) == 0:
             return

-        removals_to_send:
+        removals_to_send: dict[bytes32, list[RemovedMempoolItem]] = dict()

         for removal_info in mempool_removals:
             for internal_mempool_item in removal_info.items:
@@ -2602,7 +2876,7 @@ class FullNode:

                 transaction_id = internal_mempool_item.spend_bundle.name()

-                self.log.debug(f"Broadcasting removed transaction {transaction_id} to
+                self.log.debug(f"Broadcasting removed transaction {transaction_id} to wallet peers {peer_ids}")

                 for peer_id in peer_ids:
                     peer = self.server.all_connections.get(peer_id)
@@ -2926,7 +3200,7 @@ class FullNode:
                 return None
             await asyncio.sleep(30)

-            broadcast_list:
+            broadcast_list: list[timelord_protocol.RequestCompactProofOfTime] = []

             self.log.info("Getting random heights for bluebox to compact")

@@ -2942,7 +3216,7 @@ class FullNode:

             for h in heights:
                 headers = await self.blockchain.get_header_blocks_in_range(h, h, tx_filter=False)
-                records:
+                records: dict[bytes32, BlockRecord] = {}
                 if sanitize_weight_proof_only:
                     records = await self.blockchain.get_block_records_in_range(h, h)
                 for header in headers.values():
@@ -3010,7 +3284,7 @@ class FullNode:
                     )
                 )

-            broadcast_list_chunks:
+            broadcast_list_chunks: list[list[timelord_protocol.RequestCompactProofOfTime]] = []
             for index in range(0, len(broadcast_list), target_uncompact_proofs):
                 broadcast_list_chunks.append(broadcast_list[index : index + target_uncompact_proofs])
             if len(broadcast_list_chunks) == 0: