chia-blockchain 2.4.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (1028)
  1. chia/__init__.py +10 -0
  2. chia/__main__.py +5 -0
  3. chia/_tests/README.md +53 -0
  4. chia/_tests/__init__.py +0 -0
  5. chia/_tests/blockchain/__init__.py +0 -0
  6. chia/_tests/blockchain/blockchain_test_utils.py +197 -0
  7. chia/_tests/blockchain/config.py +4 -0
  8. chia/_tests/blockchain/test_augmented_chain.py +147 -0
  9. chia/_tests/blockchain/test_blockchain.py +4100 -0
  10. chia/_tests/blockchain/test_blockchain_transactions.py +1050 -0
  11. chia/_tests/blockchain/test_build_chains.py +61 -0
  12. chia/_tests/blockchain/test_get_block_generator.py +72 -0
  13. chia/_tests/blockchain/test_lookup_fork_chain.py +195 -0
  14. chia/_tests/build-init-files.py +93 -0
  15. chia/_tests/build-job-matrix.py +204 -0
  16. chia/_tests/check_pytest_monitor_output.py +34 -0
  17. chia/_tests/check_sql_statements.py +73 -0
  18. chia/_tests/chia-start-sim +42 -0
  19. chia/_tests/clvm/__init__.py +0 -0
  20. chia/_tests/clvm/benchmark_costs.py +23 -0
  21. chia/_tests/clvm/coin_store.py +147 -0
  22. chia/_tests/clvm/test_chialisp_deserialization.py +101 -0
  23. chia/_tests/clvm/test_clvm_step.py +37 -0
  24. chia/_tests/clvm/test_condition_codes.py +13 -0
  25. chia/_tests/clvm/test_curry_and_treehash.py +57 -0
  26. chia/_tests/clvm/test_program.py +150 -0
  27. chia/_tests/clvm/test_puzzle_compression.py +144 -0
  28. chia/_tests/clvm/test_puzzle_drivers.py +45 -0
  29. chia/_tests/clvm/test_puzzles.py +247 -0
  30. chia/_tests/clvm/test_singletons.py +540 -0
  31. chia/_tests/clvm/test_spend_sim.py +181 -0
  32. chia/_tests/cmds/__init__.py +0 -0
  33. chia/_tests/cmds/cmd_test_utils.py +472 -0
  34. chia/_tests/cmds/config.py +3 -0
  35. chia/_tests/cmds/conftest.py +23 -0
  36. chia/_tests/cmds/test_click_types.py +195 -0
  37. chia/_tests/cmds/test_cmd_framework.py +400 -0
  38. chia/_tests/cmds/test_cmds_util.py +97 -0
  39. chia/_tests/cmds/test_daemon.py +92 -0
  40. chia/_tests/cmds/test_farm_cmd.py +67 -0
  41. chia/_tests/cmds/test_show.py +116 -0
  42. chia/_tests/cmds/test_sim.py +207 -0
  43. chia/_tests/cmds/test_timelock_args.py +75 -0
  44. chia/_tests/cmds/test_tx_config_args.py +153 -0
  45. chia/_tests/cmds/testing_classes.py +59 -0
  46. chia/_tests/cmds/wallet/__init__.py +0 -0
  47. chia/_tests/cmds/wallet/test_coins.py +195 -0
  48. chia/_tests/cmds/wallet/test_consts.py +47 -0
  49. chia/_tests/cmds/wallet/test_dao.py +565 -0
  50. chia/_tests/cmds/wallet/test_did.py +403 -0
  51. chia/_tests/cmds/wallet/test_nft.py +470 -0
  52. chia/_tests/cmds/wallet/test_notifications.py +124 -0
  53. chia/_tests/cmds/wallet/test_offer.toffer +1 -0
  54. chia/_tests/cmds/wallet/test_tx_decorators.py +27 -0
  55. chia/_tests/cmds/wallet/test_vcs.py +376 -0
  56. chia/_tests/cmds/wallet/test_wallet.py +1126 -0
  57. chia/_tests/cmds/wallet/test_wallet_check.py +111 -0
  58. chia/_tests/conftest.py +1304 -0
  59. chia/_tests/connection_utils.py +124 -0
  60. chia/_tests/core/__init__.py +0 -0
  61. chia/_tests/core/cmds/__init__.py +0 -0
  62. chia/_tests/core/cmds/test_beta.py +382 -0
  63. chia/_tests/core/cmds/test_keys.py +1734 -0
  64. chia/_tests/core/cmds/test_wallet.py +126 -0
  65. chia/_tests/core/config.py +3 -0
  66. chia/_tests/core/consensus/__init__.py +0 -0
  67. chia/_tests/core/consensus/test_block_creation.py +56 -0
  68. chia/_tests/core/consensus/test_pot_iterations.py +117 -0
  69. chia/_tests/core/custom_types/__init__.py +0 -0
  70. chia/_tests/core/custom_types/test_coin.py +109 -0
  71. chia/_tests/core/custom_types/test_proof_of_space.py +144 -0
  72. chia/_tests/core/custom_types/test_spend_bundle.py +71 -0
  73. chia/_tests/core/daemon/__init__.py +0 -0
  74. chia/_tests/core/daemon/config.py +4 -0
  75. chia/_tests/core/daemon/test_daemon.py +2128 -0
  76. chia/_tests/core/daemon/test_daemon_register.py +109 -0
  77. chia/_tests/core/daemon/test_keychain_proxy.py +100 -0
  78. chia/_tests/core/data_layer/__init__.py +0 -0
  79. chia/_tests/core/data_layer/config.py +5 -0
  80. chia/_tests/core/data_layer/conftest.py +105 -0
  81. chia/_tests/core/data_layer/test_data_cli.py +57 -0
  82. chia/_tests/core/data_layer/test_data_layer.py +83 -0
  83. chia/_tests/core/data_layer/test_data_layer_util.py +219 -0
  84. chia/_tests/core/data_layer/test_data_rpc.py +3865 -0
  85. chia/_tests/core/data_layer/test_data_store.py +2423 -0
  86. chia/_tests/core/data_layer/test_data_store_schema.py +381 -0
  87. chia/_tests/core/data_layer/test_plugin.py +91 -0
  88. chia/_tests/core/data_layer/util.py +232 -0
  89. chia/_tests/core/farmer/__init__.py +0 -0
  90. chia/_tests/core/farmer/config.py +3 -0
  91. chia/_tests/core/farmer/test_farmer_api.py +101 -0
  92. chia/_tests/core/full_node/__init__.py +0 -0
  93. chia/_tests/core/full_node/config.py +4 -0
  94. chia/_tests/core/full_node/dos/__init__.py +0 -0
  95. chia/_tests/core/full_node/dos/config.py +3 -0
  96. chia/_tests/core/full_node/full_sync/__init__.py +0 -0
  97. chia/_tests/core/full_node/full_sync/config.py +4 -0
  98. chia/_tests/core/full_node/full_sync/test_full_sync.py +448 -0
  99. chia/_tests/core/full_node/ram_db.py +27 -0
  100. chia/_tests/core/full_node/stores/__init__.py +0 -0
  101. chia/_tests/core/full_node/stores/config.py +4 -0
  102. chia/_tests/core/full_node/stores/test_block_store.py +488 -0
  103. chia/_tests/core/full_node/stores/test_coin_store.py +888 -0
  104. chia/_tests/core/full_node/stores/test_full_node_store.py +1215 -0
  105. chia/_tests/core/full_node/stores/test_hint_store.py +230 -0
  106. chia/_tests/core/full_node/stores/test_sync_store.py +135 -0
  107. chia/_tests/core/full_node/test_address_manager.py +588 -0
  108. chia/_tests/core/full_node/test_block_height_map.py +556 -0
  109. chia/_tests/core/full_node/test_conditions.py +558 -0
  110. chia/_tests/core/full_node/test_full_node.py +2445 -0
  111. chia/_tests/core/full_node/test_generator_tools.py +82 -0
  112. chia/_tests/core/full_node/test_hint_management.py +104 -0
  113. chia/_tests/core/full_node/test_node_load.py +34 -0
  114. chia/_tests/core/full_node/test_performance.py +182 -0
  115. chia/_tests/core/full_node/test_subscriptions.py +492 -0
  116. chia/_tests/core/full_node/test_transactions.py +203 -0
  117. chia/_tests/core/full_node/test_tx_processing_queue.py +154 -0
  118. chia/_tests/core/large_block.py +2388 -0
  119. chia/_tests/core/make_block_generator.py +72 -0
  120. chia/_tests/core/mempool/__init__.py +0 -0
  121. chia/_tests/core/mempool/config.py +4 -0
  122. chia/_tests/core/mempool/test_mempool.py +3180 -0
  123. chia/_tests/core/mempool/test_mempool_fee_estimator.py +104 -0
  124. chia/_tests/core/mempool/test_mempool_fee_protocol.py +55 -0
  125. chia/_tests/core/mempool/test_mempool_item_queries.py +192 -0
  126. chia/_tests/core/mempool/test_mempool_manager.py +2054 -0
  127. chia/_tests/core/mempool/test_mempool_performance.py +65 -0
  128. chia/_tests/core/mempool/test_singleton_fast_forward.py +567 -0
  129. chia/_tests/core/node_height.py +28 -0
  130. chia/_tests/core/server/__init__.py +0 -0
  131. chia/_tests/core/server/config.py +3 -0
  132. chia/_tests/core/server/flood.py +82 -0
  133. chia/_tests/core/server/serve.py +132 -0
  134. chia/_tests/core/server/test_capabilities.py +68 -0
  135. chia/_tests/core/server/test_dos.py +320 -0
  136. chia/_tests/core/server/test_event_loop.py +109 -0
  137. chia/_tests/core/server/test_loop.py +290 -0
  138. chia/_tests/core/server/test_node_discovery.py +74 -0
  139. chia/_tests/core/server/test_rate_limits.py +370 -0
  140. chia/_tests/core/server/test_server.py +225 -0
  141. chia/_tests/core/server/test_upnp.py +8 -0
  142. chia/_tests/core/services/__init__.py +0 -0
  143. chia/_tests/core/services/config.py +3 -0
  144. chia/_tests/core/services/test_services.py +166 -0
  145. chia/_tests/core/ssl/__init__.py +0 -0
  146. chia/_tests/core/ssl/config.py +3 -0
  147. chia/_tests/core/ssl/test_ssl.py +198 -0
  148. chia/_tests/core/test_coins.py +33 -0
  149. chia/_tests/core/test_cost_calculation.py +314 -0
  150. chia/_tests/core/test_crawler.py +175 -0
  151. chia/_tests/core/test_crawler_rpc.py +53 -0
  152. chia/_tests/core/test_daemon_rpc.py +24 -0
  153. chia/_tests/core/test_db_conversion.py +129 -0
  154. chia/_tests/core/test_db_validation.py +161 -0
  155. chia/_tests/core/test_farmer_harvester_rpc.py +504 -0
  156. chia/_tests/core/test_filter.py +37 -0
  157. chia/_tests/core/test_full_node_rpc.py +794 -0
  158. chia/_tests/core/test_merkle_set.py +343 -0
  159. chia/_tests/core/test_program.py +49 -0
  160. chia/_tests/core/test_rpc_util.py +87 -0
  161. chia/_tests/core/test_seeder.py +308 -0
  162. chia/_tests/core/test_setproctitle.py +13 -0
  163. chia/_tests/core/util/__init__.py +0 -0
  164. chia/_tests/core/util/config.py +4 -0
  165. chia/_tests/core/util/test_block_cache.py +44 -0
  166. chia/_tests/core/util/test_cached_bls.py +57 -0
  167. chia/_tests/core/util/test_config.py +337 -0
  168. chia/_tests/core/util/test_file_keyring_synchronization.py +105 -0
  169. chia/_tests/core/util/test_files.py +391 -0
  170. chia/_tests/core/util/test_jsonify.py +146 -0
  171. chia/_tests/core/util/test_keychain.py +514 -0
  172. chia/_tests/core/util/test_keyring_wrapper.py +490 -0
  173. chia/_tests/core/util/test_lockfile.py +380 -0
  174. chia/_tests/core/util/test_log_exceptions.py +187 -0
  175. chia/_tests/core/util/test_lru_cache.py +56 -0
  176. chia/_tests/core/util/test_significant_bits.py +40 -0
  177. chia/_tests/core/util/test_streamable.py +883 -0
  178. chia/_tests/db/__init__.py +0 -0
  179. chia/_tests/db/test_db_wrapper.py +565 -0
  180. chia/_tests/environments/__init__.py +0 -0
  181. chia/_tests/environments/common.py +35 -0
  182. chia/_tests/environments/full_node.py +47 -0
  183. chia/_tests/environments/wallet.py +368 -0
  184. chia/_tests/ether.py +19 -0
  185. chia/_tests/farmer_harvester/__init__.py +0 -0
  186. chia/_tests/farmer_harvester/config.py +3 -0
  187. chia/_tests/farmer_harvester/test_farmer.py +1264 -0
  188. chia/_tests/farmer_harvester/test_farmer_harvester.py +292 -0
  189. chia/_tests/farmer_harvester/test_filter_prefix_bits.py +130 -0
  190. chia/_tests/farmer_harvester/test_third_party_harvesters.py +501 -0
  191. chia/_tests/farmer_harvester/test_third_party_harvesters_data.json +29 -0
  192. chia/_tests/fee_estimation/__init__.py +0 -0
  193. chia/_tests/fee_estimation/config.py +3 -0
  194. chia/_tests/fee_estimation/test_fee_estimation_integration.py +262 -0
  195. chia/_tests/fee_estimation/test_fee_estimation_rpc.py +287 -0
  196. chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py +145 -0
  197. chia/_tests/fee_estimation/test_mempoolitem_height_added.py +146 -0
  198. chia/_tests/generator/__init__.py +0 -0
  199. chia/_tests/generator/puzzles/__init__.py +0 -0
  200. chia/_tests/generator/puzzles/test_generator_deserialize.clsp +3 -0
  201. chia/_tests/generator/puzzles/test_generator_deserialize.clsp.hex +1 -0
  202. chia/_tests/generator/puzzles/test_multiple_generator_input_arguments.clsp +19 -0
  203. chia/_tests/generator/puzzles/test_multiple_generator_input_arguments.clsp.hex +1 -0
  204. chia/_tests/generator/test_compression.py +218 -0
  205. chia/_tests/generator/test_generator_types.py +44 -0
  206. chia/_tests/generator/test_rom.py +182 -0
  207. chia/_tests/plot_sync/__init__.py +0 -0
  208. chia/_tests/plot_sync/config.py +3 -0
  209. chia/_tests/plot_sync/test_delta.py +102 -0
  210. chia/_tests/plot_sync/test_plot_sync.py +617 -0
  211. chia/_tests/plot_sync/test_receiver.py +451 -0
  212. chia/_tests/plot_sync/test_sender.py +116 -0
  213. chia/_tests/plot_sync/test_sync_simulated.py +450 -0
  214. chia/_tests/plot_sync/util.py +67 -0
  215. chia/_tests/plotting/__init__.py +0 -0
  216. chia/_tests/plotting/config.py +3 -0
  217. chia/_tests/plotting/test_plot_manager.py +738 -0
  218. chia/_tests/plotting/util.py +13 -0
  219. chia/_tests/pools/__init__.py +0 -0
  220. chia/_tests/pools/config.py +5 -0
  221. chia/_tests/pools/test_pool_cmdline.py +23 -0
  222. chia/_tests/pools/test_pool_config.py +44 -0
  223. chia/_tests/pools/test_pool_puzzles_lifecycle.py +398 -0
  224. chia/_tests/pools/test_pool_rpc.py +1010 -0
  225. chia/_tests/pools/test_pool_wallet.py +201 -0
  226. chia/_tests/pools/test_wallet_pool_store.py +161 -0
  227. chia/_tests/process_junit.py +349 -0
  228. chia/_tests/rpc/__init__.py +0 -0
  229. chia/_tests/rpc/test_rpc_client.py +81 -0
  230. chia/_tests/simulation/__init__.py +0 -0
  231. chia/_tests/simulation/config.py +6 -0
  232. chia/_tests/simulation/test_simulation.py +501 -0
  233. chia/_tests/simulation/test_simulator.py +234 -0
  234. chia/_tests/simulation/test_start_simulator.py +106 -0
  235. chia/_tests/testconfig.py +13 -0
  236. chia/_tests/timelord/__init__.py +0 -0
  237. chia/_tests/timelord/config.py +3 -0
  238. chia/_tests/timelord/test_new_peak.py +437 -0
  239. chia/_tests/timelord/test_timelord.py +11 -0
  240. chia/_tests/tools/1315537.json +170 -0
  241. chia/_tests/tools/1315544.json +160 -0
  242. chia/_tests/tools/1315630.json +150 -0
  243. chia/_tests/tools/300000.json +105 -0
  244. chia/_tests/tools/442734.json +140 -0
  245. chia/_tests/tools/466212.json +130 -0
  246. chia/_tests/tools/__init__.py +0 -0
  247. chia/_tests/tools/config.py +5 -0
  248. chia/_tests/tools/test-blockchain-db.sqlite +0 -0
  249. chia/_tests/tools/test_full_sync.py +30 -0
  250. chia/_tests/tools/test_legacy_keyring.py +82 -0
  251. chia/_tests/tools/test_run_block.py +129 -0
  252. chia/_tests/util/__init__.py +0 -0
  253. chia/_tests/util/benchmark_cost.py +170 -0
  254. chia/_tests/util/benchmarks.py +154 -0
  255. chia/_tests/util/bip39_test_vectors.json +148 -0
  256. chia/_tests/util/blockchain.py +133 -0
  257. chia/_tests/util/blockchain_mock.py +132 -0
  258. chia/_tests/util/build_network_protocol_files.py +302 -0
  259. chia/_tests/util/clvm_generator.bin +0 -0
  260. chia/_tests/util/config.py +3 -0
  261. chia/_tests/util/constants.py +20 -0
  262. chia/_tests/util/db_connection.py +36 -0
  263. chia/_tests/util/full_sync.py +245 -0
  264. chia/_tests/util/gen_ssl_certs.py +115 -0
  265. chia/_tests/util/generator_tools_testing.py +47 -0
  266. chia/_tests/util/key_tool.py +37 -0
  267. chia/_tests/util/misc.py +722 -0
  268. chia/_tests/util/network_protocol_data.py +1074 -0
  269. chia/_tests/util/protocol_messages_bytes-v1.0 +0 -0
  270. chia/_tests/util/protocol_messages_json.py +2700 -0
  271. chia/_tests/util/rpc.py +23 -0
  272. chia/_tests/util/run_block.py +163 -0
  273. chia/_tests/util/setup_nodes.py +479 -0
  274. chia/_tests/util/split_managers.py +99 -0
  275. chia/_tests/util/temp_file.py +14 -0
  276. chia/_tests/util/test_action_scope.py +143 -0
  277. chia/_tests/util/test_async_pool.py +366 -0
  278. chia/_tests/util/test_build_job_matrix.py +43 -0
  279. chia/_tests/util/test_build_network_protocol_files.py +7 -0
  280. chia/_tests/util/test_chia_version.py +50 -0
  281. chia/_tests/util/test_collection.py +11 -0
  282. chia/_tests/util/test_condition_tools.py +231 -0
  283. chia/_tests/util/test_config.py +426 -0
  284. chia/_tests/util/test_dump_keyring.py +60 -0
  285. chia/_tests/util/test_errors.py +10 -0
  286. chia/_tests/util/test_full_block_utils.py +271 -0
  287. chia/_tests/util/test_installed.py +20 -0
  288. chia/_tests/util/test_limited_semaphore.py +52 -0
  289. chia/_tests/util/test_logging_filter.py +43 -0
  290. chia/_tests/util/test_misc.py +444 -0
  291. chia/_tests/util/test_network.py +74 -0
  292. chia/_tests/util/test_network_protocol_files.py +579 -0
  293. chia/_tests/util/test_network_protocol_json.py +266 -0
  294. chia/_tests/util/test_network_protocol_test.py +257 -0
  295. chia/_tests/util/test_paginator.py +72 -0
  296. chia/_tests/util/test_pprint.py +17 -0
  297. chia/_tests/util/test_priority_mutex.py +487 -0
  298. chia/_tests/util/test_recursive_replace.py +116 -0
  299. chia/_tests/util/test_replace_str_to_bytes.py +137 -0
  300. chia/_tests/util/test_service_groups.py +15 -0
  301. chia/_tests/util/test_ssl_check.py +31 -0
  302. chia/_tests/util/test_testnet_overrides.py +19 -0
  303. chia/_tests/util/test_tests_misc.py +38 -0
  304. chia/_tests/util/test_timing.py +37 -0
  305. chia/_tests/util/test_trusted_peer.py +51 -0
  306. chia/_tests/util/time_out_assert.py +154 -0
  307. chia/_tests/wallet/__init__.py +0 -0
  308. chia/_tests/wallet/cat_wallet/__init__.py +0 -0
  309. chia/_tests/wallet/cat_wallet/config.py +4 -0
  310. chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py +468 -0
  311. chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py +69 -0
  312. chia/_tests/wallet/cat_wallet/test_cat_wallet.py +1738 -0
  313. chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +291 -0
  314. chia/_tests/wallet/cat_wallet/test_trades.py +2578 -0
  315. chia/_tests/wallet/clawback/__init__.py +0 -0
  316. chia/_tests/wallet/clawback/config.py +3 -0
  317. chia/_tests/wallet/clawback/test_clawback_decorator.py +80 -0
  318. chia/_tests/wallet/clawback/test_clawback_lifecycle.py +292 -0
  319. chia/_tests/wallet/clawback/test_clawback_metadata.py +51 -0
  320. chia/_tests/wallet/config.py +4 -0
  321. chia/_tests/wallet/conftest.py +217 -0
  322. chia/_tests/wallet/dao_wallet/__init__.py +0 -0
  323. chia/_tests/wallet/dao_wallet/config.py +3 -0
  324. chia/_tests/wallet/dao_wallet/test_dao_clvm.py +1322 -0
  325. chia/_tests/wallet/dao_wallet/test_dao_wallets.py +3488 -0
  326. chia/_tests/wallet/db_wallet/__init__.py +0 -0
  327. chia/_tests/wallet/db_wallet/config.py +3 -0
  328. chia/_tests/wallet/db_wallet/test_db_graftroot.py +143 -0
  329. chia/_tests/wallet/db_wallet/test_dl_offers.py +491 -0
  330. chia/_tests/wallet/db_wallet/test_dl_wallet.py +823 -0
  331. chia/_tests/wallet/did_wallet/__init__.py +0 -0
  332. chia/_tests/wallet/did_wallet/config.py +4 -0
  333. chia/_tests/wallet/did_wallet/test_did.py +1481 -0
  334. chia/_tests/wallet/nft_wallet/__init__.py +0 -0
  335. chia/_tests/wallet/nft_wallet/config.py +4 -0
  336. chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +1492 -0
  337. chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py +1014 -0
  338. chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +376 -0
  339. chia/_tests/wallet/nft_wallet/test_nft_offers.py +1209 -0
  340. chia/_tests/wallet/nft_wallet/test_nft_puzzles.py +172 -0
  341. chia/_tests/wallet/nft_wallet/test_nft_wallet.py +2558 -0
  342. chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py +70 -0
  343. chia/_tests/wallet/rpc/__init__.py +0 -0
  344. chia/_tests/wallet/rpc/config.py +4 -0
  345. chia/_tests/wallet/rpc/test_dl_wallet_rpc.py +287 -0
  346. chia/_tests/wallet/rpc/test_wallet_rpc.py +3106 -0
  347. chia/_tests/wallet/simple_sync/__init__.py +0 -0
  348. chia/_tests/wallet/simple_sync/config.py +3 -0
  349. chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py +719 -0
  350. chia/_tests/wallet/sync/__init__.py +0 -0
  351. chia/_tests/wallet/sync/config.py +4 -0
  352. chia/_tests/wallet/sync/test_wallet_sync.py +1529 -0
  353. chia/_tests/wallet/test_address_type.py +189 -0
  354. chia/_tests/wallet/test_bech32m.py +45 -0
  355. chia/_tests/wallet/test_clvm_streamable.py +244 -0
  356. chia/_tests/wallet/test_coin_selection.py +589 -0
  357. chia/_tests/wallet/test_conditions.py +388 -0
  358. chia/_tests/wallet/test_debug_spend_bundle.py +76 -0
  359. chia/_tests/wallet/test_new_wallet_protocol.py +1176 -0
  360. chia/_tests/wallet/test_nft_store.py +193 -0
  361. chia/_tests/wallet/test_notifications.py +196 -0
  362. chia/_tests/wallet/test_offer_parsing_performance.py +48 -0
  363. chia/_tests/wallet/test_puzzle_store.py +133 -0
  364. chia/_tests/wallet/test_sign_coin_spends.py +159 -0
  365. chia/_tests/wallet/test_signer_protocol.py +948 -0
  366. chia/_tests/wallet/test_singleton.py +122 -0
  367. chia/_tests/wallet/test_singleton_lifecycle_fast.py +772 -0
  368. chia/_tests/wallet/test_singleton_store.py +152 -0
  369. chia/_tests/wallet/test_taproot.py +19 -0
  370. chia/_tests/wallet/test_transaction_store.py +941 -0
  371. chia/_tests/wallet/test_util.py +181 -0
  372. chia/_tests/wallet/test_wallet.py +2139 -0
  373. chia/_tests/wallet/test_wallet_action_scope.py +85 -0
  374. chia/_tests/wallet/test_wallet_blockchain.py +113 -0
  375. chia/_tests/wallet/test_wallet_coin_store.py +1002 -0
  376. chia/_tests/wallet/test_wallet_interested_store.py +43 -0
  377. chia/_tests/wallet/test_wallet_key_val_store.py +40 -0
  378. chia/_tests/wallet/test_wallet_node.py +783 -0
  379. chia/_tests/wallet/test_wallet_retry.py +95 -0
  380. chia/_tests/wallet/test_wallet_state_manager.py +252 -0
  381. chia/_tests/wallet/test_wallet_test_framework.py +275 -0
  382. chia/_tests/wallet/test_wallet_trade_store.py +218 -0
  383. chia/_tests/wallet/test_wallet_user_store.py +34 -0
  384. chia/_tests/wallet/test_wallet_utils.py +155 -0
  385. chia/_tests/wallet/vc_wallet/__init__.py +0 -0
  386. chia/_tests/wallet/vc_wallet/config.py +3 -0
  387. chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py +70 -0
  388. chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +883 -0
  389. chia/_tests/wallet/vc_wallet/test_vc_wallet.py +801 -0
  390. chia/_tests/wallet/wallet_block_tools.py +327 -0
  391. chia/_tests/weight_proof/__init__.py +0 -0
  392. chia/_tests/weight_proof/config.py +3 -0
  393. chia/_tests/weight_proof/test_weight_proof.py +528 -0
  394. chia/clvm/__init__.py +0 -0
  395. chia/clvm/spend_sim.py +488 -0
  396. chia/cmds/__init__.py +0 -0
  397. chia/cmds/beta.py +183 -0
  398. chia/cmds/beta_funcs.py +133 -0
  399. chia/cmds/check_wallet_db.py +418 -0
  400. chia/cmds/chia.py +143 -0
  401. chia/cmds/cmd_classes.py +315 -0
  402. chia/cmds/cmds_util.py +498 -0
  403. chia/cmds/coin_funcs.py +260 -0
  404. chia/cmds/coins.py +220 -0
  405. chia/cmds/completion.py +49 -0
  406. chia/cmds/configure.py +331 -0
  407. chia/cmds/dao.py +1008 -0
  408. chia/cmds/dao_funcs.py +576 -0
  409. chia/cmds/data.py +707 -0
  410. chia/cmds/data_funcs.py +380 -0
  411. chia/cmds/db.py +86 -0
  412. chia/cmds/db_backup_func.py +77 -0
  413. chia/cmds/db_upgrade_func.py +452 -0
  414. chia/cmds/db_validate_func.py +184 -0
  415. chia/cmds/dev.py +16 -0
  416. chia/cmds/farm.py +87 -0
  417. chia/cmds/farm_funcs.py +207 -0
  418. chia/cmds/init.py +70 -0
  419. chia/cmds/init_funcs.py +367 -0
  420. chia/cmds/installers.py +129 -0
  421. chia/cmds/keys.py +510 -0
  422. chia/cmds/keys_funcs.py +864 -0
  423. chia/cmds/netspace.py +47 -0
  424. chia/cmds/netspace_funcs.py +53 -0
  425. chia/cmds/options.py +32 -0
  426. chia/cmds/param_types.py +228 -0
  427. chia/cmds/passphrase.py +130 -0
  428. chia/cmds/passphrase_funcs.py +346 -0
  429. chia/cmds/peer.py +50 -0
  430. chia/cmds/peer_funcs.py +129 -0
  431. chia/cmds/plotnft.py +206 -0
  432. chia/cmds/plotnft_funcs.py +374 -0
  433. chia/cmds/plots.py +222 -0
  434. chia/cmds/plotters.py +17 -0
  435. chia/cmds/rpc.py +188 -0
  436. chia/cmds/show.py +71 -0
  437. chia/cmds/show_funcs.py +214 -0
  438. chia/cmds/signer.py +304 -0
  439. chia/cmds/sim.py +217 -0
  440. chia/cmds/sim_funcs.py +509 -0
  441. chia/cmds/start.py +24 -0
  442. chia/cmds/start_funcs.py +112 -0
  443. chia/cmds/stop.py +61 -0
  444. chia/cmds/units.py +11 -0
  445. chia/cmds/wallet.py +1745 -0
  446. chia/cmds/wallet_funcs.py +1800 -0
  447. chia/consensus/__init__.py +0 -0
  448. chia/consensus/block_body_validation.py +515 -0
  449. chia/consensus/block_creation.py +525 -0
  450. chia/consensus/block_header_validation.py +1064 -0
  451. chia/consensus/block_record.py +32 -0
  452. chia/consensus/block_rewards.py +53 -0
  453. chia/consensus/block_root_validation.py +46 -0
  454. chia/consensus/blockchain.py +1100 -0
  455. chia/consensus/blockchain_interface.py +56 -0
  456. chia/consensus/coinbase.py +30 -0
  457. chia/consensus/condition_costs.py +9 -0
  458. chia/consensus/constants.py +49 -0
  459. chia/consensus/cost_calculator.py +15 -0
  460. chia/consensus/default_constants.py +90 -0
  461. chia/consensus/deficit.py +55 -0
  462. chia/consensus/difficulty_adjustment.py +412 -0
  463. chia/consensus/find_fork_point.py +111 -0
  464. chia/consensus/full_block_to_block_record.py +167 -0
  465. chia/consensus/get_block_challenge.py +106 -0
  466. chia/consensus/get_block_generator.py +26 -0
  467. chia/consensus/make_sub_epoch_summary.py +210 -0
  468. chia/consensus/multiprocess_validation.py +365 -0
  469. chia/consensus/pos_quality.py +19 -0
  470. chia/consensus/pot_iterations.py +67 -0
  471. chia/consensus/puzzles/__init__.py +0 -0
  472. chia/consensus/puzzles/chialisp_deserialisation.clsp +69 -0
  473. chia/consensus/puzzles/chialisp_deserialisation.clsp.hex +1 -0
  474. chia/consensus/puzzles/rom_bootstrap_generator.clsp +37 -0
  475. chia/consensus/puzzles/rom_bootstrap_generator.clsp.hex +1 -0
  476. chia/consensus/vdf_info_computation.py +156 -0
  477. chia/daemon/__init__.py +0 -0
  478. chia/daemon/client.py +233 -0
  479. chia/daemon/keychain_proxy.py +501 -0
  480. chia/daemon/keychain_server.py +365 -0
  481. chia/daemon/server.py +1616 -0
  482. chia/daemon/windows_signal.py +56 -0
  483. chia/data_layer/__init__.py +0 -0
  484. chia/data_layer/data_layer.py +1303 -0
  485. chia/data_layer/data_layer_api.py +25 -0
  486. chia/data_layer/data_layer_errors.py +50 -0
  487. chia/data_layer/data_layer_server.py +170 -0
  488. chia/data_layer/data_layer_util.py +985 -0
  489. chia/data_layer/data_layer_wallet.py +1315 -0
  490. chia/data_layer/data_store.py +2267 -0
  491. chia/data_layer/dl_wallet_store.py +407 -0
  492. chia/data_layer/download_data.py +389 -0
  493. chia/data_layer/puzzles/__init__.py +0 -0
  494. chia/data_layer/puzzles/graftroot_dl_offers.clsp +100 -0
  495. chia/data_layer/puzzles/graftroot_dl_offers.clsp.hex +1 -0
  496. chia/data_layer/s3_plugin_config.yml +33 -0
  497. chia/data_layer/s3_plugin_service.py +468 -0
  498. chia/data_layer/util/__init__.py +0 -0
  499. chia/data_layer/util/benchmark.py +108 -0
  500. chia/data_layer/util/plugin.py +41 -0
  501. chia/farmer/__init__.py +0 -0
  502. chia/farmer/farmer.py +920 -0
  503. chia/farmer/farmer_api.py +814 -0
  504. chia/full_node/__init__.py +0 -0
  505. chia/full_node/bitcoin_fee_estimator.py +85 -0
  506. chia/full_node/block_height_map.py +271 -0
  507. chia/full_node/block_store.py +570 -0
  508. chia/full_node/bundle_tools.py +19 -0
  509. chia/full_node/coin_store.py +646 -0
  510. chia/full_node/fee_estimate.py +54 -0
  511. chia/full_node/fee_estimate_store.py +24 -0
  512. chia/full_node/fee_estimation.py +93 -0
  513. chia/full_node/fee_estimator.py +90 -0
  514. chia/full_node/fee_estimator_constants.py +38 -0
  515. chia/full_node/fee_estimator_interface.py +42 -0
  516. chia/full_node/fee_history.py +26 -0
  517. chia/full_node/fee_tracker.py +564 -0
  518. chia/full_node/full_node.py +3052 -0
  519. chia/full_node/full_node_api.py +1974 -0
  520. chia/full_node/full_node_store.py +1033 -0
  521. chia/full_node/hint_management.py +56 -0
  522. chia/full_node/hint_store.py +94 -0
  523. chia/full_node/mempool.py +583 -0
  524. chia/full_node/mempool_check_conditions.py +177 -0
  525. chia/full_node/mempool_manager.py +858 -0
  526. chia/full_node/pending_tx_cache.py +112 -0
  527. chia/full_node/puzzles/__init__.py +0 -0
  528. chia/full_node/puzzles/block_program_zero.clsp +14 -0
  529. chia/full_node/puzzles/block_program_zero.clsp.hex +1 -0
  530. chia/full_node/puzzles/decompress_coin_spend_entry.clsp +5 -0
  531. chia/full_node/puzzles/decompress_coin_spend_entry.clsp.hex +1 -0
  532. chia/full_node/puzzles/decompress_coin_spend_entry_with_prefix.clsp +7 -0
  533. chia/full_node/puzzles/decompress_coin_spend_entry_with_prefix.clsp.hex +1 -0
  534. chia/full_node/puzzles/decompress_puzzle.clsp +6 -0
  535. chia/full_node/puzzles/decompress_puzzle.clsp.hex +1 -0
  536. chia/full_node/signage_point.py +16 -0
  537. chia/full_node/subscriptions.py +248 -0
  538. chia/full_node/sync_store.py +145 -0
  539. chia/full_node/tx_processing_queue.py +78 -0
  540. chia/full_node/weight_proof.py +1719 -0
  541. chia/harvester/__init__.py +0 -0
  542. chia/harvester/harvester.py +271 -0
  543. chia/harvester/harvester_api.py +374 -0
  544. chia/introducer/__init__.py +0 -0
  545. chia/introducer/introducer.py +120 -0
  546. chia/introducer/introducer_api.py +64 -0
  547. chia/legacy/__init__.py +0 -0
  548. chia/legacy/keyring.py +154 -0
  549. chia/plot_sync/__init__.py +0 -0
  550. chia/plot_sync/delta.py +61 -0
  551. chia/plot_sync/exceptions.py +56 -0
  552. chia/plot_sync/receiver.py +385 -0
  553. chia/plot_sync/sender.py +337 -0
  554. chia/plot_sync/util.py +43 -0
  555. chia/plotters/__init__.py +0 -0
  556. chia/plotters/bladebit.py +388 -0
  557. chia/plotters/chiapos.py +63 -0
  558. chia/plotters/madmax.py +224 -0
  559. chia/plotters/plotters.py +577 -0
  560. chia/plotters/plotters_util.py +131 -0
  561. chia/plotting/__init__.py +0 -0
  562. chia/plotting/cache.py +212 -0
  563. chia/plotting/check_plots.py +283 -0
  564. chia/plotting/create_plots.py +278 -0
  565. chia/plotting/manager.py +436 -0
  566. chia/plotting/util.py +324 -0
  567. chia/pools/__init__.py +0 -0
  568. chia/pools/pool_config.py +110 -0
  569. chia/pools/pool_puzzles.py +459 -0
  570. chia/pools/pool_wallet.py +926 -0
  571. chia/pools/pool_wallet_info.py +118 -0
  572. chia/pools/puzzles/__init__.py +0 -0
  573. chia/pools/puzzles/pool_member_innerpuz.clsp +70 -0
  574. chia/pools/puzzles/pool_member_innerpuz.clsp.hex +1 -0
  575. chia/pools/puzzles/pool_waitingroom_innerpuz.clsp +69 -0
  576. chia/pools/puzzles/pool_waitingroom_innerpuz.clsp.hex +1 -0
  577. chia/protocols/__init__.py +0 -0
  578. chia/protocols/farmer_protocol.py +102 -0
  579. chia/protocols/full_node_protocol.py +219 -0
  580. chia/protocols/harvester_protocol.py +216 -0
  581. chia/protocols/introducer_protocol.py +26 -0
  582. chia/protocols/pool_protocol.py +177 -0
  583. chia/protocols/protocol_message_types.py +139 -0
  584. chia/protocols/protocol_state_machine.py +87 -0
  585. chia/protocols/protocol_timing.py +7 -0
  586. chia/protocols/shared_protocol.py +86 -0
  587. chia/protocols/timelord_protocol.py +93 -0
  588. chia/protocols/wallet_protocol.py +401 -0
  589. chia/py.typed +0 -0
  590. chia/rpc/__init__.py +0 -0
  591. chia/rpc/crawler_rpc_api.py +75 -0
  592. chia/rpc/data_layer_rpc_api.py +639 -0
  593. chia/rpc/data_layer_rpc_client.py +188 -0
  594. chia/rpc/data_layer_rpc_util.py +62 -0
  595. chia/rpc/farmer_rpc_api.py +360 -0
  596. chia/rpc/farmer_rpc_client.py +86 -0
  597. chia/rpc/full_node_rpc_api.py +954 -0
  598. chia/rpc/full_node_rpc_client.py +292 -0
  599. chia/rpc/harvester_rpc_api.py +136 -0
  600. chia/rpc/harvester_rpc_client.py +54 -0
  601. chia/rpc/rpc_client.py +144 -0
  602. chia/rpc/rpc_server.py +447 -0
  603. chia/rpc/timelord_rpc_api.py +27 -0
  604. chia/rpc/util.py +293 -0
  605. chia/rpc/wallet_request_types.py +688 -0
  606. chia/rpc/wallet_rpc_api.py +4779 -0
  607. chia/rpc/wallet_rpc_client.py +1844 -0
  608. chia/seeder/__init__.py +0 -0
  609. chia/seeder/crawl_store.py +427 -0
  610. chia/seeder/crawler.py +423 -0
  611. chia/seeder/crawler_api.py +129 -0
  612. chia/seeder/dns_server.py +544 -0
  613. chia/seeder/peer_record.py +146 -0
  614. chia/seeder/start_crawler.py +88 -0
  615. chia/server/__init__.py +0 -0
  616. chia/server/address_manager.py +658 -0
  617. chia/server/address_manager_store.py +237 -0
  618. chia/server/api_protocol.py +11 -0
  619. chia/server/capabilities.py +24 -0
  620. chia/server/chia_policy.py +345 -0
  621. chia/server/introducer_peers.py +76 -0
  622. chia/server/node_discovery.py +718 -0
  623. chia/server/outbound_message.py +33 -0
  624. chia/server/rate_limit_numbers.py +204 -0
  625. chia/server/rate_limits.py +113 -0
  626. chia/server/server.py +720 -0
  627. chia/server/signal_handlers.py +117 -0
  628. chia/server/ssl_context.py +32 -0
  629. chia/server/start_data_layer.py +137 -0
  630. chia/server/start_farmer.py +86 -0
  631. chia/server/start_full_node.py +106 -0
  632. chia/server/start_harvester.py +80 -0
  633. chia/server/start_introducer.py +69 -0
  634. chia/server/start_service.py +328 -0
  635. chia/server/start_timelord.py +82 -0
  636. chia/server/start_wallet.py +109 -0
  637. chia/server/upnp.py +117 -0
  638. chia/server/ws_connection.py +752 -0
  639. chia/simulator/__init__.py +0 -0
  640. chia/simulator/block_tools.py +2053 -0
  641. chia/simulator/full_node_simulator.py +802 -0
  642. chia/simulator/keyring.py +128 -0
  643. chia/simulator/setup_services.py +505 -0
  644. chia/simulator/simulator_constants.py +13 -0
  645. chia/simulator/simulator_full_node_rpc_api.py +101 -0
  646. chia/simulator/simulator_full_node_rpc_client.py +62 -0
  647. chia/simulator/simulator_protocol.py +29 -0
  648. chia/simulator/simulator_test_tools.py +163 -0
  649. chia/simulator/socket.py +27 -0
  650. chia/simulator/ssl_certs.py +114 -0
  651. chia/simulator/ssl_certs_1.py +699 -0
  652. chia/simulator/ssl_certs_10.py +699 -0
  653. chia/simulator/ssl_certs_2.py +699 -0
  654. chia/simulator/ssl_certs_3.py +699 -0
  655. chia/simulator/ssl_certs_4.py +699 -0
  656. chia/simulator/ssl_certs_5.py +699 -0
  657. chia/simulator/ssl_certs_6.py +699 -0
  658. chia/simulator/ssl_certs_7.py +699 -0
  659. chia/simulator/ssl_certs_8.py +699 -0
  660. chia/simulator/ssl_certs_9.py +699 -0
  661. chia/simulator/start_simulator.py +135 -0
  662. chia/simulator/wallet_tools.py +245 -0
  663. chia/ssl/__init__.py +0 -0
  664. chia/ssl/chia_ca.crt +19 -0
  665. chia/ssl/chia_ca.key +28 -0
  666. chia/ssl/create_ssl.py +249 -0
  667. chia/ssl/dst_root_ca.pem +20 -0
  668. chia/timelord/__init__.py +0 -0
  669. chia/timelord/iters_from_block.py +50 -0
  670. chia/timelord/timelord.py +1202 -0
  671. chia/timelord/timelord_api.py +132 -0
  672. chia/timelord/timelord_launcher.py +188 -0
  673. chia/timelord/timelord_state.py +244 -0
  674. chia/timelord/types.py +22 -0
  675. chia/types/__init__.py +0 -0
  676. chia/types/aliases.py +35 -0
  677. chia/types/block_protocol.py +20 -0
  678. chia/types/blockchain_format/__init__.py +0 -0
  679. chia/types/blockchain_format/classgroup.py +5 -0
  680. chia/types/blockchain_format/coin.py +28 -0
  681. chia/types/blockchain_format/foliage.py +8 -0
  682. chia/types/blockchain_format/pool_target.py +5 -0
  683. chia/types/blockchain_format/program.py +270 -0
  684. chia/types/blockchain_format/proof_of_space.py +135 -0
  685. chia/types/blockchain_format/reward_chain_block.py +6 -0
  686. chia/types/blockchain_format/serialized_program.py +5 -0
  687. chia/types/blockchain_format/sized_bytes.py +11 -0
  688. chia/types/blockchain_format/slots.py +9 -0
  689. chia/types/blockchain_format/sub_epoch_summary.py +5 -0
  690. chia/types/blockchain_format/tree_hash.py +72 -0
  691. chia/types/blockchain_format/vdf.py +86 -0
  692. chia/types/clvm_cost.py +13 -0
  693. chia/types/coin_record.py +43 -0
  694. chia/types/coin_spend.py +115 -0
  695. chia/types/condition_opcodes.py +73 -0
  696. chia/types/condition_with_args.py +17 -0
  697. chia/types/eligible_coin_spends.py +364 -0
  698. chia/types/end_of_slot_bundle.py +5 -0
  699. chia/types/fee_rate.py +38 -0
  700. chia/types/full_block.py +5 -0
  701. chia/types/generator_types.py +14 -0
  702. chia/types/header_block.py +5 -0
  703. chia/types/internal_mempool_item.py +19 -0
  704. chia/types/mempool_inclusion_status.py +9 -0
  705. chia/types/mempool_item.py +85 -0
  706. chia/types/mempool_submission_status.py +30 -0
  707. chia/types/mojos.py +7 -0
  708. chia/types/peer_info.py +64 -0
  709. chia/types/signing_mode.py +29 -0
  710. chia/types/spend_bundle.py +31 -0
  711. chia/types/spend_bundle_conditions.py +7 -0
  712. chia/types/transaction_queue_entry.py +55 -0
  713. chia/types/unfinished_block.py +5 -0
  714. chia/types/unfinished_header_block.py +37 -0
  715. chia/types/weight_proof.py +50 -0
  716. chia/util/__init__.py +0 -0
  717. chia/util/action_scope.py +168 -0
  718. chia/util/api_decorators.py +89 -0
  719. chia/util/async_pool.py +224 -0
  720. chia/util/augmented_chain.py +130 -0
  721. chia/util/batches.py +39 -0
  722. chia/util/bech32m.py +123 -0
  723. chia/util/beta_metrics.py +118 -0
  724. chia/util/block_cache.py +56 -0
  725. chia/util/byte_types.py +10 -0
  726. chia/util/check_fork_next_block.py +32 -0
  727. chia/util/chia_logging.py +124 -0
  728. chia/util/chia_version.py +33 -0
  729. chia/util/collection.py +17 -0
  730. chia/util/condition_tools.py +201 -0
  731. chia/util/config.py +366 -0
  732. chia/util/cpu.py +20 -0
  733. chia/util/db_synchronous.py +21 -0
  734. chia/util/db_version.py +30 -0
  735. chia/util/db_wrapper.py +427 -0
  736. chia/util/default_root.py +10 -0
  737. chia/util/dump_keyring.py +93 -0
  738. chia/util/english.txt +2048 -0
  739. chia/util/errors.py +351 -0
  740. chia/util/file_keyring.py +480 -0
  741. chia/util/files.py +95 -0
  742. chia/util/full_block_utils.py +321 -0
  743. chia/util/generator_tools.py +62 -0
  744. chia/util/hash.py +29 -0
  745. chia/util/initial-config.yaml +675 -0
  746. chia/util/inline_executor.py +24 -0
  747. chia/util/ints.py +19 -0
  748. chia/util/json_util.py +41 -0
  749. chia/util/keychain.py +673 -0
  750. chia/util/keyring_wrapper.py +266 -0
  751. chia/util/limited_semaphore.py +39 -0
  752. chia/util/lock.py +47 -0
  753. chia/util/log_exceptions.py +29 -0
  754. chia/util/logging.py +34 -0
  755. chia/util/lru_cache.py +29 -0
  756. chia/util/math.py +20 -0
  757. chia/util/network.py +240 -0
  758. chia/util/paginator.py +46 -0
  759. chia/util/path.py +29 -0
  760. chia/util/permissions.py +19 -0
  761. chia/util/pprint.py +40 -0
  762. chia/util/prev_transaction_block.py +23 -0
  763. chia/util/priority_mutex.py +92 -0
  764. chia/util/profiler.py +194 -0
  765. chia/util/recursive_replace.py +22 -0
  766. chia/util/safe_cancel_task.py +14 -0
  767. chia/util/service_groups.py +47 -0
  768. chia/util/setproctitle.py +20 -0
  769. chia/util/significant_bits.py +30 -0
  770. chia/util/ssl_check.py +213 -0
  771. chia/util/streamable.py +654 -0
  772. chia/util/task_timing.py +378 -0
  773. chia/util/timing.py +64 -0
  774. chia/util/vdf_prover.py +31 -0
  775. chia/util/ws_message.py +66 -0
  776. chia/wallet/__init__.py +0 -0
  777. chia/wallet/cat_wallet/__init__.py +0 -0
  778. chia/wallet/cat_wallet/cat_constants.py +75 -0
  779. chia/wallet/cat_wallet/cat_info.py +47 -0
  780. chia/wallet/cat_wallet/cat_outer_puzzle.py +120 -0
  781. chia/wallet/cat_wallet/cat_utils.py +163 -0
  782. chia/wallet/cat_wallet/cat_wallet.py +869 -0
  783. chia/wallet/cat_wallet/dao_cat_info.py +28 -0
  784. chia/wallet/cat_wallet/dao_cat_wallet.py +669 -0
  785. chia/wallet/cat_wallet/lineage_store.py +74 -0
  786. chia/wallet/cat_wallet/puzzles/__init__.py +0 -0
  787. chia/wallet/cat_wallet/puzzles/cat_truths.clib +31 -0
  788. chia/wallet/cat_wallet/puzzles/cat_v2.clsp +397 -0
  789. chia/wallet/cat_wallet/puzzles/cat_v2.clsp.hex +1 -0
  790. chia/wallet/cat_wallet/puzzles/delegated_tail.clsp +25 -0
  791. chia/wallet/cat_wallet/puzzles/delegated_tail.clsp.hex +1 -0
  792. chia/wallet/cat_wallet/puzzles/everything_with_signature.clsp +15 -0
  793. chia/wallet/cat_wallet/puzzles/everything_with_signature.clsp.hex +1 -0
  794. chia/wallet/cat_wallet/puzzles/genesis_by_coin_id.clsp +26 -0
  795. chia/wallet/cat_wallet/puzzles/genesis_by_coin_id.clsp.hex +1 -0
  796. chia/wallet/cat_wallet/puzzles/genesis_by_coin_id_or_singleton.clsp +42 -0
  797. chia/wallet/cat_wallet/puzzles/genesis_by_coin_id_or_singleton.clsp.hex +1 -0
  798. chia/wallet/cat_wallet/puzzles/genesis_by_puzzle_hash.clsp +24 -0
  799. chia/wallet/cat_wallet/puzzles/genesis_by_puzzle_hash.clsp.hex +1 -0
  800. chia/wallet/coin_selection.py +188 -0
  801. chia/wallet/conditions.py +1326 -0
  802. chia/wallet/dao_wallet/__init__.py +0 -0
  803. chia/wallet/dao_wallet/dao_info.py +61 -0
  804. chia/wallet/dao_wallet/dao_utils.py +810 -0
  805. chia/wallet/dao_wallet/dao_wallet.py +2121 -0
  806. chia/wallet/db_wallet/__init__.py +0 -0
  807. chia/wallet/db_wallet/db_wallet_puzzles.py +107 -0
  808. chia/wallet/derivation_record.py +30 -0
  809. chia/wallet/derive_keys.py +146 -0
  810. chia/wallet/did_wallet/__init__.py +0 -0
  811. chia/wallet/did_wallet/did_info.py +39 -0
  812. chia/wallet/did_wallet/did_wallet.py +1485 -0
  813. chia/wallet/did_wallet/did_wallet_puzzles.py +220 -0
  814. chia/wallet/did_wallet/puzzles/__init__.py +0 -0
  815. chia/wallet/did_wallet/puzzles/did_innerpuz.clsp +135 -0
  816. chia/wallet/did_wallet/puzzles/did_innerpuz.clsp.hex +1 -0
  817. chia/wallet/driver_protocol.py +26 -0
  818. chia/wallet/key_val_store.py +55 -0
  819. chia/wallet/lineage_proof.py +58 -0
  820. chia/wallet/nft_wallet/__init__.py +0 -0
  821. chia/wallet/nft_wallet/metadata_outer_puzzle.py +92 -0
  822. chia/wallet/nft_wallet/nft_info.py +120 -0
  823. chia/wallet/nft_wallet/nft_puzzles.py +305 -0
  824. chia/wallet/nft_wallet/nft_wallet.py +1686 -0
  825. chia/wallet/nft_wallet/ownership_outer_puzzle.py +101 -0
  826. chia/wallet/nft_wallet/puzzles/__init__.py +0 -0
  827. chia/wallet/nft_wallet/puzzles/create_nft_launcher_from_did.clsp +6 -0
  828. chia/wallet/nft_wallet/puzzles/create_nft_launcher_from_did.clsp.hex +1 -0
  829. chia/wallet/nft_wallet/puzzles/nft_intermediate_launcher.clsp +6 -0
  830. chia/wallet/nft_wallet/puzzles/nft_intermediate_launcher.clsp.hex +1 -0
  831. chia/wallet/nft_wallet/puzzles/nft_metadata_updater_default.clsp +30 -0
  832. chia/wallet/nft_wallet/puzzles/nft_metadata_updater_default.clsp.hex +1 -0
  833. chia/wallet/nft_wallet/puzzles/nft_metadata_updater_updateable.clsp +28 -0
  834. chia/wallet/nft_wallet/puzzles/nft_metadata_updater_updateable.clsp.hex +1 -0
  835. chia/wallet/nft_wallet/puzzles/nft_ownership_layer.clsp +100 -0
  836. chia/wallet/nft_wallet/puzzles/nft_ownership_layer.clsp.hex +1 -0
  837. chia/wallet/nft_wallet/puzzles/nft_ownership_transfer_program_one_way_claim_with_royalties.clsp +78 -0
  838. chia/wallet/nft_wallet/puzzles/nft_ownership_transfer_program_one_way_claim_with_royalties.clsp.hex +1 -0
  839. chia/wallet/nft_wallet/puzzles/nft_state_layer.clsp +74 -0
  840. chia/wallet/nft_wallet/puzzles/nft_state_layer.clsp.hex +1 -0
  841. chia/wallet/nft_wallet/singleton_outer_puzzle.py +101 -0
  842. chia/wallet/nft_wallet/transfer_program_puzzle.py +82 -0
  843. chia/wallet/nft_wallet/uncurry_nft.py +217 -0
  844. chia/wallet/notification_manager.py +117 -0
  845. chia/wallet/notification_store.py +178 -0
  846. chia/wallet/outer_puzzles.py +84 -0
  847. chia/wallet/payment.py +34 -0
  848. chia/wallet/puzzle_drivers.py +118 -0
  849. chia/wallet/puzzles/__init__.py +0 -0
  850. chia/wallet/puzzles/augmented_condition.clsp +13 -0
  851. chia/wallet/puzzles/augmented_condition.clsp.hex +1 -0
  852. chia/wallet/puzzles/clawback/__init__.py +0 -0
  853. chia/wallet/puzzles/clawback/drivers.py +188 -0
  854. chia/wallet/puzzles/clawback/metadata.py +38 -0
  855. chia/wallet/puzzles/clawback/puzzle_decorator.py +67 -0
  856. chia/wallet/puzzles/condition_codes.clib +77 -0
  857. chia/wallet/puzzles/curry-and-treehash.clib +102 -0
  858. chia/wallet/puzzles/curry.clib +135 -0
  859. chia/wallet/puzzles/curry_by_index.clib +16 -0
  860. chia/wallet/puzzles/dao_cat_eve.clsp +17 -0
  861. chia/wallet/puzzles/dao_cat_eve.clsp.hex +1 -0
  862. chia/wallet/puzzles/dao_cat_launcher.clsp +36 -0
  863. chia/wallet/puzzles/dao_cat_launcher.clsp.hex +1 -0
  864. chia/wallet/puzzles/dao_finished_state.clsp +35 -0
  865. chia/wallet/puzzles/dao_finished_state.clsp.hex +1 -0
  866. chia/wallet/puzzles/dao_finished_state.clsp.hex.sha256tree +1 -0
  867. chia/wallet/puzzles/dao_lockup.clsp +288 -0
  868. chia/wallet/puzzles/dao_lockup.clsp.hex +1 -0
  869. chia/wallet/puzzles/dao_lockup.clsp.hex.sha256tree +1 -0
  870. chia/wallet/puzzles/dao_proposal.clsp +377 -0
  871. chia/wallet/puzzles/dao_proposal.clsp.hex +1 -0
  872. chia/wallet/puzzles/dao_proposal.clsp.hex.sha256tree +1 -0
  873. chia/wallet/puzzles/dao_proposal_timer.clsp +78 -0
  874. chia/wallet/puzzles/dao_proposal_timer.clsp.hex +1 -0
  875. chia/wallet/puzzles/dao_proposal_timer.clsp.hex.sha256tree +1 -0
  876. chia/wallet/puzzles/dao_proposal_validator.clsp +87 -0
  877. chia/wallet/puzzles/dao_proposal_validator.clsp.hex +1 -0
  878. chia/wallet/puzzles/dao_proposal_validator.clsp.hex.sha256tree +1 -0
  879. chia/wallet/puzzles/dao_spend_p2_singleton_v2.clsp +240 -0
  880. chia/wallet/puzzles/dao_spend_p2_singleton_v2.clsp.hex +1 -0
  881. chia/wallet/puzzles/dao_spend_p2_singleton_v2.clsp.hex.sha256tree +1 -0
  882. chia/wallet/puzzles/dao_treasury.clsp +115 -0
  883. chia/wallet/puzzles/dao_treasury.clsp.hex +1 -0
  884. chia/wallet/puzzles/dao_update_proposal.clsp +44 -0
  885. chia/wallet/puzzles/dao_update_proposal.clsp.hex +1 -0
  886. chia/wallet/puzzles/deployed_puzzle_hashes.json +67 -0
  887. chia/wallet/puzzles/json.clib +25 -0
  888. chia/wallet/puzzles/load_clvm.py +162 -0
  889. chia/wallet/puzzles/merkle_utils.clib +18 -0
  890. chia/wallet/puzzles/notification.clsp +7 -0
  891. chia/wallet/puzzles/notification.clsp.hex +1 -0
  892. chia/wallet/puzzles/p2_1_of_n.clsp +22 -0
  893. chia/wallet/puzzles/p2_1_of_n.clsp.hex +1 -0
  894. chia/wallet/puzzles/p2_conditions.clsp +3 -0
  895. chia/wallet/puzzles/p2_conditions.clsp.hex +1 -0
  896. chia/wallet/puzzles/p2_conditions.py +27 -0
  897. chia/wallet/puzzles/p2_delegated_conditions.clsp +18 -0
  898. chia/wallet/puzzles/p2_delegated_conditions.clsp.hex +1 -0
  899. chia/wallet/puzzles/p2_delegated_conditions.py +22 -0
  900. chia/wallet/puzzles/p2_delegated_puzzle.clsp +19 -0
  901. chia/wallet/puzzles/p2_delegated_puzzle.clsp.hex +1 -0
  902. chia/wallet/puzzles/p2_delegated_puzzle.py +35 -0
  903. chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.clsp +91 -0
  904. chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.clsp.hex +1 -0
  905. chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py +161 -0
  906. chia/wallet/puzzles/p2_m_of_n_delegate_direct.clsp +108 -0
  907. chia/wallet/puzzles/p2_m_of_n_delegate_direct.clsp.hex +1 -0
  908. chia/wallet/puzzles/p2_m_of_n_delegate_direct.py +22 -0
  909. chia/wallet/puzzles/p2_parent.clsp +19 -0
  910. chia/wallet/puzzles/p2_parent.clsp.hex +1 -0
  911. chia/wallet/puzzles/p2_puzzle_hash.clsp +18 -0
  912. chia/wallet/puzzles/p2_puzzle_hash.clsp.hex +1 -0
  913. chia/wallet/puzzles/p2_puzzle_hash.py +28 -0
  914. chia/wallet/puzzles/p2_singleton.clsp +30 -0
  915. chia/wallet/puzzles/p2_singleton.clsp.hex +1 -0
  916. chia/wallet/puzzles/p2_singleton_aggregator.clsp +81 -0
  917. chia/wallet/puzzles/p2_singleton_aggregator.clsp.hex +1 -0
  918. chia/wallet/puzzles/p2_singleton_or_delayed_puzhash.clsp +50 -0
  919. chia/wallet/puzzles/p2_singleton_or_delayed_puzhash.clsp.hex +1 -0
  920. chia/wallet/puzzles/p2_singleton_via_delegated_puzzle.clsp +47 -0
  921. chia/wallet/puzzles/p2_singleton_via_delegated_puzzle.clsp.hex +1 -0
  922. chia/wallet/puzzles/puzzle_utils.py +34 -0
  923. chia/wallet/puzzles/settlement_payments.clsp +49 -0
  924. chia/wallet/puzzles/settlement_payments.clsp.hex +1 -0
  925. chia/wallet/puzzles/sha256tree.clib +11 -0
  926. chia/wallet/puzzles/singleton_launcher.clsp +16 -0
  927. chia/wallet/puzzles/singleton_launcher.clsp.hex +1 -0
  928. chia/wallet/puzzles/singleton_top_layer.clsp +177 -0
  929. chia/wallet/puzzles/singleton_top_layer.clsp.hex +1 -0
  930. chia/wallet/puzzles/singleton_top_layer.py +295 -0
  931. chia/wallet/puzzles/singleton_top_layer_v1_1.clsp +107 -0
  932. chia/wallet/puzzles/singleton_top_layer_v1_1.clsp.hex +1 -0
  933. chia/wallet/puzzles/singleton_top_layer_v1_1.py +344 -0
  934. chia/wallet/puzzles/singleton_truths.clib +21 -0
  935. chia/wallet/puzzles/tails.py +344 -0
  936. chia/wallet/puzzles/utility_macros.clib +48 -0
  937. chia/wallet/signer_protocol.py +126 -0
  938. chia/wallet/singleton.py +106 -0
  939. chia/wallet/singleton_record.py +30 -0
  940. chia/wallet/trade_manager.py +1088 -0
  941. chia/wallet/trade_record.py +67 -0
  942. chia/wallet/trading/__init__.py +0 -0
  943. chia/wallet/trading/offer.py +703 -0
  944. chia/wallet/trading/trade_status.py +13 -0
  945. chia/wallet/trading/trade_store.py +526 -0
  946. chia/wallet/transaction_record.py +143 -0
  947. chia/wallet/transaction_sorting.py +14 -0
  948. chia/wallet/uncurried_puzzle.py +17 -0
  949. chia/wallet/util/__init__.py +0 -0
  950. chia/wallet/util/address_type.py +55 -0
  951. chia/wallet/util/blind_signer_tl.py +168 -0
  952. chia/wallet/util/clvm_streamable.py +203 -0
  953. chia/wallet/util/compute_hints.py +66 -0
  954. chia/wallet/util/compute_memos.py +45 -0
  955. chia/wallet/util/curry_and_treehash.py +90 -0
  956. chia/wallet/util/debug_spend_bundle.py +234 -0
  957. chia/wallet/util/merkle_tree.py +100 -0
  958. chia/wallet/util/merkle_utils.py +102 -0
  959. chia/wallet/util/new_peak_queue.py +82 -0
  960. chia/wallet/util/notifications.py +12 -0
  961. chia/wallet/util/peer_request_cache.py +174 -0
  962. chia/wallet/util/puzzle_compression.py +96 -0
  963. chia/wallet/util/puzzle_decorator.py +100 -0
  964. chia/wallet/util/puzzle_decorator_type.py +7 -0
  965. chia/wallet/util/query_filter.py +60 -0
  966. chia/wallet/util/transaction_type.py +23 -0
  967. chia/wallet/util/tx_config.py +158 -0
  968. chia/wallet/util/wallet_sync_utils.py +348 -0
  969. chia/wallet/util/wallet_types.py +65 -0
  970. chia/wallet/vc_wallet/__init__.py +0 -0
  971. chia/wallet/vc_wallet/cr_cat_drivers.py +663 -0
  972. chia/wallet/vc_wallet/cr_cat_wallet.py +875 -0
  973. chia/wallet/vc_wallet/cr_outer_puzzle.py +102 -0
  974. chia/wallet/vc_wallet/cr_puzzles/__init__.py +0 -0
  975. chia/wallet/vc_wallet/cr_puzzles/conditions_w_fee_announce.clsp +3 -0
  976. chia/wallet/vc_wallet/cr_puzzles/conditions_w_fee_announce.clsp.hex +1 -0
  977. chia/wallet/vc_wallet/cr_puzzles/credential_restriction.clsp +304 -0
  978. chia/wallet/vc_wallet/cr_puzzles/credential_restriction.clsp.hex +1 -0
  979. chia/wallet/vc_wallet/cr_puzzles/flag_proofs_checker.clsp +45 -0
  980. chia/wallet/vc_wallet/cr_puzzles/flag_proofs_checker.clsp.hex +1 -0
  981. chia/wallet/vc_wallet/vc_drivers.py +838 -0
  982. chia/wallet/vc_wallet/vc_puzzles/__init__.py +0 -0
  983. chia/wallet/vc_wallet/vc_puzzles/covenant_layer.clsp +30 -0
  984. chia/wallet/vc_wallet/vc_puzzles/covenant_layer.clsp.hex +1 -0
  985. chia/wallet/vc_wallet/vc_puzzles/eml_covenant_morpher.clsp +75 -0
  986. chia/wallet/vc_wallet/vc_puzzles/eml_covenant_morpher.clsp.hex +1 -0
  987. chia/wallet/vc_wallet/vc_puzzles/eml_transfer_program_covenant_adapter.clsp +32 -0
  988. chia/wallet/vc_wallet/vc_puzzles/eml_transfer_program_covenant_adapter.clsp.hex +1 -0
  989. chia/wallet/vc_wallet/vc_puzzles/eml_update_metadata_with_DID.clsp +80 -0
  990. chia/wallet/vc_wallet/vc_puzzles/eml_update_metadata_with_DID.clsp.hex +1 -0
  991. chia/wallet/vc_wallet/vc_puzzles/exigent_metadata_layer.clsp +163 -0
  992. chia/wallet/vc_wallet/vc_puzzles/exigent_metadata_layer.clsp.hex +1 -0
  993. chia/wallet/vc_wallet/vc_puzzles/p2_announced_delegated_puzzle.clsp +16 -0
  994. chia/wallet/vc_wallet/vc_puzzles/p2_announced_delegated_puzzle.clsp.hex +1 -0
  995. chia/wallet/vc_wallet/vc_puzzles/standard_vc_backdoor_puzzle.clsp +74 -0
  996. chia/wallet/vc_wallet/vc_puzzles/standard_vc_backdoor_puzzle.clsp.hex +1 -0
  997. chia/wallet/vc_wallet/vc_puzzles/std_parent_morpher.clsp +23 -0
  998. chia/wallet/vc_wallet/vc_puzzles/std_parent_morpher.clsp.hex +1 -0
  999. chia/wallet/vc_wallet/vc_puzzles/viral_backdoor.clsp +64 -0
  1000. chia/wallet/vc_wallet/vc_puzzles/viral_backdoor.clsp.hex +1 -0
  1001. chia/wallet/vc_wallet/vc_store.py +263 -0
  1002. chia/wallet/vc_wallet/vc_wallet.py +638 -0
  1003. chia/wallet/wallet.py +698 -0
  1004. chia/wallet/wallet_action_scope.py +95 -0
  1005. chia/wallet/wallet_blockchain.py +244 -0
  1006. chia/wallet/wallet_coin_record.py +72 -0
  1007. chia/wallet/wallet_coin_store.py +351 -0
  1008. chia/wallet/wallet_info.py +36 -0
  1009. chia/wallet/wallet_interested_store.py +188 -0
  1010. chia/wallet/wallet_nft_store.py +279 -0
  1011. chia/wallet/wallet_node.py +1769 -0
  1012. chia/wallet/wallet_node_api.py +201 -0
  1013. chia/wallet/wallet_pool_store.py +120 -0
  1014. chia/wallet/wallet_protocol.py +90 -0
  1015. chia/wallet/wallet_puzzle_store.py +365 -0
  1016. chia/wallet/wallet_retry_store.py +70 -0
  1017. chia/wallet/wallet_singleton_store.py +258 -0
  1018. chia/wallet/wallet_spend_bundle.py +41 -0
  1019. chia/wallet/wallet_state_manager.py +2820 -0
  1020. chia/wallet/wallet_transaction_store.py +470 -0
  1021. chia/wallet/wallet_user_store.py +110 -0
  1022. chia/wallet/wallet_weight_proof_handler.py +126 -0
  1023. chia_blockchain-2.4.4.dist-info/LICENSE +201 -0
  1024. chia_blockchain-2.4.4.dist-info/METADATA +161 -0
  1025. chia_blockchain-2.4.4.dist-info/RECORD +1028 -0
  1026. chia_blockchain-2.4.4.dist-info/WHEEL +4 -0
  1027. chia_blockchain-2.4.4.dist-info/entry_points.txt +17 -0
  1028. mozilla-ca/cacert.pem +3666 -0
@@ -0,0 +1,3052 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import contextlib
5
+ import dataclasses
6
+ import logging
7
+ import multiprocessing
8
+ import random
9
+ import sqlite3
10
+ import time
11
+ import traceback
12
+ from multiprocessing.context import BaseContext
13
+ from pathlib import Path
14
+ from typing import (
15
+ TYPE_CHECKING,
16
+ Any,
17
+ AsyncIterator,
18
+ Awaitable,
19
+ Callable,
20
+ ClassVar,
21
+ Dict,
22
+ List,
23
+ Optional,
24
+ Set,
25
+ TextIO,
26
+ Tuple,
27
+ Union,
28
+ cast,
29
+ final,
30
+ )
31
+
32
+ from chia_rs import AugSchemeMPL, BLSCache
33
+ from packaging.version import Version
34
+
35
+ from chia.consensus.block_body_validation import ForkInfo
36
+ from chia.consensus.block_creation import unfinished_block_to_full_block
37
+ from chia.consensus.block_record import BlockRecord
38
+ from chia.consensus.blockchain import AddBlockResult, Blockchain, BlockchainMutexPriority, StateChangeSummary
39
+ from chia.consensus.blockchain_interface import BlockchainInterface
40
+ from chia.consensus.constants import ConsensusConstants
41
+ from chia.consensus.cost_calculator import NPCResult
42
+ from chia.consensus.difficulty_adjustment import get_next_sub_slot_iters_and_difficulty
43
+ from chia.consensus.get_block_generator import get_block_generator
44
+ from chia.consensus.make_sub_epoch_summary import next_sub_epoch_summary
45
+ from chia.consensus.multiprocess_validation import PreValidationResult, pre_validate_blocks_multiprocessing
46
+ from chia.consensus.pot_iterations import calculate_sp_iters
47
+ from chia.full_node.block_store import BlockStore
48
+ from chia.full_node.coin_store import CoinStore
49
+ from chia.full_node.full_node_api import FullNodeAPI
50
+ from chia.full_node.full_node_store import FullNodeStore, FullNodeStorePeakResult, UnfinishedBlockEntry
51
+ from chia.full_node.hint_management import get_hints_and_subscription_coin_ids
52
+ from chia.full_node.hint_store import HintStore
53
+ from chia.full_node.mempool import MempoolRemoveInfo
54
+ from chia.full_node.mempool_manager import MempoolManager, NewPeakItem
55
+ from chia.full_node.signage_point import SignagePoint
56
+ from chia.full_node.subscriptions import PeerSubscriptions, peers_for_spend_bundle
57
+ from chia.full_node.sync_store import Peak, SyncStore
58
+ from chia.full_node.tx_processing_queue import TransactionQueue
59
+ from chia.full_node.weight_proof import WeightProofHandler
60
+ from chia.protocols import farmer_protocol, full_node_protocol, timelord_protocol, wallet_protocol
61
+ from chia.protocols.farmer_protocol import SignagePointSourceData, SPSubSlotSourceData, SPVDFSourceData
62
+ from chia.protocols.full_node_protocol import RequestBlocks, RespondBlock, RespondBlocks, RespondSignagePoint
63
+ from chia.protocols.protocol_message_types import ProtocolMessageTypes
64
+ from chia.protocols.shared_protocol import Capability
65
+ from chia.protocols.wallet_protocol import CoinState, CoinStateUpdate, RemovedMempoolItem
66
+ from chia.rpc.rpc_server import StateChangedProtocol
67
+ from chia.server.node_discovery import FullNodePeers
68
+ from chia.server.outbound_message import Message, NodeType, make_msg
69
+ from chia.server.server import ChiaServer
70
+ from chia.server.ws_connection import WSChiaConnection
71
+ from chia.types.blockchain_format.classgroup import ClassgroupElement
72
+ from chia.types.blockchain_format.pool_target import PoolTarget
73
+ from chia.types.blockchain_format.sized_bytes import bytes32
74
+ from chia.types.blockchain_format.sub_epoch_summary import SubEpochSummary
75
+ from chia.types.blockchain_format.vdf import CompressibleVDFField, VDFInfo, VDFProof, validate_vdf
76
+ from chia.types.coin_record import CoinRecord
77
+ from chia.types.end_of_slot_bundle import EndOfSubSlotBundle
78
+ from chia.types.full_block import FullBlock
79
+ from chia.types.generator_types import BlockGenerator
80
+ from chia.types.header_block import HeaderBlock
81
+ from chia.types.mempool_inclusion_status import MempoolInclusionStatus
82
+ from chia.types.mempool_item import MempoolItem
83
+ from chia.types.peer_info import PeerInfo
84
+ from chia.types.spend_bundle import SpendBundle
85
+ from chia.types.transaction_queue_entry import TransactionQueueEntry
86
+ from chia.types.unfinished_block import UnfinishedBlock
87
+ from chia.types.weight_proof import WeightProof
88
+ from chia.util.bech32m import encode_puzzle_hash
89
+ from chia.util.check_fork_next_block import check_fork_next_block
90
+ from chia.util.condition_tools import pkm_pairs
91
+ from chia.util.config import process_config_start_method
92
+ from chia.util.db_synchronous import db_synchronous_on
93
+ from chia.util.db_version import lookup_db_version, set_db_version_async
94
+ from chia.util.db_wrapper import DBWrapper2, manage_connection
95
+ from chia.util.errors import ConsensusError, Err, TimestampError, ValidationError
96
+ from chia.util.ints import uint8, uint32, uint64, uint128
97
+ from chia.util.limited_semaphore import LimitedSemaphore
98
+ from chia.util.log_exceptions import log_exceptions
99
+ from chia.util.path import path_from_root
100
+ from chia.util.profiler import enable_profiler, mem_profile_task, profile_task
101
+ from chia.util.safe_cancel_task import cancel_task_safe
102
+
103
+
104
# This is the result of calling peak_post_processing, which is then fed into peak_post_processing_2
@dataclasses.dataclass
class PeakPostProcessingResult:
    """Intermediate results of processing a new peak, handed from
    peak_post_processing to peak_post_processing_2 (see comment above)."""

    mempool_peak_result: List[NewPeakItem]  # The new items from calling MempoolManager.new_peak
    mempool_removals: List[MempoolRemoveInfo]  # The removed mempool items from calling MempoolManager.new_peak
    fns_peak_result: FullNodeStorePeakResult  # The result of calling FullNodeStore.new_peak
    hints: List[Tuple[bytes32, bytes]]  # The hints added to the DB
    lookup_coin_ids: List[bytes32]  # The coin IDs that we need to look up to notify wallets of changes
112
+
113
+
114
@dataclasses.dataclass(frozen=True)
class WalletUpdate:
    """A single state-update event queued on FullNode.wallet_sync_queue for
    delivery to connected wallets (immutable once created)."""

    fork_height: uint32  # height of the fork point this update is relative to
    peak: Peak  # the peak this update corresponds to
    coin_records: List[CoinRecord]  # coin records wallets should be notified about
    hints: Dict[bytes32, bytes32]  # presumably coin id -> hint puzzle hash; confirm against the queue consumer
120
+
121
+
122
+ @final
123
+ @dataclasses.dataclass
124
+ class FullNode:
125
    if TYPE_CHECKING:
        # Static-only check that FullNode satisfies the RPC service protocol;
        # never evaluated at runtime.
        from chia.rpc.rpc_server import RpcServiceProtocol

        _protocol_check: ClassVar[RpcServiceProtocol] = cast("FullNode", None)

    # --- required fields (no default); supplied by FullNode.create() ---
    root_path: Path
    config: Dict[str, Any]
    constants: ConsensusConstants
    signage_point_times: List[float]
    full_node_store: FullNodeStore
    log: logging.Logger
    db_path: Path
    wallet_sync_queue: asyncio.Queue[WalletUpdate]
    # --- optional state; populated lazily (much of it inside manage()) ---
    _segment_task: Optional[asyncio.Task[None]] = None
    initialized: bool = False
    _server: Optional[ChiaServer] = None  # accessed via the `server` property, which raises until assigned
    _shut_down: bool = False
    pow_creation: Dict[bytes32, asyncio.Event] = dataclasses.field(default_factory=dict)
    state_changed_callback: Optional[StateChangedProtocol] = None
    full_node_peers: Optional[FullNodePeers] = None
    sync_store: SyncStore = dataclasses.field(default_factory=SyncStore)
    uncompact_task: Optional[asyncio.Task[None]] = None
    compact_vdf_requests: Set[bytes32] = dataclasses.field(default_factory=set)
    # TODO: Logging isn't setup yet so the log entries related to parsing the
    # config would end up on stdout if handled here.
    multiprocessing_context: Optional[BaseContext] = None
    _ui_tasks: Set[asyncio.Task[None]] = dataclasses.field(default_factory=set)
    subscriptions: PeerSubscriptions = dataclasses.field(default_factory=PeerSubscriptions)
    _transaction_queue_task: Optional[asyncio.Task[None]] = None
    simulator_transaction_callback: Optional[Callable[[bytes32], Awaitable[None]]] = None
    _sync_task: Optional[asyncio.Task[None]] = None
    _transaction_queue: Optional[TransactionQueue] = None
    _compact_vdf_sem: Optional[LimitedSemaphore] = None
    _new_peak_sem: Optional[LimitedSemaphore] = None
    _add_transaction_semaphore: Optional[asyncio.Semaphore] = None
    _db_wrapper: Optional[DBWrapper2] = None
    _hint_store: Optional[HintStore] = None
    _block_store: Optional[BlockStore] = None
    _coin_store: Optional[CoinStore] = None
    _mempool_manager: Optional[MempoolManager] = None
    _init_weight_proof: Optional[asyncio.Task[None]] = None
    _blockchain: Optional[Blockchain] = None
    _timelord_lock: Optional[asyncio.Lock] = None
    weight_proof_handler: Optional[WeightProofHandler] = None
    # hashes of peaks that failed long sync on CHIP-13 validation
    bad_peak_cache: Dict[bytes32, uint32] = dataclasses.field(default_factory=dict)
    wallet_sync_task: Optional[asyncio.Task[None]] = None
    _bls_cache: BLSCache = dataclasses.field(default_factory=lambda: BLSCache(50000))
173
+
174
+ @property
175
+ def server(self) -> ChiaServer:
176
+ # This is a stop gap until the class usage is refactored such the values of
177
+ # integral attributes are known at creation of the instance.
178
+ if self._server is None:
179
+ raise RuntimeError("server not assigned")
180
+
181
+ return self._server
182
+
183
+ @classmethod
184
+ async def create(
185
+ cls,
186
+ config: Dict[str, Any],
187
+ root_path: Path,
188
+ consensus_constants: ConsensusConstants,
189
+ name: str = __name__,
190
+ ) -> FullNode:
191
+ # NOTE: async to force the queue creation to occur when an event loop is available
192
+ db_path_replaced: str = config["database_path"].replace("CHALLENGE", config["selected_network"])
193
+ db_path = path_from_root(root_path, db_path_replaced)
194
+ db_path.parent.mkdir(parents=True, exist_ok=True)
195
+
196
+ return cls(
197
+ root_path=root_path,
198
+ config=config,
199
+ constants=consensus_constants,
200
+ signage_point_times=[time.time() for _ in range(consensus_constants.NUM_SPS_SUB_SLOT)],
201
+ full_node_store=FullNodeStore(consensus_constants),
202
+ log=logging.getLogger(name),
203
+ db_path=db_path,
204
+ wallet_sync_queue=asyncio.Queue(),
205
+ )
206
+
207
    @contextlib.asynccontextmanager
    async def manage(self) -> AsyncIterator[None]:
        """Bring the node up, yield while it runs, then shut it down.

        Startup: creates locks/semaphores, opens the blockchain DB (upgrading
        an empty DB to schema v2), builds the stores, blockchain, and mempool,
        spawns background tasks, and performs initial peak post-processing.
        Shutdown (finally block after the yield): cancels and awaits the
        background tasks and shuts down blockchain and mempool.
        """
        self._timelord_lock = asyncio.Lock()
        self._compact_vdf_sem = LimitedSemaphore.create(active_limit=4, waiting_limit=20)

        # We don't want to run too many concurrent new_peak instances, because it would fetch the same block from
        # multiple peers and re-validate.
        self._new_peak_sem = LimitedSemaphore.create(active_limit=2, waiting_limit=20)

        # These many respond_transaction tasks can be active at any point in time
        self._add_transaction_semaphore = asyncio.Semaphore(200)

        sql_log_path: Optional[Path] = None
        with contextlib.ExitStack() as exit_stack:
            sql_log_file: Optional[TextIO] = None
            if self.config.get("log_sqlite_cmds", False):
                sql_log_path = path_from_root(self.root_path, "log/sql.log")
                self.log.info(f"logging SQL commands to {sql_log_path}")
                # ExitStack owns the file handle so it is closed on exit.
                sql_log_file = exit_stack.enter_context(sql_log_path.open("a", encoding="utf-8"))

            # create the store (db) and full node instance
            # TODO: is this standardized and thus able to be handled by DBWrapper2?
            async with manage_connection(self.db_path, log_file=sql_log_file, name="version_check") as db_connection:
                db_version = await lookup_db_version(db_connection)

            self.log.info(f"using blockchain database {self.db_path}, which is version {db_version}")

            db_sync = db_synchronous_on(self.config.get("db_sync", "auto"))
            self.log.info(f"opening blockchain DB: synchronous={db_sync}")

            async with DBWrapper2.managed(
                self.db_path,
                db_version=db_version,
                reader_count=self.config.get("db_readers", 4),
                log_path=sql_log_path,
                synchronous=db_sync,
            ) as self._db_wrapper:
                # A DB reported as non-v2 that has no full_blocks table is a
                # fresh file: stamp it as schema v2.
                if self.db_wrapper.db_version != 2:
                    async with self.db_wrapper.reader_no_transaction() as conn:
                        async with conn.execute(
                            "SELECT name FROM sqlite_master WHERE type='table' AND name='full_blocks'"
                        ) as cur:
                            if len(list(await cur.fetchall())) == 0:
                                try:
                                    # this is a new DB file. Make it v2
                                    async with self.db_wrapper.writer_maybe_transaction() as w_conn:
                                        await set_db_version_async(w_conn, 2)
                                        self.db_wrapper.db_version = 2
                                        self.log.info("blockchain database is empty, configuring as v2")
                                except sqlite3.OperationalError:
                                    # it could be a database created with "chia init", which is
                                    # empty except it has the database_version table
                                    pass

                self._block_store = await BlockStore.create(self.db_wrapper)
                self._hint_store = await HintStore.create(self.db_wrapper)
                self._coin_store = await CoinStore.create(self.db_wrapper)
                self.log.info("Initializing blockchain from disk")
                start_time = time.monotonic()
                reserved_cores = self.config.get("reserved_cores", 0)
                single_threaded = self.config.get("single_threaded", False)
                multiprocessing_start_method = process_config_start_method(config=self.config, log=self.log)
                self.multiprocessing_context = multiprocessing.get_context(method=multiprocessing_start_method)
                self._blockchain = await Blockchain.create(
                    coin_store=self.coin_store,
                    block_store=self.block_store,
                    consensus_constants=self.constants,
                    blockchain_dir=self.db_path.parent,
                    reserved_cores=reserved_cores,
                    multiprocessing_context=self.multiprocessing_context,
                    single_threaded=single_threaded,
                )

                self._mempool_manager = MempoolManager(
                    get_coin_records=self.coin_store.get_coin_records,
                    consensus_constants=self.constants,
                    single_threaded=single_threaded,
                )

                # Transactions go into this queue from the server, and get sent to respond_transaction
                self._transaction_queue = TransactionQueue(1000, self.log)
                self._transaction_queue_task: asyncio.Task[None] = asyncio.create_task(self._handle_transactions())

                self._init_weight_proof = asyncio.create_task(self.initialize_weight_proof())

                if self.config.get("enable_profiler", False):
                    asyncio.create_task(profile_task(self.root_path, "node", self.log))

                self.profile_block_validation = self.config.get("profile_block_validation", False)
                if self.profile_block_validation:  # pragma: no cover
                    # this is not covered by any unit tests as it's essentially test code
                    # itself. It's exercised manually when investigating performance issues
                    profile_dir = path_from_root(self.root_path, "block-validation-profile")
                    profile_dir.mkdir(parents=True, exist_ok=True)

                if self.config.get("enable_memory_profiler", False):
                    asyncio.create_task(mem_profile_task(self.root_path, "node", self.log))

                time_taken = time.monotonic() - start_time
                peak: Optional[BlockRecord] = self.blockchain.get_peak()
                if peak is None:
                    self.log.info(f"Initialized with empty blockchain time taken: {int(time_taken)}s")
                    # An empty chain with unspent coins means the DB is internally
                    # inconsistent — refuse to start.
                    num_unspent = await self.coin_store.num_unspent()
                    if num_unspent > 0:
                        self.log.error(
                            f"Inconsistent blockchain DB file! Could not find peak block but found {num_unspent} coins! "
                            "This is a fatal error. The blockchain database may be corrupt"
                        )
                        raise RuntimeError("corrupt blockchain DB")
                else:
                    self.log.info(
                        f"Blockchain initialized to peak {peak.header_hash} height"
                        f" {peak.height}, "
                        f"time taken: {int(time_taken)}s"
                    )
                    async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
                        pending_tx = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), None)
                        assert len(pending_tx.items) == 0  # no pending transactions when starting up

                    # Re-run peak post-processing once at startup so downstream
                    # state (e.g. subscriptions/notifications) matches the peak.
                    full_peak: Optional[FullBlock] = await self.blockchain.get_full_peak()
                    assert full_peak is not None
                    state_change_summary = StateChangeSummary(peak, uint32(max(peak.height - 1, 0)), [], [], [], [])
                    ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
                        full_peak, state_change_summary, None
                    )
                    await self.peak_post_processing_2(full_peak, None, state_change_summary, ppp_result)
                if self.config["send_uncompact_interval"] != 0:
                    sanitize_weight_proof_only = False
                    if "sanitize_weight_proof_only" in self.config:
                        sanitize_weight_proof_only = self.config["sanitize_weight_proof_only"]
                    assert self.config["target_uncompact_proofs"] != 0
                    self.uncompact_task = asyncio.create_task(
                        self.broadcast_uncompact_blocks(
                            self.config["send_uncompact_interval"],
                            self.config["target_uncompact_proofs"],
                            sanitize_weight_proof_only,
                        )
                    )
                if self.wallet_sync_task is None or self.wallet_sync_task.done():
                    self.wallet_sync_task = asyncio.create_task(self._wallets_sync_task_handler())

                self.initialized = True
                if self.full_node_peers is not None:
                    asyncio.create_task(self.full_node_peers.start())
                try:
                    yield
                finally:
                    # Shutdown path: signal loops to stop, then cancel tasks.
                    self._shut_down = True
                    if self._init_weight_proof is not None:
                        self._init_weight_proof.cancel()

                    # blockchain is created in _start and in certain cases it may not exist here during _close
                    if self._blockchain is not None:
                        self.blockchain.shut_down()
                    # same for mempool_manager
                    if self._mempool_manager is not None:
                        self.mempool_manager.shut_down()

                    if self.full_node_peers is not None:
                        asyncio.create_task(self.full_node_peers.close())
                    if self.uncompact_task is not None:
                        self.uncompact_task.cancel()
                    if self._transaction_queue_task is not None:
                        self._transaction_queue_task.cancel()
                    cancel_task_safe(task=self.wallet_sync_task, log=self.log)
                    cancel_task_safe(task=self._sync_task, log=self.log)

                    for task_id, task in list(self.full_node_store.tx_fetch_tasks.items()):
                        cancel_task_safe(task, self.log)
                    # Wait for cancelled tasks to actually finish before the DB
                    # context managers above close.
                    if self._init_weight_proof is not None:
                        await asyncio.wait([self._init_weight_proof])
                    if self._sync_task is not None:
                        with contextlib.suppress(asyncio.CancelledError):
                            await self._sync_task
381
+
382
+ @property
383
+ def block_store(self) -> BlockStore:
384
+ assert self._block_store is not None
385
+ return self._block_store
386
+
387
+ @property
388
+ def timelord_lock(self) -> asyncio.Lock:
389
+ assert self._timelord_lock is not None
390
+ return self._timelord_lock
391
+
392
+ @property
393
+ def mempool_manager(self) -> MempoolManager:
394
+ assert self._mempool_manager is not None
395
+ return self._mempool_manager
396
+
397
+ @property
398
+ def blockchain(self) -> Blockchain:
399
+ assert self._blockchain is not None
400
+ return self._blockchain
401
+
402
+ @property
403
+ def coin_store(self) -> CoinStore:
404
+ assert self._coin_store is not None
405
+ return self._coin_store
406
+
407
+ @property
408
+ def add_transaction_semaphore(self) -> asyncio.Semaphore:
409
+ assert self._add_transaction_semaphore is not None
410
+ return self._add_transaction_semaphore
411
+
412
+ @property
413
+ def transaction_queue(self) -> TransactionQueue:
414
+ assert self._transaction_queue is not None
415
+ return self._transaction_queue
416
+
417
+ @property
418
+ def db_wrapper(self) -> DBWrapper2:
419
+ assert self._db_wrapper is not None
420
+ return self._db_wrapper
421
+
422
+ @property
423
+ def hint_store(self) -> HintStore:
424
+ assert self._hint_store is not None
425
+ return self._hint_store
426
+
427
+ @property
428
+ def new_peak_sem(self) -> LimitedSemaphore:
429
+ assert self._new_peak_sem is not None
430
+ return self._new_peak_sem
431
+
432
+ @property
433
+ def compact_vdf_sem(self) -> LimitedSemaphore:
434
+ assert self._compact_vdf_sem is not None
435
+ return self._compact_vdf_sem
436
+
437
+ def get_connections(self, request_node_type: Optional[NodeType]) -> List[Dict[str, Any]]:
438
+ connections = self.server.get_connections(request_node_type)
439
+ con_info: List[Dict[str, Any]] = []
440
+ if self.sync_store is not None:
441
+ peak_store = self.sync_store.peer_to_peak
442
+ else:
443
+ peak_store = None
444
+ for con in connections:
445
+ if peak_store is not None and con.peer_node_id in peak_store:
446
+ peak = peak_store[con.peer_node_id]
447
+ peak_height = peak.height
448
+ peak_hash = peak.header_hash
449
+ peak_weight = peak.weight
450
+ else:
451
+ peak_height = None
452
+ peak_hash = None
453
+ peak_weight = None
454
+ con_dict: Dict[str, Any] = {
455
+ "type": con.connection_type,
456
+ "local_port": con.local_port,
457
+ "peer_host": con.peer_info.host,
458
+ "peer_port": con.peer_info.port,
459
+ "peer_server_port": con.peer_server_port,
460
+ "node_id": con.peer_node_id,
461
+ "creation_time": con.creation_time,
462
+ "bytes_read": con.bytes_read,
463
+ "bytes_written": con.bytes_written,
464
+ "last_message_time": con.last_message_time,
465
+ "peak_height": peak_height,
466
+ "peak_weight": peak_weight,
467
+ "peak_hash": peak_hash,
468
+ }
469
+ con_info.append(con_dict)
470
+
471
+ return con_info
472
+
473
+ def _set_state_changed_callback(self, callback: StateChangedProtocol) -> None:
474
+ self.state_changed_callback = callback
475
+
476
+ async def _handle_one_transaction(self, entry: TransactionQueueEntry) -> None:
477
+ peer = entry.peer
478
+ try:
479
+ inc_status, err = await self.add_transaction(entry.transaction, entry.spend_name, peer, entry.test)
480
+ entry.done.set((inc_status, err))
481
+ except asyncio.CancelledError:
482
+ error_stack = traceback.format_exc()
483
+ self.log.debug(f"Cancelling _handle_one_transaction, closing: {error_stack}")
484
+ except Exception:
485
+ error_stack = traceback.format_exc()
486
+ self.log.error(f"Error in _handle_one_transaction, closing: {error_stack}")
487
+ if peer is not None:
488
+ await peer.close()
489
+ finally:
490
+ self.add_transaction_semaphore.release()
491
+
492
    async def _handle_transactions(self) -> None:
        """Drain the transaction queue until shutdown, one task per entry.

        The semaphore acquired here is released by _handle_one_transaction,
        bounding how many transactions are processed concurrently.
        """
        while not self._shut_down:
            # We use a semaphore to make sure we don't send more than 200 concurrent calls of respond_transaction.
            # However, doing them one at a time would be slow, because they get sent to other processes.
            await self.add_transaction_semaphore.acquire()
            item: TransactionQueueEntry = await self.transaction_queue.pop()
            # NOTE(review): the created task is not retained anywhere visible here;
            # per asyncio docs a task without a strong reference may be GC'd — confirm
            # a reference is kept elsewhere or intentionally accepted.
            asyncio.create_task(self._handle_one_transaction(item))
499
+
500
+ async def initialize_weight_proof(self) -> None:
501
+ self.weight_proof_handler = WeightProofHandler(
502
+ constants=self.constants,
503
+ blockchain=self.blockchain,
504
+ multiprocessing_context=self.multiprocessing_context,
505
+ )
506
+ peak = self.blockchain.get_peak()
507
+ if peak is not None:
508
+ await self.weight_proof_handler.create_sub_epoch_segments()
509
+
510
+ def set_server(self, server: ChiaServer) -> None:
511
+ self._server = server
512
+ dns_servers: List[str] = []
513
+ network_name = self.config["selected_network"]
514
+ try:
515
+ default_port = self.config["network_overrides"]["config"][network_name]["default_full_node_port"]
516
+ except Exception:
517
+ self.log.info("Default port field not found in config.")
518
+ default_port = None
519
+ if "dns_servers" in self.config:
520
+ dns_servers = self.config["dns_servers"]
521
+ elif network_name == "mainnet":
522
+ # If `dns_servers` is missing from the `config`, hardcode it if we're running mainnet.
523
+ dns_servers.append("dns-introducer.chia.net")
524
+ try:
525
+ self.full_node_peers = FullNodePeers(
526
+ self.server,
527
+ self.config["target_outbound_peer_count"],
528
+ self.root_path / Path(self.config.get("peers_file_path", "db/peers.dat")),
529
+ self.config["introducer_peer"],
530
+ dns_servers,
531
+ self.config["peer_connect_interval"],
532
+ self.config["selected_network"],
533
+ default_port,
534
+ self.log,
535
+ )
536
+ except Exception as e:
537
+ error_stack = traceback.format_exc()
538
+ self.log.error(f"Exception: {e}")
539
+ self.log.error(f"Exception in peer discovery: {e}")
540
+ self.log.error(f"Exception Stack: {error_stack}")
541
+
542
+ def _state_changed(self, change: str, change_data: Optional[Dict[str, Any]] = None) -> None:
543
+ if self.state_changed_callback is not None:
544
+ self.state_changed_callback(change, change_data)
545
+
546
    async def short_sync_batch(self, peer: WSChiaConnection, start_height: uint32, target_height: uint32) -> bool:
        """
        Tries to sync to a chain which is not too far in the future, by downloading batches of blocks. If the first
        block that we download is not connected to our chain, we return False and do an expensive long sync instead.
        Long sync is not preferred because it requires downloading and validating a weight proof.

        Args:
            peer: peer to sync from
            start_height: height that we should start downloading at. (Our peak is higher)
            target_height: target to sync to

        Returns:
            False if the fork point was not found, and we need to do a long sync. True otherwise.

        """
        # Don't trigger multiple batch syncs to the same peer

        if self.sync_store.is_backtrack_syncing(node_id=peer.peer_node_id):
            return True  # Don't batch sync, we are already in progress of a backtrack sync
        if peer.peer_node_id in self.sync_store.batch_syncing:
            return True  # Don't trigger a long sync
        self.sync_store.batch_syncing.add(peer.peer_node_id)

        self.log.info(f"Starting batch short sync from {start_height} to height {target_height}")
        if start_height > 0:
            # Probe the first block: if its parent is unknown to us, the fork
            # point is deeper than this window and a long sync is required.
            first = await peer.call_api(
                FullNodeAPI.request_block, full_node_protocol.RequestBlock(uint32(start_height), False)
            )
            if first is None or not isinstance(first, full_node_protocol.RespondBlock):
                self.sync_store.batch_syncing.remove(peer.peer_node_id)
                self.log.error(f"Error short batch syncing, could not fetch block at height {start_height}")
                return False
            if not self.blockchain.contains_block(first.block.prev_header_hash):
                self.log.info("Batch syncing stopped, this is a deep chain")
                self.sync_store.batch_syncing.remove(peer.peer_node_id)
                # First sb not connected to our blockchain, do a long sync instead
                return False

        batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
        # Cancel any in-flight sub-epoch segment creation before mutating the chain.
        if self._segment_task is not None and (not self._segment_task.done()):
            try:
                self._segment_task.cancel()
            except Exception as e:
                self.log.warning(f"failed to cancel segment task {e}")
            self._segment_task = None

        try:
            peer_info = peer.get_peer_logging()
            # Fetch and validate blocks in windows of batch_size.
            for height in range(start_height, target_height, batch_size):
                end_height = min(target_height, height + batch_size)
                request = RequestBlocks(uint32(height), uint32(end_height), True)
                response = await peer.call_api(FullNodeAPI.request_blocks, request)
                if not response:
                    raise ValueError(f"Error short batch syncing, invalid/no response for {height}-{end_height}")
                async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
                    state_change_summary: Optional[StateChangeSummary]
                    prev_b = None
                    if response.blocks[0].height > 0:
                        prev_b = await self.blockchain.get_block_record_from_db(response.blocks[0].prev_header_hash)
                        assert prev_b is not None
                    new_slot = len(response.blocks[0].finished_sub_slots) > 0
                    ssi, diff = get_next_sub_slot_iters_and_difficulty(
                        self.constants, new_slot, prev_b, self.blockchain
                    )
                    success, state_change_summary, ssi, diff, _, _ = await self.add_block_batch(
                        response.blocks, peer_info, None, ssi, diff
                    )
                    if not success:
                        raise ValueError(f"Error short batch syncing, failed to validate blocks {height}-{end_height}")
                if state_change_summary is not None:
                    try:
                        peak_fb: Optional[FullBlock] = await self.blockchain.get_full_peak()
                        assert peak_fb is not None
                        ppp_result: PeakPostProcessingResult = await self.peak_post_processing(
                            peak_fb,
                            state_change_summary,
                            peer,
                        )
                        await self.peak_post_processing_2(peak_fb, peer, state_change_summary, ppp_result)
                    except Exception:
                        # Still do post processing after cancel (or exception)
                        peak_fb = await self.blockchain.get_full_peak()
                        assert peak_fb is not None
                        await self.peak_post_processing(peak_fb, state_change_summary, peer)
                        raise
                    finally:
                        self.log.info(f"Added blocks {height}-{end_height}")
        except (asyncio.CancelledError, Exception):
            # Drop the per-peer batch-sync marker on any failure, then re-raise.
            self.sync_store.batch_syncing.remove(peer.peer_node_id)
            raise
        self.sync_store.batch_syncing.remove(peer.peer_node_id)
        return True
638
+
639
+ async def short_sync_backtrack(
640
+ self, peer: WSChiaConnection, peak_height: uint32, target_height: uint32, target_unf_hash: bytes32
641
+ ) -> bool:
642
+ """
643
+ Performs a backtrack sync, where blocks are downloaded one at a time from newest to oldest. If we do not
644
+ find the fork point 5 deeper than our peak, we return False and do a long sync instead.
645
+
646
+ Args:
647
+ peer: peer to sync from
648
+ peak_height: height of our peak
649
+ target_height: target height
650
+ target_unf_hash: partial hash of the unfinished block of the target
651
+
652
+ Returns:
653
+ True iff we found the fork point, and we do not need to long sync.
654
+ """
655
+ try:
656
+ self.sync_store.increment_backtrack_syncing(node_id=peer.peer_node_id)
657
+
658
+ unfinished_block: Optional[UnfinishedBlock] = self.full_node_store.get_unfinished_block(target_unf_hash)
659
+ curr_height: int = target_height
660
+ found_fork_point = False
661
+ blocks = []
662
+ while curr_height > peak_height - 5:
663
+ # If we already have the unfinished block, don't fetch the transactions. In the normal case, we will
664
+ # already have the unfinished block, from when it was broadcast, so we just need to download the header,
665
+ # but not the transactions
666
+ fetch_tx: bool = unfinished_block is None or curr_height != target_height
667
+ curr = await peer.call_api(
668
+ FullNodeAPI.request_block, full_node_protocol.RequestBlock(uint32(curr_height), fetch_tx)
669
+ )
670
+ if curr is None:
671
+ raise ValueError(f"Failed to fetch block {curr_height} from {peer.get_peer_logging()}, timed out")
672
+ if curr is None or not isinstance(curr, full_node_protocol.RespondBlock):
673
+ raise ValueError(
674
+ f"Failed to fetch block {curr_height} from {peer.get_peer_logging()}, wrong type {type(curr)}"
675
+ )
676
+ blocks.append(curr.block)
677
+ if self.blockchain.contains_block(curr.block.prev_header_hash) or curr_height == 0:
678
+ found_fork_point = True
679
+ break
680
+ curr_height -= 1
681
+ if found_fork_point:
682
+ for block in reversed(blocks):
683
+ # when syncing, we won't share any signatures with the
684
+ # mempool, so there's no need to pass in the BLS cache.
685
+ await self.add_block(block, peer)
686
+ except (asyncio.CancelledError, Exception):
687
+ self.sync_store.decrement_backtrack_syncing(node_id=peer.peer_node_id)
688
+ raise
689
+
690
+ self.sync_store.decrement_backtrack_syncing(node_id=peer.peer_node_id)
691
+ return found_fork_point
692
+
693
+ async def _refresh_ui_connections(self, sleep_before: float = 0) -> None:
694
+ if sleep_before > 0:
695
+ await asyncio.sleep(sleep_before)
696
+ self._state_changed("peer_changed_peak")
697
+
698
    async def new_peak(self, request: full_node_protocol.NewPeak, peer: WSChiaConnection) -> None:
        """
        We have received a notification of a new peak from a peer. This happens either when we have just connected,
        or when the peer has updated their peak.

        Decides between: ignoring the peak (already known / lighter than ours),
        a backtrack sync, a batch sync, or scheduling a full long sync.

        Args:
            request: information about the new peak
            peer: peer that sent the message

        """

        try:
            seen_header_hash = self.sync_store.seen_header_hash(request.header_hash)
            # Updates heights in the UI. Sleeps 1.5s before, so other peers have time to update their peaks as well.
            # Limit to 3 refreshes.
            if not seen_header_hash and len(self._ui_tasks) < 3:
                self._ui_tasks.add(asyncio.create_task(self._refresh_ui_connections(1.5)))
            # Prune completed connect tasks
            self._ui_tasks = set(filter(lambda t: not t.done(), self._ui_tasks))
        except Exception as e:
            # UI refresh is best-effort; never let it break peak handling.
            self.log.warning(f"Exception UI refresh task: {e}")

        # Store this peak/peer combination in case we want to sync to it, and to keep track of peers
        self.sync_store.peer_has_block(request.header_hash, peer.peer_node_id, request.weight, request.height, True)

        if self.blockchain.contains_block(request.header_hash):
            return None

        # Not interested in less heavy peaks
        peak: Optional[BlockRecord] = self.blockchain.get_peak()
        curr_peak_height = uint32(0) if peak is None else peak.height
        if peak is not None and peak.weight > request.weight:
            return None

        if self.sync_store.get_sync_mode():
            # If peer connects while we are syncing, check if they have the block we are syncing towards
            target_peak = self.sync_store.target_peak
            if target_peak is not None and request.header_hash != target_peak.header_hash:
                peak_peers: Set[bytes32] = self.sync_store.get_peers_that_have_peak([target_peak.header_hash])
                # Don't ask if we already know this peer has the peak
                if peer.peer_node_id not in peak_peers:
                    target_peak_response: Optional[RespondBlock] = await peer.call_api(
                        FullNodeAPI.request_block,
                        full_node_protocol.RequestBlock(target_peak.height, False),
                        timeout=10,
                    )
                    if target_peak_response is not None and isinstance(target_peak_response, RespondBlock):
                        self.sync_store.peer_has_block(
                            target_peak.header_hash,
                            peer.peer_node_id,
                            target_peak_response.block.weight,
                            target_peak.height,
                            False,
                        )
        else:
            if (
                curr_peak_height <= request.height
                and request.height <= curr_peak_height + self.config["short_sync_blocks_behind_threshold"]
            ):
                # This is the normal case of receiving the next block
                if await self.short_sync_backtrack(
                    peer, curr_peak_height, request.height, request.unfinished_reward_block_hash
                ):
                    return None

            if request.height < self.constants.WEIGHT_PROOF_RECENT_BLOCKS:
                # This is the case of syncing up more than a few blocks, at the start of the chain
                self.log.debug("Doing batch sync, no backup")
                await self.short_sync_batch(peer, uint32(0), request.height)
                return None

            if (
                curr_peak_height <= request.height
                and request.height < curr_peak_height + self.config["sync_blocks_behind_threshold"]
            ):
                # This case of being behind but not by so much
                # Start 6 blocks below our peak to tolerate small reorgs.
                if await self.short_sync_batch(peer, uint32(max(curr_peak_height - 6, 0)), request.height):
                    return None

            # This is the either the case where we were not able to sync successfully (for example, due to the fork
            # point being in the past), or we are very far behind. Performs a long sync.
            self._sync_task = asyncio.create_task(self._sync())
780
+
781
    async def send_peak_to_timelords(
        self, peak_block: Optional[FullBlock] = None, peer: Optional[WSChiaConnection] = None
    ) -> None:
        """
        Sends current peak to timelords

        Builds a NewPeakTimelord message from the current (or given) peak and
        sends it to one peer, or broadcasts to all connected timelords.
        """
        if peak_block is None:
            peak_block = await self.blockchain.get_full_peak()
        if peak_block is not None:
            peak = self.blockchain.block_record(peak_block.header_hash)
            difficulty = self.blockchain.get_next_difficulty(peak.header_hash, False)
            ses: Optional[SubEpochSummary] = next_sub_epoch_summary(
                self.constants,
                self.blockchain,
                peak.required_iters,
                peak_block,
                True,
            )
            recent_rc = self.blockchain.get_recent_reward_challenges()

            # Walk back to the most recent challenge block or sub-slot start to
            # find the iteration count the timelord should anchor on.
            curr = peak
            while not curr.is_challenge_block(self.constants) and not curr.first_in_sub_slot:
                curr = self.blockchain.block_record(curr.prev_hash)

            if curr.is_challenge_block(self.constants):
                last_csb_or_eos = curr.total_iters
            else:
                last_csb_or_eos = curr.ip_sub_slot_total_iters(self.constants)

            # Determine whether we are past a sub-epoch boundary whose summary
            # has not yet been included in any block on this chain.
            curr = peak
            passed_ses_height_but_not_yet_included = True
            while (curr.height % self.constants.SUB_EPOCH_BLOCKS) != 0:
                if curr.sub_epoch_summary_included:
                    passed_ses_height_but_not_yet_included = False
                curr = self.blockchain.block_record(curr.prev_hash)
            if curr.sub_epoch_summary_included or curr.height == 0:
                passed_ses_height_but_not_yet_included = False

            timelord_new_peak: timelord_protocol.NewPeakTimelord = timelord_protocol.NewPeakTimelord(
                peak_block.reward_chain_block,
                difficulty,
                peak.deficit,
                peak.sub_slot_iters,
                ses,
                recent_rc,
                last_csb_or_eos,
                passed_ses_height_but_not_yet_included,
            )

            msg = make_msg(ProtocolMessageTypes.new_peak_timelord, timelord_new_peak)
            if peer is None:
                await self.server.send_to_all([msg], NodeType.TIMELORD)
            else:
                await self.server.send_to_specific([msg], peer.peer_node_id)
835
+
836
+ async def synced(self, block_is_current_at: Optional[uint64] = None) -> bool:
837
+ if block_is_current_at is None:
838
+ block_is_current_at = uint64(int(time.time() - 60 * 7))
839
+ if "simulator" in str(self.config.get("selected_network")):
840
+ return True # sim is always synced because it has no peers
841
+ curr: Optional[BlockRecord] = self.blockchain.get_peak()
842
+ if curr is None:
843
+ return False
844
+
845
+ while curr is not None and not curr.is_transaction_block:
846
+ curr = self.blockchain.try_block_record(curr.prev_hash)
847
+
848
+ if (
849
+ curr is None
850
+ or curr.timestamp is None
851
+ or curr.timestamp < block_is_current_at
852
+ or self.sync_store.get_sync_mode()
853
+ ):
854
+ return False
855
+ else:
856
+ return True
857
+
858
    async def on_connect(self, connection: WSChiaConnection) -> None:
        """
        Whenever we connect to another node / wallet, send them our current heads. Also send heads to farmers
        and challenges to timelords.
        """

        self._state_changed("add_connection")
        self._state_changed("sync_mode")
        if self.full_node_peers is not None:
            asyncio.create_task(self.full_node_peers.on_connect(connection))

        # Before manage() finishes there is no peak/mempool to advertise.
        if self.initialized is False:
            return None

        if connection.connection_type is NodeType.FULL_NODE:
            # Send filter to node and request mempool items that are not in it (Only if we are currently synced)
            synced = await self.synced()
            peak_height = self.blockchain.get_peak_height()
            if synced and peak_height is not None:
                my_filter = self.mempool_manager.get_filter()
                mempool_request = full_node_protocol.RequestMempoolTransactions(my_filter)

                msg = make_msg(ProtocolMessageTypes.request_mempool_transactions, mempool_request)
                await connection.send_message(msg)

        peak_full: Optional[FullBlock] = await self.blockchain.get_full_peak()

        if peak_full is not None:
            peak: BlockRecord = self.blockchain.block_record(peak_full.header_hash)
            if connection.connection_type is NodeType.FULL_NODE:
                request_node = full_node_protocol.NewPeak(
                    peak.header_hash,
                    peak.height,
                    peak.weight,
                    peak.height,
                    peak_full.reward_chain_block.get_unfinished().get_hash(),
                )
                await connection.send_message(make_msg(ProtocolMessageTypes.new_peak, request_node))

            elif connection.connection_type is NodeType.WALLET:
                # If connected to a wallet, send the Peak
                request_wallet = wallet_protocol.NewPeakWallet(
                    peak.header_hash,
                    peak.height,
                    peak.weight,
                    peak.height,
                )
                await connection.send_message(make_msg(ProtocolMessageTypes.new_peak_wallet, request_wallet))
            elif connection.connection_type is NodeType.TIMELORD:
                await self.send_peak_to_timelords()
908
+
909
+ async def on_disconnect(self, connection: WSChiaConnection) -> None:
910
+ self.log.info(f"peer disconnected {connection.get_peer_logging()}")
911
+ self._state_changed("close_connection")
912
+ self._state_changed("sync_mode")
913
+ if self.sync_store is not None:
914
+ self.sync_store.peer_disconnected(connection.peer_node_id)
915
+ # Remove all ph | coin id subscription for this peer
916
+ self.subscriptions.remove_peer(connection.peer_node_id)
917
+
918
    async def _sync(self) -> None:
        """
        Performs a full sync of the blockchain up to the peak.
        - Wait a few seconds for peers to send us their peaks
        - Select the heaviest peak, and request a weight proof from a peer with that peak
        - Validate the weight proof, and disconnect from the peer if invalid
        - Find the fork point to see where to start downloading blocks
        - Download blocks in batch (and in parallel) and verify them one at a time
        - Disconnect peers that provide invalid blocks or don't have the blocks
        """
        # Ensure we are only syncing once and not double calling this method
        if self.sync_store.get_sync_mode():
            return None

        if self.sync_store.get_long_sync():
            self.log.debug("already in long sync")
            return None

        self.sync_store.set_long_sync(True)
        self.log.debug("long sync started")
        try:
            self.log.info("Starting to perform sync.")

            # Wait until we have 3 peaks or up to a max of 30 seconds
            # (polling every 0.1s, hence the * 10).
            max_iterations = int(self.config.get("max_sync_wait", 30)) * 10

            self.log.info(f"Waiting to receive peaks from peers. (timeout: {max_iterations/10}s)")
            peaks = []
            for i in range(max_iterations):
                peaks = [peak.header_hash for peak in self.sync_store.get_peak_of_each_peer().values()]
                if len(self.sync_store.get_peers_that_have_peak(peaks)) < 3:
                    if self._shut_down:
                        return None
                    await asyncio.sleep(0.1)
                    continue
                break

            self.log.info(f"Collected a total of {len(peaks)} peaks.")

            # Based on responses from peers about the current peaks, see which peak is the heaviest
            # (similar to longest chain rule).
            target_peak = self.sync_store.get_heaviest_peak()

            if target_peak is None:
                raise RuntimeError("Not performing sync, no peaks collected")

            self.sync_store.target_peak = target_peak

            self.log.info(f"Selected peak {target_peak}")
            # Check which peers are updated to this height

            # Ask every connected full node for the target block in parallel and
            # record which of them actually have it.
            peers = self.server.get_connections(NodeType.FULL_NODE)
            coroutines = []
            for peer in peers:
                coroutines.append(
                    peer.call_api(
                        FullNodeAPI.request_block,
                        full_node_protocol.RequestBlock(target_peak.height, True),
                        timeout=10,
                    )
                )
            for i, target_peak_response in enumerate(await asyncio.gather(*coroutines)):
                if target_peak_response is not None and isinstance(target_peak_response, RespondBlock):
                    self.sync_store.peer_has_block(
                        target_peak.header_hash, peers[i].peer_node_id, target_peak.weight, target_peak.height, False
                    )
            # TODO: disconnect from peer which gave us the heaviest_peak, if nobody has the peak
            fork_point, summaries = await self.request_validate_wp(
                target_peak.header_hash, target_peak.height, target_peak.weight
            )
            # Ensures that the fork point does not change
            async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
                await self.blockchain.warmup(fork_point)
                await self.sync_from_fork_point(fork_point, target_peak.height, target_peak.header_hash, summaries)
        except asyncio.CancelledError:
            self.log.warning("Syncing failed, CancelledError")
        except Exception as e:
            tb = traceback.format_exc()
            self.log.error(f"Error with syncing: {type(e)}{tb}")
        finally:
            if self._shut_down:
                return None
            # Always clear sync state and notify peers, even after failures.
            await self._finish_sync()
1001
+
1002
async def request_validate_wp(
    self, peak_header_hash: bytes32, peak_height: uint32, peak_weight: uint128
) -> Tuple[uint32, List[SubEpochSummary]]:
    """
    Request a weight proof for the given peak from a random peer that has it,
    validate it, and enter sync mode on success.

    Args:
        peak_header_hash: header hash of the target peak
        peak_height: height of the target peak
        peak_weight: weight of the target peak

    Returns:
        (fork point height, sub-epoch summaries extracted from the proof)

    Raises:
        RuntimeError: no weight proof handler, the proof did not arrive in
            time, or the proof's tip does not match the requested
            height/weight, or the local peak is already heavier.
        ValueError: already caught up, the peak is in the bad-peak cache, or
            the weight proof fails validation.
    """
    if self.weight_proof_handler is None:
        raise RuntimeError("Weight proof handler is None")
    peers_with_peak = self.get_peers_with_peak(peak_header_hash)
    # Request weight proof from a random peer
    self.log.info(f"Total of {len(peers_with_peak)} peers with peak {peak_height}")
    weight_proof_peer: WSChiaConnection = random.choice(peers_with_peak)
    self.log.info(
        f"Requesting weight proof from peer {weight_proof_peer.peer_info.host} up to height {peak_height}"
    )
    cur_peak: Optional[BlockRecord] = self.blockchain.get_peak()
    if cur_peak is not None and peak_weight <= cur_peak.weight:
        raise ValueError("Not performing sync, already caught up.")
    # Weight proofs are large and slow for the peer to build; the timeout is
    # configurable (default 360s).
    wp_timeout = self.config.get("weight_proof_timeout", 360)
    self.log.debug(f"weight proof timeout is {wp_timeout} sec")
    request = full_node_protocol.RequestProofOfWeight(peak_height, peak_header_hash)
    response = await weight_proof_peer.call_api(FullNodeAPI.request_proof_of_weight, request, timeout=wp_timeout)
    # Disconnect from this peer, because they have not behaved properly
    if response is None or not isinstance(response, full_node_protocol.RespondProofOfWeight):
        await weight_proof_peer.close(600)
        raise RuntimeError(f"Weight proof did not arrive in time from peer: {weight_proof_peer.peer_info.host}")
    if response.wp.recent_chain_data[-1].reward_chain_block.height != peak_height:
        await weight_proof_peer.close(600)
        raise RuntimeError(f"Weight proof had the wrong height: {weight_proof_peer.peer_info.host}")
    if response.wp.recent_chain_data[-1].reward_chain_block.weight != peak_weight:
        await weight_proof_peer.close(600)
        raise RuntimeError(f"Weight proof had the wrong weight: {weight_proof_peer.peer_info.host}")
    if self.in_bad_peak_cache(response.wp):
        raise ValueError("Weight proof failed bad peak cache validation")
    # dont sync to wp if local peak is heavier,
    # dont ban peer, we asked for this peak
    current_peak = self.blockchain.get_peak()
    if current_peak is not None:
        if response.wp.recent_chain_data[-1].reward_chain_block.weight <= current_peak.weight:
            raise RuntimeError(
                f"current peak is heavier than weight proof peak: {weight_proof_peer.peer_info.host}"
            )
    try:
        validated, fork_point, summaries = await self.weight_proof_handler.validate_weight_proof(response.wp)
    except Exception as e:
        await weight_proof_peer.close(600)
        raise ValueError(f"Weight proof validation threw an error {e}") from e
    if not validated:
        await weight_proof_peer.close(600)
        raise ValueError("Weight proof validation failed")
    self.log.info(f"Re-checked peers: total of {len(peers_with_peak)} peers with peak {peak_height}")
    self.sync_store.set_sync_mode(True)
    self._state_changed("sync_mode")
    return fork_point, summaries
1056
async def sync_from_fork_point(
    self,
    fork_point_height: uint32,
    target_peak_sb_height: uint32,
    peak_hash: bytes32,
    summaries: List[SubEpochSummary],
) -> None:
    """
    Download all blocks from fork_point_height up to target_peak_sb_height
    from peers that have peak_hash, validating and adding them in batches.

    Fetching and validation run as two concurrent tasks connected by a
    bounded queue, so downloads overlap with validation. A validation
    failure closes the offending peer and aborts the sync.
    """
    # Max number of fetched-but-unvalidated batches buffered between the
    # fetcher and the validator tasks.
    buffer_size = 4
    self.log.info(f"Start syncing from fork point at {fork_point_height} up to {target_peak_sb_height}")
    peers_with_peak: List[WSChiaConnection] = self.get_peers_with_peak(peak_hash)
    fork_point_height = await check_fork_next_block(
        self.blockchain, fork_point_height, peers_with_peak, node_next_block_check
    )
    batch_size = self.constants.MAX_BLOCK_COUNT_PER_REQUESTS
    counter = 0
    if fork_point_height != 0:
        # warmup the cache: walk back from the fork point, loading block
        # records from the DB into the in-memory cache, until we are past a
        # sub-epoch summary and have loaded enough records
        curr = self.blockchain.height_to_block_record(fork_point_height)
        while (
            curr.sub_epoch_summary_included is None
            or counter < 3 * self.constants.MAX_SUB_SLOT_BLOCKS + self.constants.MIN_BLOCKS_PER_CHALLENGE_BLOCK + 3
        ):
            res = await self.blockchain.get_block_record_from_db(curr.prev_hash)
            if res is None:
                break
            curr = res
            self.blockchain.add_block_record(curr)
            counter += 1

    # normally "fork_point" or "fork_height" refers to the first common
    # block between the main chain and the fork. Here "fork_point_height"
    # seems to refer to the first diverging block

    async def fetch_block_batches(
        batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]]
    ) -> None:
        # Producer: requests consecutive block ranges from random peers that
        # have the peak, pushing each fetched batch onto batch_queue.
        start_height, end_height = 0, 0
        new_peers_with_peak: List[WSChiaConnection] = peers_with_peak[:]
        try:
            # block request ranges are *inclusive*, this requires some
            # gymnastics of this range (+1 to make it exclusive, like normal
            # ranges) and then -1 when forming the request message
            for start_height in range(fork_point_height, target_peak_sb_height + 1, batch_size):
                end_height = min(target_peak_sb_height, start_height + batch_size - 1)
                request = RequestBlocks(uint32(start_height), uint32(end_height), True)
                fetched = False
                for peer in random.sample(new_peers_with_peak, len(new_peers_with_peak)):
                    if peer.closed:
                        continue
                    response = await peer.call_api(FullNodeAPI.request_blocks, request, timeout=30)
                    if response is None:
                        # peer did not answer in time: disconnect it and try the next one
                        await peer.close()
                    elif isinstance(response, RespondBlocks):
                        await batch_queue.put((peer, response.blocks))
                        fetched = True
                        break
                if fetched is False:
                    self.log.error(f"failed fetching {start_height} to {end_height} from peers")
                    return
                if self.sync_store.peers_changed.is_set():
                    # peer set changed during the download; refresh our candidates
                    new_peers_with_peak = self.get_peers_with_peak(peak_hash)
                    self.sync_store.peers_changed.clear()
        except Exception as e:
            self.log.error(f"Exception fetching {start_height} to {end_height} from peer {e}")
        finally:
            # finished signal with None
            await batch_queue.put(None)

    async def validate_block_batches(
        inner_batch_queue: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]]
    ) -> None:
        # Consumer: validates and adds each fetched batch, tracking
        # sub-slot-iters/difficulty/prev-SES state across batches.
        fork_info: Optional[ForkInfo] = None
        if fork_point_height == 0:
            ssi = self.constants.SUB_SLOT_ITERS_STARTING
            diff = self.constants.DIFFICULTY_STARTING
            prev_ses_block = None
        else:
            prev_b_hash = self.blockchain.height_to_hash(fork_point_height)
            assert prev_b_hash is not None
            prev_b = await self.blockchain.get_full_block(prev_b_hash)
            assert prev_b is not None
            ssi, diff, prev_ses_block = await self.get_sub_slot_iters_difficulty_ses_block(prev_b, None, None)
        # blocks/s rate, computed over windows of >100 blocks for logging
        block_rate = 0
        block_rate_time = time.monotonic()
        block_rate_height = -1
        while True:
            res: Optional[Tuple[WSChiaConnection, List[FullBlock]]] = await inner_batch_queue.get()
            if res is None:
                # None is the producer's "finished" sentinel
                self.log.debug("done fetching blocks")
                return None
            peer, blocks = res
            start_height = blocks[0].height
            end_height = blocks[-1].height

            if block_rate_height == -1:
                block_rate_height = start_height

            # in case we're validating a reorg fork (i.e. not extending the
            # main chain), we need to record the coin set from that fork in
            # fork_info. Otherwise validation is very expensive, especially
            # for deep reorgs
            peak: Optional[BlockRecord]
            if fork_info is None:
                peak = self.blockchain.get_peak()
                extending_main_chain: bool = peak is None or (
                    peak.header_hash == blocks[0].prev_header_hash or peak.header_hash == blocks[0].header_hash
                )
                # if we're simply extending the main chain, it's important
                # *not* to pass in a ForkInfo object, as it can potentially
                # accrue a large state (with no value, since we can validate
                # against the CoinStore)
                if not extending_main_chain:
                    if fork_point_height == 0:
                        fork_info = ForkInfo(-1, -1, self.constants.GENESIS_CHALLENGE)
                    else:
                        fork_hash = self.blockchain.height_to_hash(uint32(fork_point_height - 1))
                        assert fork_hash is not None
                        fork_info = ForkInfo(fork_point_height - 1, fork_point_height - 1, fork_hash)

            success, state_change_summary, ssi, diff, prev_ses_block, err = await self.add_block_batch(
                blocks,
                peer.get_peer_logging(),
                fork_info,
                ssi,
                diff,
                prev_ses_block,
                summaries,
            )
            if success is False:
                await peer.close(600)
                raise ValueError(f"Failed to validate block batch {start_height} to {end_height}")
            if end_height - block_rate_height > 100:
                now = time.monotonic()
                block_rate = int((end_height - block_rate_height) // (now - block_rate_time))
                block_rate_time = now
                block_rate_height = end_height

            self.log.info(f"Added blocks {start_height} to {end_height} ({block_rate} blocks/s)")
            peak = self.blockchain.get_peak()
            if state_change_summary is not None:
                assert peak is not None
                # Hints must be added to the DB. The other post-processing tasks are not required when syncing
                hints_to_add, _ = get_hints_and_subscription_coin_ids(
                    state_change_summary,
                    self.subscriptions.has_coin_subscription,
                    self.subscriptions.has_puzzle_subscription,
                )
                await self.hint_store.add_hints(hints_to_add)
            # Note that end_height is not necessarily the peak at this
            # point. In case of a re-org, it may even be significantly
            # higher than _peak_height, and still not be the peak.
            # clean_block_record() will not necessarily honor this cut-off
            # height, in that case.
            self.blockchain.clean_block_record(end_height - self.constants.BLOCKS_CACHE_SIZE)

    batch_queue_input: asyncio.Queue[Optional[Tuple[WSChiaConnection, List[FullBlock]]]] = asyncio.Queue(
        maxsize=buffer_size
    )
    fetch_task = asyncio.Task(fetch_block_batches(batch_queue_input))
    validate_task = asyncio.Task(validate_block_batches(batch_queue_input))
    try:
        with log_exceptions(log=self.log, message="sync from fork point failed"):
            await asyncio.gather(fetch_task, validate_task)
    except Exception:
        assert validate_task.done()
        fetch_task.cancel()  # no need to cancel validate_task, if we end up here validate_task is already done
1223
def get_peers_with_peak(self, peak_hash: bytes32) -> List[WSChiaConnection]:
    """Return the currently connected peers known to have the block `peak_hash`."""
    ids_with_peak: Set[bytes32] = self.sync_store.get_peers_that_have_peak([peak_hash])
    if not ids_with_peak:
        self.log.warning(f"Not syncing, no peers with header_hash {peak_hash} ")
        return []
    connections: List[WSChiaConnection] = []
    for connection in self.server.all_connections.values():
        if connection.peer_node_id in ids_with_peak:
            connections.append(connection)
    return connections
1230
async def _wallets_sync_task_handler(self) -> None:
    """Forward queued wallet updates to subscribed wallets until shutdown."""
    while not self._shut_down:
        try:
            # Block on the queue, then push the update out to the wallets;
            # a failure on one update must not stop the loop.
            await self.update_wallets(await self.wallet_sync_queue.get())
        except Exception:
            self.log.exception("Wallet sync task failure")
1239
async def update_wallets(self, wallet_update: WalletUpdate) -> None:
    """Send coin-state updates to subscribed wallet peers, then broadcast the new peak to all wallets."""
    self.log.debug(
        f"update_wallets - fork_height: {wallet_update.fork_height}, peak_height: {wallet_update.peak.height}"
    )
    # Group changed coin states by the peer subscribed to them (by coin id,
    # puzzle hash, or hint puzzle hash)
    peer_changes: Dict[bytes32, Set[CoinState]] = {}
    for record in wallet_update.coin_records:
        coin_name = record.name
        interested_peers = self.subscriptions.peers_for_coin_id(coin_name)
        interested_peers |= self.subscriptions.peers_for_puzzle_hash(record.coin.puzzle_hash)
        hint_ph = wallet_update.hints.get(coin_name)
        if hint_ph is not None:
            interested_peers |= self.subscriptions.peers_for_puzzle_hash(hint_ph)
        for peer_id in interested_peers:
            peer_changes.setdefault(peer_id, set()).add(record.coin_state)

    for peer_id, coin_states in peer_changes.items():
        ws_connection = self.server.all_connections.get(peer_id)
        if ws_connection is None:
            continue
        update = CoinStateUpdate(
            wallet_update.peak.height,
            wallet_update.fork_height,
            wallet_update.peak.header_hash,
            list(coin_states),
        )
        await ws_connection.send_message(make_msg(ProtocolMessageTypes.coin_state_update, update))

    # Tell wallets about the new peak
    await self.server.send_to_all(
        [
            make_msg(
                ProtocolMessageTypes.new_peak_wallet,
                wallet_protocol.NewPeakWallet(
                    wallet_update.peak.header_hash,
                    wallet_update.peak.height,
                    wallet_update.peak.weight,
                    wallet_update.fork_height,
                ),
            )
        ],
        NodeType.WALLET,
    )
1277
async def add_block_batch(
    self,
    all_blocks: List[FullBlock],
    peer_info: PeerInfo,
    fork_info: Optional[ForkInfo],
    current_ssi: uint64,
    current_difficulty: uint64,
    prev_ses_block: Optional[BlockRecord] = None,
    wp_summaries: Optional[List[SubEpochSummary]] = None,
) -> Tuple[bool, Optional[StateChangeSummary], uint64, uint64, Optional[BlockRecord], Optional[Err]]:
    """
    Validate and add a contiguous batch of blocks to the blockchain.

    Returns a 6-tuple of (success, aggregated StateChangeSummary if the peak
    advanced, updated sub-slot-iters, updated difficulty, updated previous
    sub-epoch-summary block, error if any).
    """
    # Precondition: All blocks must be contiguous blocks, index i+1 must be the parent of index i
    # Returns a bool for success, as well as a StateChangeSummary if the peak was advanced

    # Skip the prefix of blocks already in the DB; validation starts at the
    # first missing block.
    blocks_to_validate: List[FullBlock] = []
    for i, block in enumerate(all_blocks):
        header_hash = block.header_hash
        block_rec = await self.blockchain.get_block_record_from_db(header_hash)
        if block_rec is None:
            blocks_to_validate = all_blocks[i:]
            break
        else:
            self.blockchain.add_block_record(block_rec)
            if block_rec.sub_epoch_summary_included:
                # already validated block, update sub slot iters, difficulty and prev sub epoch summary
                prev_ses_block = block_rec
                if block_rec.sub_epoch_summary_included.new_sub_slot_iters is not None:
                    current_ssi = block_rec.sub_epoch_summary_included.new_sub_slot_iters
                if block_rec.sub_epoch_summary_included.new_difficulty is not None:
                    current_difficulty = block_rec.sub_epoch_summary_included.new_difficulty

            if fork_info is None:
                continue
            # the below section updates the fork_info object, if
            # there is one.

            # TODO: it seems unnecessary to request overlapping block ranges
            # when syncing
            if block.height <= fork_info.peak_height:
                continue

            # we have already validated this block once, no need to do it again.
            # however, if this block is not part of the main chain, we need to
            # update the fork context with its additions and removals
            if self.blockchain.height_to_hash(block.height) == header_hash:
                # we're on the main chain, just fast-forward the fork height
                fork_info.reset(block.height, header_hash)
            else:
                # We have already validated the block, but if it's not part of the
                # main chain, we still need to re-run it to update the additions and
                # removals in fork_info.
                await self.blockchain.advance_fork_info(block, fork_info)
                await self.blockchain.run_single_block(block, fork_info)

    if len(blocks_to_validate) == 0:
        return True, None, current_ssi, current_difficulty, prev_ses_block, None

    # Validates signatures in multiprocessing since they take a while, and we don't have cached transactions
    # for these blocks (unlike during normal operation where we validate one at a time)
    pre_validate_start = time.monotonic()
    pre_validation_results: List[PreValidationResult] = await pre_validate_blocks_multiprocessing(
        self.blockchain.constants,
        self.blockchain,
        blocks_to_validate,
        self.blockchain.pool,
        {},
        sub_slot_iters=current_ssi,
        difficulty=current_difficulty,
        prev_ses_block=prev_ses_block,
        wp_summaries=wp_summaries,
        validate_signatures=True,
    )
    pre_validate_end = time.monotonic()
    pre_validate_time = pre_validate_end - pre_validate_start

    # Slow pre-validation (>10s) is logged at WARNING, otherwise DEBUG
    self.log.log(
        logging.WARNING if pre_validate_time > 10 else logging.DEBUG,
        f"Block pre-validation: {pre_validate_end - pre_validate_start:0.2f}s "
        f"CLVM: {sum(pvr.timing/1000.0 for pvr in pre_validation_results):0.2f}s "
        f"({len(blocks_to_validate)} blocks, start height: {blocks_to_validate[0].height})",
    )

    # Any pre-validation error fails the entire batch
    for i, block in enumerate(blocks_to_validate):
        if pre_validation_results[i].error is not None:
            self.log.error(
                f"Invalid block from peer: {peer_info} height {block.height} {Err(pre_validation_results[i].error)}"
            )
            return (
                False,
                None,
                current_ssi,
                current_difficulty,
                prev_ses_block,
                Err(pre_validation_results[i].error),
            )

    agg_state_change_summary: Optional[StateChangeSummary] = None
    block_record = await self.blockchain.get_block_record_from_db(blocks_to_validate[0].prev_header_hash)
    for i, block in enumerate(blocks_to_validate):
        assert pre_validation_results[i].required_iters is not None
        state_change_summary: Optional[StateChangeSummary]
        # when adding blocks in batches, we won't have any overlapping
        # signatures with the mempool. There won't be any cache hits, so
        # there's no need to pass the BLS cache in

        if len(block.finished_sub_slots) > 0:
            cc_sub_slot = block.finished_sub_slots[0].challenge_chain
            if cc_sub_slot.new_sub_slot_iters is not None or cc_sub_slot.new_difficulty is not None:
                # a new epoch starts here: pick up the new sub-slot-iters and
                # difficulty, and sanity-check them against what we compute
                expected_sub_slot_iters, expected_difficulty = get_next_sub_slot_iters_and_difficulty(
                    self.constants, True, block_record, self.blockchain
                )
                assert cc_sub_slot.new_sub_slot_iters is not None
                current_ssi = cc_sub_slot.new_sub_slot_iters
                assert cc_sub_slot.new_difficulty is not None
                current_difficulty = cc_sub_slot.new_difficulty
                assert expected_sub_slot_iters == current_ssi
                assert expected_difficulty == current_difficulty
        result, error, state_change_summary = await self.blockchain.add_block(
            block, pre_validation_results[i], None, current_ssi, fork_info, prev_ses_block=prev_ses_block
        )

        if result == AddBlockResult.NEW_PEAK:
            # since this block just added a new peak, we've don't need any
            # fork history from fork_info anymore
            if fork_info is not None:
                fork_info.reset(block.height, block.header_hash)
            assert state_change_summary is not None
            # Since all blocks are contiguous, we can simply append the rollback changes and npc results
            if agg_state_change_summary is None:
                agg_state_change_summary = state_change_summary
            else:
                # Keeps the old, original fork_height, since the next blocks will have fork height h-1
                # Groups up all state changes into one
                agg_state_change_summary = StateChangeSummary(
                    state_change_summary.peak,
                    agg_state_change_summary.fork_height,
                    agg_state_change_summary.rolled_back_records + state_change_summary.rolled_back_records,
                    agg_state_change_summary.removals + state_change_summary.removals,
                    agg_state_change_summary.additions + state_change_summary.additions,
                    agg_state_change_summary.new_rewards + state_change_summary.new_rewards,
                )
        elif result == AddBlockResult.INVALID_BLOCK or result == AddBlockResult.DISCONNECTED_BLOCK:
            if error is not None:
                self.log.error(f"Error: {error}, Invalid block from peer: {peer_info} ")
            return False, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, error
        block_record = self.blockchain.block_record(block.header_hash)
        assert block_record is not None
        if block_record.sub_epoch_summary_included is not None:
            prev_ses_block = block_record
            if self.weight_proof_handler is not None:
                await self.weight_proof_handler.create_prev_sub_epoch_segments()
    if agg_state_change_summary is not None:
        self._state_changed("new_peak")
        self.log.debug(
            f"Total time for {len(blocks_to_validate)} blocks: {time.monotonic() - pre_validate_start}, "
            f"advanced: True"
        )
    return True, agg_state_change_summary, current_ssi, current_difficulty, prev_ses_block, None
1435
async def get_sub_slot_iters_difficulty_ses_block(
    self, block: FullBlock, ssi: Optional[uint64], diff: Optional[uint64]
) -> Tuple[uint64, uint64, Optional[BlockRecord]]:
    """
    Resolve the sub-slot-iters, difficulty and most recent
    sub-epoch-summary block that apply at `block`.

    Any of `ssi`/`diff` supplied by the caller is kept; missing values are
    filled in from the block's finished sub slots, or by walking back
    through ancestors in the DB, falling back to the starting constants at
    genesis.
    """
    prev_ses_block = None
    if ssi is None or diff is None:
        if block.height == 0:
            # genesis: use the network's starting parameters
            ssi = self.constants.SUB_SLOT_ITERS_STARTING
            diff = self.constants.DIFFICULTY_STARTING
        if ssi is None or diff is None:
            # a finished sub slot on the block itself may carry new values
            if len(block.finished_sub_slots) > 0:
                if block.finished_sub_slots[0].challenge_chain.new_difficulty is not None:
                    diff = block.finished_sub_slots[0].challenge_chain.new_difficulty
                if block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters is not None:
                    ssi = block.finished_sub_slots[0].challenge_chain.new_sub_slot_iters

    if block.height > 0:
        # walk back through ancestors until we have found the previous
        # sub-epoch-summary block and both ssi and diff are known
        prev_b = await self.blockchain.get_block_record_from_db(block.prev_header_hash)
        curr = prev_b
        while prev_ses_block is None or ssi is None or diff is None:
            assert curr is not None
            if curr.height == 0:
                if ssi is None or diff is None:
                    ssi = self.constants.SUB_SLOT_ITERS_STARTING
                    diff = self.constants.DIFFICULTY_STARTING
                if prev_ses_block is None:
                    prev_ses_block = curr
            if curr.sub_epoch_summary_included is not None:
                if prev_ses_block is None:
                    prev_ses_block = curr
                if ssi is None or diff is None:
                    if curr.sub_epoch_summary_included.new_difficulty is not None:
                        diff = curr.sub_epoch_summary_included.new_difficulty
                    if curr.sub_epoch_summary_included.new_sub_slot_iters is not None:
                        ssi = curr.sub_epoch_summary_included.new_sub_slot_iters
            curr = await self.blockchain.get_block_record_from_db(curr.prev_hash)
    assert ssi is not None
    assert diff is not None
    return ssi, diff, prev_ses_block
1474
async def _finish_sync(self) -> None:
    """
    Finalize sync by setting sync mode to False, clearing all sync information, and adding any final
    blocks that we have finalized recently.
    """
    self.log.info("long sync done")
    self.sync_store.set_long_sync(False)
    self.sync_store.set_sync_mode(False)
    self._state_changed("sync_mode")
    if self._server is None:
        return None

    async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
        current_peak: Optional[BlockRecord] = self.blockchain.get_peak()
        full_peak: Optional[FullBlock] = await self.blockchain.get_full_peak()
        if full_peak is not None:
            assert current_peak is not None
            # Run the normal post-processing that was skipped while syncing
            summary = StateChangeSummary(current_peak, uint32(max(current_peak.height - 1, 0)), [], [], [], [])
            post_result: PeakPostProcessingResult = await self.peak_post_processing(full_peak, summary, None)
            await self.peak_post_processing_2(full_peak, None, summary, post_result)

    if current_peak is not None and self.weight_proof_handler is not None:
        await self.weight_proof_handler.get_proof_of_weight(current_peak.header_hash)
    self._state_changed("block")
1501
def has_valid_pool_sig(self, block: Union[UnfinishedBlock, FullBlock]) -> bool:
    """
    Verify the pool signature on blocks that pay to the default (pre-farm)
    pool target. Blocks with any other pool target, genesis-adjacent blocks,
    and pool-key-less proofs of space are accepted without checking.
    """
    foliage_data = block.foliage.foliage_block_data
    if foliage_data.pool_target != PoolTarget(self.constants.GENESIS_PRE_FARM_POOL_PUZZLE_HASH, uint32(0)):
        return True
    if block.foliage.prev_block_hash == self.constants.GENESIS_CHALLENGE:
        return True
    pool_pk = block.reward_chain_block.proof_of_space.pool_public_key
    if pool_pk is None:
        return True
    assert foliage_data.pool_signature is not None
    return AugSchemeMPL.verify(
        pool_pk,
        bytes(foliage_data.pool_target),
        foliage_data.pool_signature,
    )
1517
async def signage_point_post_processing(
    self,
    request: full_node_protocol.RespondSignagePoint,
    peer: WSChiaConnection,
    ip_sub_slot: Optional[EndOfSubSlotBundle],
) -> None:
    """
    Broadcast a freshly finished signage point to other full nodes and to
    farmers (with the difficulty / sub-slot-iters farmers should use).

    Args:
        request: the finished signage point's CC and RC VDFs
        peer: the peer this signage point came from (excluded from the
            full-node broadcast)
        ip_sub_slot: the peak's infusion-point sub slot, used to detect
            whether this signage point is in a new sub slot
    """
    self.log.info(
        f"⏲️ Finished signage point {request.index_from_challenge}/"
        f"{self.constants.NUM_SPS_SUB_SLOT}: "
        f"CC: {request.challenge_chain_vdf.output.get_hash().hex()} "
        f"RC: {request.reward_chain_vdf.output.get_hash().hex()} "
    )
    # Record the arrival time for this signage point index
    self.signage_point_times[request.index_from_challenge] = time.time()
    sub_slot_tuple = self.full_node_store.get_sub_slot(request.challenge_chain_vdf.challenge)
    prev_challenge: Optional[bytes32]
    if sub_slot_tuple is not None:
        prev_challenge = sub_slot_tuple[0].challenge_chain.challenge_chain_end_of_slot_vdf.challenge
    else:
        prev_challenge = None

    # Notify nodes of the new signage point
    broadcast = full_node_protocol.NewSignagePointOrEndOfSubSlot(
        prev_challenge,
        request.challenge_chain_vdf.challenge,
        request.index_from_challenge,
        request.reward_chain_vdf.challenge,
    )
    msg = make_msg(ProtocolMessageTypes.new_signage_point_or_end_of_sub_slot, broadcast)
    await self.server.send_to_all([msg], NodeType.FULL_NODE, peer.peer_node_id)

    peak = self.blockchain.get_peak()
    if peak is not None and peak.height > self.constants.MAX_SUB_SLOT_BLOCKS:
        sub_slot_iters = peak.sub_slot_iters
        difficulty = uint64(peak.weight - self.blockchain.block_record(peak.prev_hash).weight)
        # Makes sure to potentially update the difficulty if we are past the peak (into a new sub-slot)
        assert ip_sub_slot is not None
        if request.challenge_chain_vdf.challenge != ip_sub_slot.challenge_chain.get_hash():
            next_difficulty = self.blockchain.get_next_difficulty(peak.header_hash, True)
            next_sub_slot_iters = self.blockchain.get_next_slot_iters(peak.header_hash, True)
            difficulty = next_difficulty
            sub_slot_iters = next_sub_slot_iters
    else:
        # No (or very young) chain yet: fall back to the starting constants
        difficulty = self.constants.DIFFICULTY_STARTING
        sub_slot_iters = self.constants.SUB_SLOT_ITERS_STARTING

    # Notify farmers of the new signage point
    broadcast_farmer = farmer_protocol.NewSignagePoint(
        request.challenge_chain_vdf.challenge,
        request.challenge_chain_vdf.output.get_hash(),
        request.reward_chain_vdf.output.get_hash(),
        difficulty,
        sub_slot_iters,
        request.index_from_challenge,
        uint32(0) if peak is None else peak.height,
        sp_source_data=SignagePointSourceData(
            vdf_data=SPVDFSourceData(request.challenge_chain_vdf.output, request.reward_chain_vdf.output)
        ),
    )
    msg = make_msg(ProtocolMessageTypes.new_signage_point, broadcast_farmer)
    await self.server.send_to_all([msg], NodeType.FARMER)

    self._state_changed("signage_point", {"broadcast_farmer": broadcast_farmer})
1580
async def peak_post_processing(
    self,
    block: FullBlock,
    state_change_summary: StateChangeSummary,
    peer: Optional[WSChiaConnection],
) -> PeakPostProcessingResult:
    """
    Must be called under self.blockchain.priority_mutex. This updates the internal state of the full node with the
    latest peak information. It also notifies peers about the new peak.

    Args:
        block: the full block that became the new peak
        state_change_summary: the coin/peak changes produced by adding the block
        peer: the peer the block came from, if any (used when re-broadcasting
            signage points uncovered by the new peak)

    Returns:
        A PeakPostProcessingResult bundling mempool changes, full-node-store
        results, added hints, and coin ids to look up for wallet updates.
    """

    record = state_change_summary.peak
    difficulty = self.blockchain.get_next_difficulty(record.header_hash, False)
    sub_slot_iters = self.blockchain.get_next_slot_iters(record.header_hash, False)

    self.log.info(
        f"🌱 Updated peak to height {record.height}, weight {record.weight}, "
        f"hh {record.header_hash.hex()}, "
        f"forked at {state_change_summary.fork_height}, rh: {record.reward_infusion_new_challenge.hex()}, "
        f"total iters: {record.total_iters}, "
        f"overflow: {record.overflow}, "
        f"deficit: {record.deficit}, "
        f"difficulty: {difficulty}, "
        f"sub slot iters: {sub_slot_iters}, "
        f"Generator size: "
        f"{len(bytes(block.transactions_generator)) if block.transactions_generator else 'No tx'}, "
        f"Generator ref list size: "
        f"{len(block.transactions_generator_ref_list) if block.transactions_generator else 'No tx'}"
    )

    # Persist hints for the new coins, and collect coin ids that subscribed
    # wallets need to be told about
    hints_to_add, lookup_coin_ids = get_hints_and_subscription_coin_ids(
        state_change_summary,
        self.subscriptions.has_coin_subscription,
        self.subscriptions.has_puzzle_subscription,
    )
    await self.hint_store.add_hints(hints_to_add)

    sub_slots = await self.blockchain.get_sp_and_ip_sub_slots(record.header_hash)
    assert sub_slots is not None

    if not self.sync_store.get_sync_mode():
        self.blockchain.clean_block_records()

    fork_block: Optional[BlockRecord] = None
    if state_change_summary.fork_height != block.height - 1 and block.height != 0:
        # This is a reorg
        fork_hash: Optional[bytes32] = self.blockchain.height_to_hash(state_change_summary.fork_height)
        assert fork_hash is not None
        fork_block = await self.blockchain.get_block_record_from_db(fork_hash)

    fns_peak_result: FullNodeStorePeakResult = self.full_node_store.new_peak(
        record,
        block,
        sub_slots[0],
        sub_slots[1],
        fork_block,
        self.blockchain,
        sub_slot_iters,
        difficulty,
    )

    # Re-broadcast any signage points that became valid with the new peak
    if fns_peak_result.new_signage_points is not None and peer is not None:
        for index, sp in fns_peak_result.new_signage_points:
            assert (
                sp.cc_vdf is not None
                and sp.cc_proof is not None
                and sp.rc_vdf is not None
                and sp.rc_proof is not None
            )
            await self.signage_point_post_processing(
                RespondSignagePoint(index, sp.cc_vdf, sp.cc_proof, sp.rc_vdf, sp.rc_proof), peer, sub_slots[1]
            )

    if sub_slots[1] is None:
        assert record.ip_sub_slot_total_iters(self.constants) == 0
        # Ensure the signage point is also in the store, for consistency
        self.full_node_store.new_signage_point(
            record.signage_point_index,
            self.blockchain,
            record,
            record.sub_slot_iters,
            SignagePoint(
                block.reward_chain_block.challenge_chain_sp_vdf,
                block.challenge_chain_sp_proof,
                block.reward_chain_block.reward_chain_sp_vdf,
                block.reward_chain_sp_proof,
            ),
            skip_vdf_validation=True,
        )

    # Update the mempool (returns successful pending transactions added to the mempool)
    spent_coins: List[bytes32] = [coin_id for coin_id, _ in state_change_summary.removals]
    mempool_new_peak_result = await self.mempool_manager.new_peak(self.blockchain.get_tx_peak(), spent_coins)

    return PeakPostProcessingResult(
        mempool_new_peak_result.items,
        mempool_new_peak_result.removals,
        fns_peak_result,
        hints_to_add,
        lookup_coin_ids,
    )
1682
    async def peak_post_processing_2(
        self,
        block: FullBlock,
        peer: Optional[WSChiaConnection],
        state_change_summary: StateChangeSummary,
        ppp_result: PeakPostProcessingResult,
    ) -> None:
        """
        Does NOT need to be called under the blockchain lock. Handle other parts of post processing like communicating
        with peers.

        Broadcasts newly-admitted mempool transactions and any pending end-of-sub-slot,
        notifies timelords and other full nodes of the new peak (unless we are syncing),
        and queues a WalletUpdate for subscribed wallets.

        Args:
            block: the full block that became the new peak.
            peer: the connection the block came from, if any; excluded from the
                new-peak broadcast so we don't echo the block back to its sender.
            state_change_summary: blockchain state changes produced when the block
                was added (peak record, fork height, rolled-back records, removals).
            ppp_result: output of peak_post_processing (mempool results, full node
                store result, hints, coin ids to look up).
        """
        record = state_change_summary.peak
        # Announce each transaction that entered the mempool as a result of the new peak
        for new_peak_item in ppp_result.mempool_peak_result:
            self.log.debug(f"Added transaction to mempool: {new_peak_item.transaction_id}")
            mempool_item = self.mempool_manager.get_mempool_item(new_peak_item.transaction_id)
            assert mempool_item is not None
            await self.broadcast_added_tx(mempool_item)

        # If there were pending end of slots that happen after this peak, broadcast them if they are added
        if ppp_result.fns_peak_result.added_eos is not None:
            broadcast = full_node_protocol.NewSignagePointOrEndOfSubSlot(
                ppp_result.fns_peak_result.added_eos.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
                ppp_result.fns_peak_result.added_eos.challenge_chain.get_hash(),
                uint8(0),
                ppp_result.fns_peak_result.added_eos.reward_chain.end_of_slot_vdf.challenge,
            )
            msg = make_msg(ProtocolMessageTypes.new_signage_point_or_end_of_sub_slot, broadcast)
            await self.server.send_to_all([msg], NodeType.FULL_NODE)

        # TODO: maybe add and broadcast new IPs as well

        if record.height % 1000 == 0:
            # Occasionally clear data in full node store to keep memory usage small
            self.full_node_store.clear_old_cache_entries()

        # Only advertise the peak when we are not in (long) sync mode
        if self.sync_store.get_sync_mode() is False:
            await self.send_peak_to_timelords(block)
            await self.broadcast_removed_tx(ppp_result.mempool_removals)

            # Tell full nodes about the new peak
            msg = make_msg(
                ProtocolMessageTypes.new_peak,
                full_node_protocol.NewPeak(
                    record.header_hash,
                    record.height,
                    record.weight,
                    state_change_summary.fork_height,
                    block.reward_chain_block.get_unfinished().get_hash(),
                ),
            )
            if peer is not None:
                # Don't send the peak back to the peer that gave us the block
                await self.server.send_to_all([msg], NodeType.FULL_NODE, peer.peer_node_id)
            else:
                await self.server.send_to_all([msg], NodeType.FULL_NODE)

        # Only 32-byte hints are treated as puzzle-hash hints for wallets
        coin_hints: Dict[bytes32, bytes32] = {
            coin_id: bytes32(hint) for coin_id, hint in ppp_result.hints if len(hint) == 32
        }

        peak = Peak(
            state_change_summary.peak.header_hash, state_change_summary.peak.height, state_change_summary.peak.weight
        )

        # Looks up coin records in DB for the coins that wallets are interested in
        new_states = await self.coin_store.get_coin_records(ppp_result.lookup_coin_ids)

        await self.wallet_sync_queue.put(
            WalletUpdate(
                state_change_summary.fork_height,
                peak,
                state_change_summary.rolled_back_records + new_states,
                coin_hints,
            )
        )

        self._state_changed("new_peak")
1758
+
1759
    async def add_block(
        self,
        block: FullBlock,
        peer: Optional[WSChiaConnection] = None,
        bls_cache: Optional[BLSCache] = None,
        raise_on_disconnected: bool = False,
        fork_info: Optional[ForkInfo] = None,
    ) -> Optional[Message]:
        """
        Add a full block from a peer full node (or ourselves).

        Validates the block (reusing cached validation from the matching unfinished
        block when possible), adds it to the blockchain under the priority mutex,
        and runs peak post-processing if it becomes the new peak.

        Args:
            block: the block to validate and add.
            peer: connection the block came from; used to re-fetch the block when
                our unfinished-block cache has the wrong generator, and to avoid
                echoing the peak back to the sender.
            bls_cache: optional cache of verified BLS signatures, passed through
                to blockchain.add_block.
            raise_on_disconnected: if True, a DISCONNECTED_BLOCK result raises
                RuntimeError instead of returning None.
            fork_info: optional fork tracking state passed to blockchain.add_block.

        Returns:
            Always None on every path that completes normally.

        Raises:
            TimestampError, ConsensusError, ValueError, RuntimeError on the
            corresponding validation failures.
        """
        if self.sync_store.get_sync_mode():
            return None

        # Adds the block to seen, and check if it's seen before (which means header is in memory)
        header_hash = block.header_hash
        if self.blockchain.contains_block(header_hash):
            return None

        pre_validation_result: Optional[PreValidationResult] = None
        if (
            block.is_transaction_block()
            and block.transactions_info is not None
            and block.transactions_info.generator_root != bytes([0] * 32)
            and block.transactions_generator is None
        ):
            # This is the case where we already had the unfinished block, and asked for this block without
            # the transactions (since we already had them). Therefore, here we add the transactions.
            unfinished_rh: bytes32 = block.reward_chain_block.get_unfinished().get_hash()
            foliage_hash: Optional[bytes32] = block.foliage.foliage_transaction_block_hash
            assert foliage_hash is not None
            unf_entry: Optional[UnfinishedBlockEntry] = self.full_node_store.get_unfinished_block_result(
                unfinished_rh, foliage_hash
            )
            if (
                unf_entry is not None
                and unf_entry.unfinished_block is not None
                and unf_entry.unfinished_block.transactions_generator is not None
                and unf_entry.unfinished_block.foliage_transaction_block == block.foliage_transaction_block
            ):
                # We checked that the transaction block is the same, therefore all transactions and the signature
                # must be identical in the unfinished and finished blocks. We can therefore use the cache.

                # this is a transaction block, the foliage hash should be set
                assert foliage_hash is not None
                pre_validation_result = unf_entry.result
                assert pre_validation_result is not None
                # Splice the cached generator into the received (generator-less) block
                block = block.replace(
                    transactions_generator=unf_entry.unfinished_block.transactions_generator,
                    transactions_generator_ref_list=unf_entry.unfinished_block.transactions_generator_ref_list,
                )
            else:
                # We still do not have the correct information for this block, perhaps there is a duplicate block
                # with the same unfinished block hash in the cache, so we need to fetch the correct one
                if peer is None:
                    return None

                block_response: Optional[Any] = await peer.call_api(
                    FullNodeAPI.request_block, full_node_protocol.RequestBlock(block.height, True)
                )
                if block_response is None or not isinstance(block_response, full_node_protocol.RespondBlock):
                    self.log.warning(
                        f"Was not able to fetch the correct block for height {block.height} {block_response}"
                    )
                    return None
                new_block: FullBlock = block_response.block
                if new_block.foliage_transaction_block != block.foliage_transaction_block:
                    self.log.warning(
                        f"Received the wrong block for height {block.height} {new_block.header_hash.hex()}"
                    )
                    return None
                assert new_block.transactions_generator is not None

                self.log.debug(
                    f"Wrong info in the cache for bh {new_block.header_hash.hex()}, "
                    f"there might be multiple blocks from the "
                    f"same farmer with the same pospace."
                )
                # This recursion ends here, we cannot recurse again because transactions_generator is not None
                return await self.add_block(new_block, peer, bls_cache)
        state_change_summary: Optional[StateChangeSummary] = None
        ppp_result: Optional[PeakPostProcessingResult] = None
        async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high), enable_profiler(
            self.profile_block_validation
        ) as pr:
            # After acquiring the lock, check again, because another asyncio thread might have added it
            if self.blockchain.contains_block(header_hash):
                return None
            validation_start = time.monotonic()
            # Tries to add the block to the blockchain, if we already validated transactions, don't do it again
            block_height_conds_map = {}
            if pre_validation_result is not None and pre_validation_result.conds is not None:
                block_height_conds_map[block.height] = pre_validation_result.conds

            # Don't validate signatures because we want to validate them in the main thread later, since we have a
            # cache available
            prev_b = None
            prev_ses_block = None
            if block.height > 0:
                prev_b = await self.blockchain.get_block_record_from_db(block.prev_header_hash)
                assert prev_b is not None
                # Walk back to the most recent block that included a sub-epoch summary
                curr = prev_b
                while curr.height > 0 and curr.sub_epoch_summary_included is None:
                    curr = self.blockchain.block_record(curr.prev_hash)
                prev_ses_block = curr
            new_slot = len(block.finished_sub_slots) > 0
            ssi, diff = get_next_sub_slot_iters_and_difficulty(self.constants, new_slot, prev_b, self.blockchain)
            pre_validation_results = await pre_validate_blocks_multiprocessing(
                self.blockchain.constants,
                self.blockchain,
                [block],
                self.blockchain.pool,
                block_height_conds_map,
                sub_slot_iters=ssi,
                difficulty=diff,
                prev_ses_block=prev_ses_block,
                validate_signatures=False,
            )
            added: Optional[AddBlockResult] = None
            pre_validation_time = time.monotonic() - validation_start
            try:
                if len(pre_validation_results) < 1:
                    raise ValueError(f"Failed to validate block {header_hash} height {block.height}")
                if pre_validation_results[0].error is not None:
                    if Err(pre_validation_results[0].error) == Err.INVALID_PREV_BLOCK_HASH:
                        added = AddBlockResult.DISCONNECTED_BLOCK
                        error_code: Optional[Err] = Err.INVALID_PREV_BLOCK_HASH
                    elif Err(pre_validation_results[0].error) == Err.TIMESTAMP_TOO_FAR_IN_FUTURE:
                        raise TimestampError()
                    else:
                        raise ValueError(
                            f"Failed to validate block {header_hash} height "
                            f"{block.height}: {Err(pre_validation_results[0].error).name}"
                        )
                else:
                    # Prefer the cached result from the unfinished block, if we have one
                    result_to_validate = (
                        pre_validation_results[0] if pre_validation_result is None else pre_validation_result
                    )
                    assert result_to_validate.required_iters == pre_validation_results[0].required_iters
                    (added, error_code, state_change_summary) = await self.blockchain.add_block(
                        block, result_to_validate, bls_cache, ssi, fork_info
                    )
                if added == AddBlockResult.ALREADY_HAVE_BLOCK:
                    return None
                elif added == AddBlockResult.INVALID_BLOCK:
                    assert error_code is not None
                    self.log.error(f"Block {header_hash} at height {block.height} is invalid with code {error_code}.")
                    raise ConsensusError(error_code, [header_hash])
                elif added == AddBlockResult.DISCONNECTED_BLOCK:
                    self.log.info(f"Disconnected block {header_hash} at height {block.height}")
                    if raise_on_disconnected:
                        raise RuntimeError("Expected block to be added, received disconnected block.")
                    return None
                elif added == AddBlockResult.NEW_PEAK:
                    # Only propagate blocks which extend the blockchain (becomes one of the heads)
                    assert state_change_summary is not None
                    post_process_time = time.monotonic()
                    ppp_result = await self.peak_post_processing(block, state_change_summary, peer)
                    post_process_time = time.monotonic() - post_process_time

                elif added == AddBlockResult.ADDED_AS_ORPHAN:
                    self.log.info(
                        f"Received orphan block of height {block.height} rh {block.reward_chain_block.get_hash()}"
                    )
                    post_process_time = 0
                else:
                    # Should never reach here, all the cases are covered
                    raise RuntimeError(f"Invalid result from add_block {added}")
            except asyncio.CancelledError:
                # We need to make sure to always call this method even when we get a cancel exception, to make sure
                # the node stays in sync
                if added == AddBlockResult.NEW_PEAK:
                    assert state_change_summary is not None
                    await self.peak_post_processing(block, state_change_summary, peer)
                raise

            validation_time = time.monotonic() - validation_start

        # The second stage of post-processing (peer/wallet notification) runs outside the lock
        if ppp_result is not None:
            assert state_change_summary is not None
            await self.peak_post_processing_2(block, peer, state_change_summary, ppp_result)

        percent_full_str = (
            (
                ", percent full: "
                + str(round(100.0 * float(block.transactions_info.cost) / self.constants.MAX_BLOCK_COST_CLVM, 3))
                + "%"
            )
            if block.transactions_info is not None
            else ""
        )
        self.log.log(
            logging.WARNING if validation_time > 2 else logging.DEBUG,
            f"Block validation: {validation_time:0.2f}s, "
            f"pre_validation: {pre_validation_time:0.2f}s, "
            f"CLVM: {pre_validation_results[0].timing/1000.0:0.2f}s, "
            f"post-process: {post_process_time:0.2f}s, "
            f"cost: {block.transactions_info.cost if block.transactions_info is not None else 'None'}"
            f"{percent_full_str} header_hash: {header_hash.hex()} height: {block.height}",
        )

        # this is not covered by any unit tests as it's essentially test code
        # itself. It's exercised manually when investigating performance issues
        if validation_time > 2 and pr is not None:  # pragma: no cover
            pr.create_stats()
            profile_dir = path_from_root(self.root_path, "block-validation-profile")
            pr.dump_stats(profile_dir / f"{block.height}-{validation_time:0.1f}.profile")

        # This code path is reached if added == ADDED_AS_ORPHAN or NEW_TIP
        peak = self.blockchain.get_peak()
        assert peak is not None

        # Removes all temporary data for old blocks
        clear_height = uint32(max(0, peak.height - 50))
        self.full_node_store.clear_candidate_blocks_below(clear_height)
        self.full_node_store.clear_unfinished_blocks_below(clear_height)

        # Metrics/telemetry payload for the "block" state-changed event
        state_changed_data: Dict[str, Any] = {
            "transaction_block": False,
            "k_size": block.reward_chain_block.proof_of_space.size,
            "header_hash": block.header_hash,
            "fork_height": None,
            "rolled_back_records": None,
            "height": block.height,
            "validation_time": validation_time,
            "pre_validation_time": pre_validation_time,
        }

        if state_change_summary is not None:
            state_changed_data["fork_height"] = state_change_summary.fork_height
            state_changed_data["rolled_back_records"] = len(state_change_summary.rolled_back_records)

        if block.transactions_info is not None:
            state_changed_data["transaction_block"] = True
            state_changed_data["block_cost"] = block.transactions_info.cost
            state_changed_data["block_fees"] = block.transactions_info.fees

        if block.foliage_transaction_block is not None:
            state_changed_data["timestamp"] = block.foliage_transaction_block.timestamp

        if block.transactions_generator is not None:
            state_changed_data["transaction_generator_size_bytes"] = len(bytes(block.transactions_generator))

        state_changed_data["transaction_generator_ref_list"] = block.transactions_generator_ref_list
        if added is not None:
            state_changed_data["receive_block_result"] = added.value

        self._state_changed("block", state_changed_data)

        # Kick off sub-epoch segment creation for weight proofs when a new sub-epoch
        # summary was included, unless a previous segment task is still running
        record = self.blockchain.block_record(block.header_hash)
        if self.weight_proof_handler is not None and record.sub_epoch_summary_included is not None:
            if self._segment_task is None or self._segment_task.done():
                self._segment_task = asyncio.create_task(self.weight_proof_handler.create_prev_sub_epoch_segments())
        return None
2013
+
2014
    async def add_unfinished_block(
        self,
        block: UnfinishedBlock,
        peer: Optional[WSChiaConnection],
        farmed_block: bool = False,
        block_bytes: Optional[bytes] = None,
    ) -> None:
        """
        We have received an unfinished block, either created by us, or from another peer.
        We can validate and add it and if it's a good block, propagate it to other peers and
        timelords.

        Args:
            block: the unfinished block to validate and store.
            peer: the connection it came from (excluded from re-broadcast), or None.
            farmed_block: True when we farmed this block ourselves; only affects logging.
            block_bytes: optional pre-serialized form of `block`, to avoid re-serializing.

        Raises:
            TimestampError or ConsensusError on validation failures.
        """
        receive_time = time.time()

        if block.prev_header_hash != self.constants.GENESIS_CHALLENGE and not self.blockchain.contains_block(
            block.prev_header_hash
        ):
            # No need to request the parent, since the peer will send it to us anyway, via NewPeak
            self.log.debug("Received a disconnected unfinished block")
            return None

        # Adds the unfinished block to seen, and check if it's seen before, to prevent
        # processing it twice. This searches for the exact version of the unfinished block (there can be many different
        # foliages for the same trunk). This is intentional, to prevent DOS attacks.
        # Note that it does not require that this block was successfully processed
        if self.full_node_store.seen_unfinished_block(block.get_hash()):
            return None

        block_hash = bytes32(block.reward_chain_block.get_hash())
        foliage_tx_hash = block.foliage.foliage_transaction_block_hash

        # If we have already added the block with this reward block hash and
        # foliage hash, return
        if self.full_node_store.get_unfinished_block2(block_hash, foliage_tx_hash)[0] is not None:
            return None

        peak: Optional[BlockRecord] = self.blockchain.get_peak()
        if peak is not None:
            if block.total_iters < peak.sp_total_iters(self.constants):
                # This means this unfinished block is pretty far behind, it will not add weight to our chain
                return None

        if block.prev_header_hash == self.constants.GENESIS_CHALLENGE:
            prev_b = None
        else:
            prev_b = self.blockchain.block_record(block.prev_header_hash)

        # Count the blocks in sub slot, and check if it's a new epoch
        if len(block.finished_sub_slots) > 0:
            num_blocks_in_ss = 1  # Curr
        else:
            # Walk back to the first block of the current sub slot, counting blocks
            curr = self.blockchain.try_block_record(block.prev_header_hash)
            num_blocks_in_ss = 2  # Curr and prev
            while (curr is not None) and not curr.first_in_sub_slot:
                curr = self.blockchain.try_block_record(curr.prev_hash)
                num_blocks_in_ss += 1

        if num_blocks_in_ss > self.constants.MAX_SUB_SLOT_BLOCKS:
            # TODO: potentially allow overflow blocks here, which count for the next slot
            self.log.warning("Too many blocks added, not adding block")
            return None

        # The clvm generator and aggregate signature are validated outside of the lock, to allow other blocks and
        # transactions to get validated
        npc_result: Optional[NPCResult] = None
        pre_validation_time = None

        # First lock acquisition: cheap header validation only
        async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
            start_header_time = time.monotonic()
            _, header_error = await self.blockchain.validate_unfinished_block_header(block)
            if header_error is not None:
                if header_error == Err.TIMESTAMP_TOO_FAR_IN_FUTURE:
                    raise TimestampError()
                else:
                    raise ConsensusError(header_error)
            validate_time = time.monotonic() - start_header_time
            self.log.log(
                logging.WARNING if validate_time > 2 else logging.DEBUG,
                f"Time for header validate: {validate_time:0.3f}s",
            )

        # Run the CLVM generator and verify the aggregate signature outside the lock
        if block.transactions_generator is not None:
            pre_validation_start = time.monotonic()
            assert block.transactions_info is not None
            try:
                block_generator: Optional[BlockGenerator] = await get_block_generator(
                    self.blockchain.lookup_block_generators, block
                )
            except ValueError:
                raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
            if block_generator is None:
                raise ConsensusError(Err.GENERATOR_REF_HAS_NO_GENERATOR)
            if block_bytes is None:
                block_bytes = bytes(block)

            height = uint32(0) if prev_b is None else uint32(prev_b.height + 1)
            npc_result = await self.blockchain.run_generator(block_bytes, block_generator, height)
            pre_validation_time = time.monotonic() - pre_validation_start

            # blockchain.run_generator throws on errors, so npc_result is
            # guaranteed to represent a successful run
            assert npc_result.conds is not None
            pairs_pks, pairs_msgs = pkm_pairs(npc_result.conds, self.constants.AGG_SIG_ME_ADDITIONAL_DATA)
            if not self._bls_cache.aggregate_verify(
                pairs_pks, pairs_msgs, block.transactions_info.aggregated_signature
            ):
                raise ConsensusError(Err.BAD_AGGREGATE_SIGNATURE)

        # Second lock acquisition: full unfinished-block validation (including VDFs)
        async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.high):
            # TODO: pre-validate VDFs outside of lock
            validation_start = time.monotonic()
            validate_result = await self.blockchain.validate_unfinished_block(block, npc_result)
            if validate_result.error is not None:
                raise ConsensusError(Err(validate_result.error))
            validation_time = time.monotonic() - validation_start

        # respond_block will later use the cache (validated_signature=True)
        validate_result = dataclasses.replace(validate_result, validated_signature=True)

        assert validate_result.required_iters is not None

        # Perform another check, in case we have already concurrently added the same unfinished block
        if self.full_node_store.get_unfinished_block2(block_hash, foliage_tx_hash)[0] is not None:
            return None

        if block.prev_header_hash == self.constants.GENESIS_CHALLENGE:
            height = uint32(0)
        else:
            height = uint32(self.blockchain.block_record(block.prev_header_hash).height + 1)

        ses: Optional[SubEpochSummary] = next_sub_epoch_summary(
            self.constants,
            self.blockchain,
            validate_result.required_iters,
            block,
            True,
        )

        self.full_node_store.add_unfinished_block(height, block, validate_result)
        pre_validation_log = (
            f"pre_validation time {pre_validation_time:0.4f}, " if pre_validation_time is not None else ""
        )
        # Time from the matching signage point until we received this block
        block_duration_in_seconds = (
            receive_time - self.signage_point_times[block.reward_chain_block.signage_point_index]
        )
        if farmed_block is True:
            self.log.info(
                f"🍀 ️Farmed unfinished_block {block_hash}, SP: {block.reward_chain_block.signage_point_index}, "
                f"validation time: {validation_time:0.4f} seconds, {pre_validation_log}"
                f"cost: {block.transactions_info.cost if block.transactions_info else 'None'} "
            )
        else:
            percent_full_str = (
                (
                    ", percent full: "
                    + str(round(100.0 * float(block.transactions_info.cost) / self.constants.MAX_BLOCK_COST_CLVM, 3))
                    + "%"
                )
                if block.transactions_info is not None
                else ""
            )
            self.log.info(
                f"Added unfinished_block {block_hash}, not farmed by us,"
                f" SP: {block.reward_chain_block.signage_point_index} farmer response time: "
                f"{block_duration_in_seconds:0.4f}, "
                f"Pool pk {encode_puzzle_hash(block.foliage.foliage_block_data.pool_target.puzzle_hash, 'xch')}, "
                f"validation time: {validation_time:0.4f} seconds, {pre_validation_log}"
                f"cost: {block.transactions_info.cost if block.transactions_info else 'None'}"
                f"{percent_full_str}"
            )

        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            self.constants,
            len(block.finished_sub_slots) > 0,
            prev_b,
            self.blockchain,
        )

        # Determine rc_prev (the previous reward chain hash) for the timelord message
        if block.reward_chain_block.signage_point_index == 0:
            res = self.full_node_store.get_sub_slot(block.reward_chain_block.pos_ss_cc_challenge_hash)
            if res is None:
                if block.reward_chain_block.pos_ss_cc_challenge_hash == self.constants.GENESIS_CHALLENGE:
                    rc_prev = self.constants.GENESIS_CHALLENGE
                else:
                    self.log.warning(f"Do not have sub slot {block.reward_chain_block.pos_ss_cc_challenge_hash}")
                    return None
            else:
                rc_prev = res[0].reward_chain.get_hash()
        else:
            assert block.reward_chain_block.reward_chain_sp_vdf is not None
            rc_prev = block.reward_chain_block.reward_chain_sp_vdf.challenge

        timelord_request = timelord_protocol.NewUnfinishedBlockTimelord(
            block.reward_chain_block,
            difficulty,
            sub_slot_iters,
            block.foliage,
            ses,
            rc_prev,
        )

        timelord_msg = make_msg(ProtocolMessageTypes.new_unfinished_block_timelord, timelord_request)
        await self.server.send_to_all([timelord_msg], NodeType.TIMELORD)

        # create two versions of the NewUnfinishedBlock message, one to be sent
        # to newer clients and one for older clients
        full_node_request = full_node_protocol.NewUnfinishedBlock(block.reward_chain_block.get_hash())
        msg = make_msg(ProtocolMessageTypes.new_unfinished_block, full_node_request)

        full_node_request2 = full_node_protocol.NewUnfinishedBlock2(
            block.reward_chain_block.get_hash(), block.foliage.foliage_transaction_block_hash
        )
        msg2 = make_msg(ProtocolMessageTypes.new_unfinished_block2, full_node_request2)

        def old_clients(conn: WSChiaConnection) -> bool:
            # don't send this to peers with new clients
            return conn.protocol_version <= Version("0.0.35")

        def new_clients(conn: WSChiaConnection) -> bool:
            # don't send this to peers with old clients
            return conn.protocol_version > Version("0.0.35")

        # Exclude the peer that sent us the block from both broadcasts
        peer_id: Optional[bytes32] = None if peer is None else peer.peer_node_id
        await self.server.send_to_all_if([msg], NodeType.FULL_NODE, old_clients, peer_id)
        await self.server.send_to_all_if([msg2], NodeType.FULL_NODE, new_clients, peer_id)

        self._state_changed(
            "unfinished_block",
            {
                "block_duration_in_seconds": block_duration_in_seconds,
                "validation_time_in_seconds": validation_time,
                "pre_validation_time_in_seconds": pre_validation_time,
                "unfinished_block": block.to_json_dict(),
            },
        )
2249
+
2250
    async def new_infusion_point_vdf(
        self, request: timelord_protocol.NewInfusionPointVDF, timelord_peer: Optional[WSChiaConnection] = None
    ) -> Optional[Message]:
        """
        Handle an infusion point VDF from a timelord: combine it with the matching
        cached unfinished block into a FullBlock and try to add it to the chain.

        Args:
            request: the timelord's infusion point VDFs and proofs.
            timelord_peer: the timelord connection that sent this VDF; on a
                validation failure, only this timelord is reset to our peak.

        Returns:
            Always None; results are communicated via add_block side effects.
        """
        # Lookup unfinished blocks
        unfinished_block: Optional[UnfinishedBlock] = self.full_node_store.get_unfinished_block(
            request.unfinished_reward_hash
        )

        if unfinished_block is None:
            self.log.warning(
                f"Do not have unfinished reward chain block {request.unfinished_reward_hash}, cannot finish."
            )
            return None

        prev_b: Optional[BlockRecord] = None

        target_rc_hash = request.reward_chain_ip_vdf.challenge
        last_slot_cc_hash = request.challenge_chain_ip_vdf.challenge

        # Backtracks through end of slot objects, should work for multiple empty sub slots
        for eos, _, _ in reversed(self.full_node_store.finished_sub_slots):
            if eos is not None and eos.reward_chain.get_hash() == target_rc_hash:
                target_rc_hash = eos.reward_chain.end_of_slot_vdf.challenge
        if target_rc_hash == self.constants.GENESIS_CHALLENGE:
            prev_b = None
        else:
            # Find the prev block, starts looking backwards from the peak. target_rc_hash must be the hash of a block
            # and not an end of slot (since we just looked through the slots and backtracked)
            curr: Optional[BlockRecord] = self.blockchain.get_peak()

            # Bounded walk: only look 10 blocks back from the peak
            for _ in range(10):
                if curr is None:
                    break
                if curr.reward_infusion_new_challenge == target_rc_hash:
                    # Found our prev block
                    prev_b = curr
                    break
                curr = self.blockchain.try_block_record(curr.prev_hash)

            # If not found, cache keyed on prev block
            if prev_b is None:
                self.full_node_store.add_to_future_ip(request)
                self.log.warning(
                    f"Previous block is None, infusion point {request.reward_chain_ip_vdf.challenge.hex()}"
                )
                return None

        finished_sub_slots: Optional[List[EndOfSubSlotBundle]] = self.full_node_store.get_finished_sub_slots(
            self.blockchain,
            prev_b,
            last_slot_cc_hash,
        )
        if finished_sub_slots is None:
            return None

        sub_slot_iters, difficulty = get_next_sub_slot_iters_and_difficulty(
            self.constants,
            len(finished_sub_slots) > 0,
            prev_b,
            self.blockchain,
        )

        # Compute total iters at the block's signage point (sub slot start + sp iters)
        if unfinished_block.reward_chain_block.pos_ss_cc_challenge_hash == self.constants.GENESIS_CHALLENGE:
            sub_slot_start_iters = uint128(0)
        else:
            ss_res = self.full_node_store.get_sub_slot(unfinished_block.reward_chain_block.pos_ss_cc_challenge_hash)
            if ss_res is None:
                self.log.warning(f"Do not have sub slot {unfinished_block.reward_chain_block.pos_ss_cc_challenge_hash}")
                return None
            _, _, sub_slot_start_iters = ss_res
        sp_total_iters = uint128(
            sub_slot_start_iters
            + calculate_sp_iters(
                self.constants,
                sub_slot_iters,
                unfinished_block.reward_chain_block.signage_point_index,
            )
        )

        # Assemble the finished block from the unfinished block plus the new VDFs/proofs
        block: FullBlock = unfinished_block_to_full_block(
            unfinished_block,
            request.challenge_chain_ip_vdf,
            request.challenge_chain_ip_proof,
            request.reward_chain_ip_vdf,
            request.reward_chain_ip_proof,
            request.infused_challenge_chain_ip_vdf,
            request.infused_challenge_chain_ip_proof,
            finished_sub_slots,
            prev_b,
            self.blockchain,
            sp_total_iters,
            difficulty,
        )
        if not self.has_valid_pool_sig(block):
            self.log.warning("Trying to make a pre-farm block but height is not 0")
            return None
        try:
            await self.add_block(block, None, self._bls_cache, raise_on_disconnected=True)
        except Exception as e:
            self.log.warning(f"Consensus error validating block: {e}")
            if timelord_peer is not None:
                # Only sends to the timelord who sent us this VDF, to reset them to the correct peak
                await self.send_peak_to_timelords(peer=timelord_peer)
        return None
2354
+
2355
    async def add_end_of_sub_slot(
        self, end_of_slot_bundle: EndOfSubSlotBundle, peer: WSChiaConnection
    ) -> Tuple[Optional[Message], bool]:
        """
        Process an end-of-sub-slot bundle received from a peer.

        Returns (message, added): `message` is an optional follow-up request to send back to
        the peer (used to fetch a missing previous sub-slot), and `added` is True when the
        sub-slot was stored successfully (or was already known).
        """
        fetched_ss = self.full_node_store.get_sub_slot(end_of_slot_bundle.challenge_chain.get_hash())

        # We are not interested in sub-slots which have the same challenge chain but different reward chain. If there
        # is a reorg, we will find out through the broadcast of blocks instead.
        if fetched_ss is not None:
            # Already have the sub-slot
            return None, True

        async with self.timelord_lock:
            # Look up the previous sub-slot this bundle chains from.
            fetched_ss = self.full_node_store.get_sub_slot(
                end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge
            )
            if (
                (fetched_ss is None)
                and end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge
                != self.constants.GENESIS_CHALLENGE
            ):
                # If we don't have the prev, request the prev instead
                full_node_request = full_node_protocol.RequestSignagePointOrEndOfSubSlot(
                    end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
                    uint8(0),
                    bytes32([0] * 32),
                )
                return (
                    make_msg(ProtocolMessageTypes.request_signage_point_or_end_of_sub_slot, full_node_request),
                    False,
                )

            peak = self.blockchain.get_peak()
            if peak is not None and peak.height > 2:
                next_sub_slot_iters = self.blockchain.get_next_slot_iters(peak.header_hash, True)
                next_difficulty = self.blockchain.get_next_difficulty(peak.header_hash, True)
            else:
                # Too early in the chain to derive these from history; use genesis values.
                next_sub_slot_iters = self.constants.SUB_SLOT_ITERS_STARTING
                next_difficulty = self.constants.DIFFICULTY_STARTING

            # Adds the sub slot and potentially get new infusions
            new_infusions = self.full_node_store.new_finished_sub_slot(
                end_of_slot_bundle,
                self.blockchain,
                peak,
                next_sub_slot_iters,
                next_difficulty,
                await self.blockchain.get_full_peak(),
            )
            # It may be an empty list, even if it's not None. Not None means added successfully
            if new_infusions is not None:
                self.log.info(
                    f"⏲️ Finished sub slot, SP {self.constants.NUM_SPS_SUB_SLOT}/{self.constants.NUM_SPS_SUB_SLOT}, "
                    f"{end_of_slot_bundle.challenge_chain.get_hash().hex()}, "
                    f"number of sub-slots: {len(self.full_node_store.finished_sub_slots)}, "
                    f"RC hash: {end_of_slot_bundle.reward_chain.get_hash().hex()}, "
                    f"Deficit {end_of_slot_bundle.reward_chain.deficit}"
                )
                # Reset farmer response timer for sub slot (SP 0)
                self.signage_point_times[0] = time.time()
                # Notify full nodes of the new sub-slot
                broadcast = full_node_protocol.NewSignagePointOrEndOfSubSlot(
                    end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge,
                    end_of_slot_bundle.challenge_chain.get_hash(),
                    uint8(0),
                    end_of_slot_bundle.reward_chain.end_of_slot_vdf.challenge,
                )
                msg = make_msg(ProtocolMessageTypes.new_signage_point_or_end_of_sub_slot, broadcast)
                # Exclude the peer that sent this to us, it already has the sub-slot.
                await self.server.send_to_all([msg], NodeType.FULL_NODE, peer.peer_node_id)

                for infusion in new_infusions:
                    await self.new_infusion_point_vdf(infusion)

                # Notify farmers of the new sub-slot
                broadcast_farmer = farmer_protocol.NewSignagePoint(
                    end_of_slot_bundle.challenge_chain.get_hash(),
                    end_of_slot_bundle.challenge_chain.get_hash(),
                    end_of_slot_bundle.reward_chain.get_hash(),
                    next_difficulty,
                    next_sub_slot_iters,
                    uint8(0),
                    uint32(0) if peak is None else peak.height,
                    sp_source_data=SignagePointSourceData(
                        sub_slot_data=SPSubSlotSourceData(
                            end_of_slot_bundle.challenge_chain, end_of_slot_bundle.reward_chain
                        )
                    ),
                )
                msg = make_msg(ProtocolMessageTypes.new_signage_point, broadcast_farmer)
                await self.server.send_to_all([msg], NodeType.FARMER)
                return None, True
            else:
                self.log.info(
                    f"End of slot not added CC challenge "
                    f"{end_of_slot_bundle.challenge_chain.challenge_chain_end_of_slot_vdf.challenge.hex()}"
                )
        return None, False
2451
+
2452
    async def add_transaction(
        self, transaction: SpendBundle, spend_name: bytes32, peer: Optional[WSChiaConnection] = None, test: bool = False
    ) -> Tuple[MempoolInclusionStatus, Optional[Err]]:
        """
        Validate `transaction` and attempt to add it to the mempool, broadcasting it to
        peers on success.

        Args:
            transaction: the spend bundle to add.
            spend_name: the spend bundle's name (hash), used as its mempool key.
            peer: the peer the transaction came from, excluded from re-broadcast.
            test: when True, skip the "are we synced" check (used by tests/simulator).

        Returns:
            A (status, error) pair; `error` is None on success.
        """
        if self.sync_store.get_sync_mode():
            return MempoolInclusionStatus.FAILED, Err.NO_TRANSACTIONS_WHILE_SYNCING
        if not test and not (await self.synced()):
            return MempoolInclusionStatus.FAILED, Err.NO_TRANSACTIONS_WHILE_SYNCING

        if self.mempool_manager.get_spendbundle(spend_name) is not None:
            # Already in the mempool; clear the "seen" marker so it can be re-announced later.
            self.mempool_manager.remove_seen(spend_name)
            return MempoolInclusionStatus.SUCCESS, None
        if self.mempool_manager.seen(spend_name):
            return MempoolInclusionStatus.FAILED, Err.ALREADY_INCLUDING_TRANSACTION
        self.mempool_manager.add_and_maybe_pop_seen(spend_name)
        self.log.debug(f"Processing transaction: {spend_name}")
        # Ignore if syncing or if we have not yet received a block
        # the mempool must have a peak to validate transactions
        if self.sync_store.get_sync_mode() or self.mempool_manager.peak is None:
            status = MempoolInclusionStatus.FAILED
            error: Optional[Err] = Err.NO_TRANSACTIONS_WHILE_SYNCING
            self.mempool_manager.remove_seen(spend_name)
        else:
            try:
                # Expensive validation (signatures, cost) happens outside the blockchain lock.
                cost_result = await self.mempool_manager.pre_validate_spendbundle(
                    transaction, spend_name, self._bls_cache
                )
            except ValidationError as e:
                self.mempool_manager.remove_seen(spend_name)
                return MempoolInclusionStatus.FAILED, e.code
            except Exception:
                self.mempool_manager.remove_seen(spend_name)
                raise

            async with self.blockchain.priority_mutex.acquire(priority=BlockchainMutexPriority.low):
                # Re-check under the lock: another task may have added it meanwhile.
                if self.mempool_manager.get_spendbundle(spend_name) is not None:
                    self.mempool_manager.remove_seen(spend_name)
                    return MempoolInclusionStatus.SUCCESS, None
                if self.mempool_manager.peak is None:
                    return MempoolInclusionStatus.FAILED, Err.MEMPOOL_NOT_INITIALIZED
                info = await self.mempool_manager.add_spend_bundle(
                    transaction, cost_result, spend_name, self.mempool_manager.peak.height
                )
            status = info.status
            error = info.error
            if status == MempoolInclusionStatus.SUCCESS:
                self.log.debug(
                    f"Added transaction to mempool: {spend_name} mempool size: "
                    f"{self.mempool_manager.mempool.total_mempool_cost()} normalized "
                    f"{self.mempool_manager.mempool.total_mempool_cost() / 5000000}"
                )

                # Only broadcast successful transactions, not pending ones. Otherwise it's a DOS
                # vector.
                mempool_item = self.mempool_manager.get_mempool_item(spend_name)
                assert mempool_item is not None
                await self.broadcast_removed_tx(info.removals)
                await self.broadcast_added_tx(mempool_item, current_peer=peer)

                if self.simulator_transaction_callback is not None:  # callback
                    await self.simulator_transaction_callback(spend_name)  # pylint: disable=E1102

            else:
                self.mempool_manager.remove_seen(spend_name)
                self.log.debug(f"Wasn't able to add transaction with id {spend_name}, status {status} error: {error}")
        return status, error
2517
+
2518
+ async def broadcast_added_tx(
2519
+ self, mempool_item: MempoolItem, current_peer: Optional[WSChiaConnection] = None
2520
+ ) -> None:
2521
+ assert mempool_item.fee >= 0
2522
+ assert mempool_item.cost is not None
2523
+
2524
+ new_tx = full_node_protocol.NewTransaction(
2525
+ mempool_item.name,
2526
+ mempool_item.cost,
2527
+ mempool_item.fee,
2528
+ )
2529
+ msg = make_msg(ProtocolMessageTypes.new_transaction, new_tx)
2530
+ if current_peer is None:
2531
+ await self.server.send_to_all([msg], NodeType.FULL_NODE)
2532
+ else:
2533
+ await self.server.send_to_all([msg], NodeType.FULL_NODE, current_peer.peer_node_id)
2534
+
2535
+ conds = mempool_item.conds
2536
+
2537
+ all_peers = {
2538
+ peer_id
2539
+ for peer_id, peer in self.server.all_connections.items()
2540
+ if peer.has_capability(Capability.MEMPOOL_UPDATES)
2541
+ }
2542
+
2543
+ if len(all_peers) == 0:
2544
+ return
2545
+
2546
+ start_time = time.monotonic()
2547
+
2548
+ hints_for_removals = await self.hint_store.get_hints([bytes32(spend.coin_id) for spend in conds.spends])
2549
+ peer_ids = all_peers.intersection(peers_for_spend_bundle(self.subscriptions, conds, set(hints_for_removals)))
2550
+
2551
+ for peer_id in peer_ids:
2552
+ peer = self.server.all_connections.get(peer_id)
2553
+
2554
+ if peer is None:
2555
+ continue
2556
+
2557
+ msg = make_msg(
2558
+ ProtocolMessageTypes.mempool_items_added, wallet_protocol.MempoolItemsAdded([mempool_item.name])
2559
+ )
2560
+ await peer.send_message(msg)
2561
+
2562
+ total_time = time.monotonic() - start_time
2563
+
2564
+ self.log.log(
2565
+ logging.DEBUG if total_time < 0.5 else logging.WARNING,
2566
+ f"Broadcasting added transaction {mempool_item.name} to {len(peer_ids)} peers took {total_time:.4f}s",
2567
+ )
2568
+
2569
    async def broadcast_removed_tx(self, mempool_removals: List[MempoolRemoveInfo]) -> None:
        """
        Notify wallet peers that subscribe to mempool updates about transactions removed
        from the mempool, batching all removals into one message per peer.
        """
        total_removals = sum(len(r.items) for r in mempool_removals)
        if total_removals == 0:
            return

        start_time = time.monotonic()

        self.log.debug(f"Broadcasting {total_removals} removed transactions to peers")

        # Only peers advertising the MEMPOOL_UPDATES capability care about these messages.
        all_peers = {
            peer_id
            for peer_id, peer in self.server.all_connections.items()
            if peer.has_capability(Capability.MEMPOOL_UPDATES)
        }

        if len(all_peers) == 0:
            return

        # Per-peer accumulator so each peer receives a single MempoolItemsRemoved message.
        removals_to_send: Dict[bytes32, List[RemovedMempoolItem]] = dict()

        for removal_info in mempool_removals:
            for internal_mempool_item in removal_info.items:
                conds = internal_mempool_item.conds
                assert conds is not None

                hints_for_removals = await self.hint_store.get_hints([bytes32(spend.coin_id) for spend in conds.spends])
                # Only notify peers subscribed to a coin id / puzzle hash this bundle touches.
                peer_ids = all_peers.intersection(
                    peers_for_spend_bundle(self.subscriptions, conds, set(hints_for_removals))
                )

                if len(peer_ids) == 0:
                    continue

                transaction_id = internal_mempool_item.spend_bundle.name()

                self.log.debug(f"Broadcasting removed transaction {transaction_id} to " f"wallet peers {peer_ids}")

                for peer_id in peer_ids:
                    peer = self.server.all_connections.get(peer_id)

                    if peer is None:
                        continue

                    removal = wallet_protocol.RemovedMempoolItem(transaction_id, uint8(removal_info.reason.value))
                    removals_to_send.setdefault(peer.peer_node_id, []).append(removal)

        for peer_id, removals in removals_to_send.items():
            peer = self.server.all_connections.get(peer_id)

            if peer is None:
                continue

            msg = make_msg(
                ProtocolMessageTypes.mempool_items_removed,
                wallet_protocol.MempoolItemsRemoved(removals),
            )
            await peer.send_message(msg)

        total_time = time.monotonic() - start_time

        # Warn if the broadcast took unusually long (slow peers can stall this loop).
        self.log.log(
            logging.DEBUG if total_time < 0.5 else logging.WARNING,
            f"Broadcasting {total_removals} removed transactions "
            f"to {len(removals_to_send)} peers took {total_time:.4f}s",
        )
2634
+
2635
+ async def _needs_compact_proof(
2636
+ self, vdf_info: VDFInfo, header_block: HeaderBlock, field_vdf: CompressibleVDFField
2637
+ ) -> bool:
2638
+ if field_vdf == CompressibleVDFField.CC_EOS_VDF:
2639
+ for sub_slot in header_block.finished_sub_slots:
2640
+ if sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf == vdf_info:
2641
+ if (
2642
+ sub_slot.proofs.challenge_chain_slot_proof.witness_type == 0
2643
+ and sub_slot.proofs.challenge_chain_slot_proof.normalized_to_identity
2644
+ ):
2645
+ return False
2646
+ return True
2647
+ if field_vdf == CompressibleVDFField.ICC_EOS_VDF:
2648
+ for sub_slot in header_block.finished_sub_slots:
2649
+ if (
2650
+ sub_slot.infused_challenge_chain is not None
2651
+ and sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf == vdf_info
2652
+ ):
2653
+ assert sub_slot.proofs.infused_challenge_chain_slot_proof is not None
2654
+ if (
2655
+ sub_slot.proofs.infused_challenge_chain_slot_proof.witness_type == 0
2656
+ and sub_slot.proofs.infused_challenge_chain_slot_proof.normalized_to_identity
2657
+ ):
2658
+ return False
2659
+ return True
2660
+ if field_vdf == CompressibleVDFField.CC_SP_VDF:
2661
+ if header_block.reward_chain_block.challenge_chain_sp_vdf is None:
2662
+ return False
2663
+ if vdf_info == header_block.reward_chain_block.challenge_chain_sp_vdf:
2664
+ assert header_block.challenge_chain_sp_proof is not None
2665
+ if (
2666
+ header_block.challenge_chain_sp_proof.witness_type == 0
2667
+ and header_block.challenge_chain_sp_proof.normalized_to_identity
2668
+ ):
2669
+ return False
2670
+ return True
2671
+ if field_vdf == CompressibleVDFField.CC_IP_VDF:
2672
+ if vdf_info == header_block.reward_chain_block.challenge_chain_ip_vdf:
2673
+ if (
2674
+ header_block.challenge_chain_ip_proof.witness_type == 0
2675
+ and header_block.challenge_chain_ip_proof.normalized_to_identity
2676
+ ):
2677
+ return False
2678
+ return True
2679
+ return False
2680
+
2681
+ async def _can_accept_compact_proof(
2682
+ self,
2683
+ vdf_info: VDFInfo,
2684
+ vdf_proof: VDFProof,
2685
+ height: uint32,
2686
+ header_hash: bytes32,
2687
+ field_vdf: CompressibleVDFField,
2688
+ ) -> bool:
2689
+ """
2690
+ - Checks if the provided proof is indeed compact.
2691
+ - Checks if proof verifies given the vdf_info from the start of sub-slot.
2692
+ - Checks if the provided vdf_info is correct, assuming it refers to the start of sub-slot.
2693
+ - Checks if the existing proof was non-compact. Ignore this proof if we already have a compact proof.
2694
+ """
2695
+ is_fully_compactified = await self.block_store.is_fully_compactified(header_hash)
2696
+ if is_fully_compactified is None or is_fully_compactified:
2697
+ self.log.info(f"Already compactified block: {header_hash}. Ignoring.")
2698
+ return False
2699
+ peak = self.blockchain.get_peak()
2700
+ if peak is None or peak.height - height < 5:
2701
+ self.log.debug("Will not compactify recent block")
2702
+ return False
2703
+ if vdf_proof.witness_type > 0 or not vdf_proof.normalized_to_identity:
2704
+ self.log.error(f"Received vdf proof is not compact: {vdf_proof}.")
2705
+ return False
2706
+ if not validate_vdf(vdf_proof, self.constants, ClassgroupElement.get_default_element(), vdf_info):
2707
+ self.log.error(f"Received compact vdf proof is not valid: {vdf_proof}.")
2708
+ return False
2709
+ header_block = await self.blockchain.get_header_block_by_height(height, header_hash, tx_filter=False)
2710
+ if header_block is None:
2711
+ self.log.error(f"Can't find block for given compact vdf. Height: {height} Header hash: {header_hash}")
2712
+ return False
2713
+ is_new_proof = await self._needs_compact_proof(vdf_info, header_block, field_vdf)
2714
+ if not is_new_proof:
2715
+ self.log.info(f"Duplicate compact proof. Height: {height}. Header hash: {header_hash}.")
2716
+ return is_new_proof
2717
+
2718
+ # returns True if we ended up replacing the proof, and False otherwise
2719
+ async def _replace_proof(
2720
+ self,
2721
+ vdf_info: VDFInfo,
2722
+ vdf_proof: VDFProof,
2723
+ header_hash: bytes32,
2724
+ field_vdf: CompressibleVDFField,
2725
+ ) -> bool:
2726
+ block = await self.block_store.get_full_block(header_hash)
2727
+ if block is None:
2728
+ return False
2729
+
2730
+ new_block = None
2731
+
2732
+ if field_vdf == CompressibleVDFField.CC_EOS_VDF:
2733
+ for index, sub_slot in enumerate(block.finished_sub_slots):
2734
+ if sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf == vdf_info:
2735
+ new_proofs = sub_slot.proofs.replace(challenge_chain_slot_proof=vdf_proof)
2736
+ new_subslot = sub_slot.replace(proofs=new_proofs)
2737
+ new_finished_subslots = block.finished_sub_slots
2738
+ new_finished_subslots[index] = new_subslot
2739
+ new_block = block.replace(finished_sub_slots=new_finished_subslots)
2740
+ break
2741
+ if field_vdf == CompressibleVDFField.ICC_EOS_VDF:
2742
+ for index, sub_slot in enumerate(block.finished_sub_slots):
2743
+ if (
2744
+ sub_slot.infused_challenge_chain is not None
2745
+ and sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf == vdf_info
2746
+ ):
2747
+ new_proofs = sub_slot.proofs.replace(infused_challenge_chain_slot_proof=vdf_proof)
2748
+ new_subslot = sub_slot.replace(proofs=new_proofs)
2749
+ new_finished_subslots = block.finished_sub_slots
2750
+ new_finished_subslots[index] = new_subslot
2751
+ new_block = block.replace(finished_sub_slots=new_finished_subslots)
2752
+ break
2753
+ if field_vdf == CompressibleVDFField.CC_SP_VDF:
2754
+ if block.reward_chain_block.challenge_chain_sp_vdf == vdf_info:
2755
+ assert block.challenge_chain_sp_proof is not None
2756
+ new_block = block.replace(challenge_chain_sp_proof=vdf_proof)
2757
+ if field_vdf == CompressibleVDFField.CC_IP_VDF:
2758
+ if block.reward_chain_block.challenge_chain_ip_vdf == vdf_info:
2759
+ new_block = block.replace(challenge_chain_ip_proof=vdf_proof)
2760
+ if new_block is None:
2761
+ return False
2762
+ async with self.db_wrapper.writer():
2763
+ try:
2764
+ await self.block_store.replace_proof(header_hash, new_block)
2765
+ return True
2766
+ except BaseException as e:
2767
+ self.log.error(
2768
+ f"_replace_proof error while adding block {block.header_hash} height {block.height},"
2769
+ f" rolling back: {e} {traceback.format_exc()}"
2770
+ )
2771
+ raise
2772
+
2773
+ async def add_compact_proof_of_time(self, request: timelord_protocol.RespondCompactProofOfTime) -> None:
2774
+ peak = self.blockchain.get_peak()
2775
+ if peak is None or peak.height - request.height < 5:
2776
+ self.log.info(f"Ignoring add_compact_proof_of_time, height {request.height} too recent.")
2777
+ return None
2778
+
2779
+ field_vdf = CompressibleVDFField(int(request.field_vdf))
2780
+ if not await self._can_accept_compact_proof(
2781
+ request.vdf_info, request.vdf_proof, request.height, request.header_hash, field_vdf
2782
+ ):
2783
+ return None
2784
+ async with self.blockchain.compact_proof_lock:
2785
+ replaced = await self._replace_proof(request.vdf_info, request.vdf_proof, request.header_hash, field_vdf)
2786
+ if not replaced:
2787
+ self.log.error(f"Could not replace compact proof: {request.height}")
2788
+ return None
2789
+ self.log.info(f"Replaced compact proof at height {request.height}")
2790
+ msg = make_msg(
2791
+ ProtocolMessageTypes.new_compact_vdf,
2792
+ full_node_protocol.NewCompactVDF(request.height, request.header_hash, request.field_vdf, request.vdf_info),
2793
+ )
2794
+ if self._server is not None:
2795
+ await self.server.send_to_all([msg], NodeType.FULL_NODE)
2796
+
2797
+ async def new_compact_vdf(self, request: full_node_protocol.NewCompactVDF, peer: WSChiaConnection) -> None:
2798
+ peak = self.blockchain.get_peak()
2799
+ if peak is None or peak.height - request.height < 5:
2800
+ self.log.info(f"Ignoring new_compact_vdf, height {request.height} too recent.")
2801
+ return None
2802
+ is_fully_compactified = await self.block_store.is_fully_compactified(request.header_hash)
2803
+ if is_fully_compactified is None or is_fully_compactified:
2804
+ return None
2805
+ header_block = await self.blockchain.get_header_block_by_height(
2806
+ request.height, request.header_hash, tx_filter=False
2807
+ )
2808
+ if header_block is None:
2809
+ return None
2810
+ field_vdf = CompressibleVDFField(int(request.field_vdf))
2811
+ if await self._needs_compact_proof(request.vdf_info, header_block, field_vdf):
2812
+ peer_request = full_node_protocol.RequestCompactVDF(
2813
+ request.height, request.header_hash, request.field_vdf, request.vdf_info
2814
+ )
2815
+ response = await peer.call_api(FullNodeAPI.request_compact_vdf, peer_request, timeout=10)
2816
+ if response is not None and isinstance(response, full_node_protocol.RespondCompactVDF):
2817
+ await self.add_compact_vdf(response, peer)
2818
+
2819
+ async def request_compact_vdf(self, request: full_node_protocol.RequestCompactVDF, peer: WSChiaConnection) -> None:
2820
+ header_block = await self.blockchain.get_header_block_by_height(
2821
+ request.height, request.header_hash, tx_filter=False
2822
+ )
2823
+ if header_block is None:
2824
+ return None
2825
+ vdf_proof: Optional[VDFProof] = None
2826
+ field_vdf = CompressibleVDFField(int(request.field_vdf))
2827
+ if field_vdf == CompressibleVDFField.CC_EOS_VDF:
2828
+ for sub_slot in header_block.finished_sub_slots:
2829
+ if sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf == request.vdf_info:
2830
+ vdf_proof = sub_slot.proofs.challenge_chain_slot_proof
2831
+ break
2832
+ if field_vdf == CompressibleVDFField.ICC_EOS_VDF:
2833
+ for sub_slot in header_block.finished_sub_slots:
2834
+ if (
2835
+ sub_slot.infused_challenge_chain is not None
2836
+ and sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf == request.vdf_info
2837
+ ):
2838
+ vdf_proof = sub_slot.proofs.infused_challenge_chain_slot_proof
2839
+ break
2840
+ if (
2841
+ field_vdf == CompressibleVDFField.CC_SP_VDF
2842
+ and header_block.reward_chain_block.challenge_chain_sp_vdf == request.vdf_info
2843
+ ):
2844
+ vdf_proof = header_block.challenge_chain_sp_proof
2845
+ if (
2846
+ field_vdf == CompressibleVDFField.CC_IP_VDF
2847
+ and header_block.reward_chain_block.challenge_chain_ip_vdf == request.vdf_info
2848
+ ):
2849
+ vdf_proof = header_block.challenge_chain_ip_proof
2850
+ if vdf_proof is None or vdf_proof.witness_type > 0 or not vdf_proof.normalized_to_identity:
2851
+ self.log.error(f"{peer} requested compact vdf we don't have, height: {request.height}.")
2852
+ return None
2853
+ compact_vdf = full_node_protocol.RespondCompactVDF(
2854
+ request.height,
2855
+ request.header_hash,
2856
+ request.field_vdf,
2857
+ request.vdf_info,
2858
+ vdf_proof,
2859
+ )
2860
+ msg = make_msg(ProtocolMessageTypes.respond_compact_vdf, compact_vdf)
2861
+ await peer.send_message(msg)
2862
+
2863
+ async def add_compact_vdf(self, request: full_node_protocol.RespondCompactVDF, peer: WSChiaConnection) -> None:
2864
+ field_vdf = CompressibleVDFField(int(request.field_vdf))
2865
+ if not await self._can_accept_compact_proof(
2866
+ request.vdf_info, request.vdf_proof, request.height, request.header_hash, field_vdf
2867
+ ):
2868
+ return None
2869
+ async with self.blockchain.compact_proof_lock:
2870
+ if self.blockchain.seen_compact_proofs(request.vdf_info, request.height):
2871
+ return None
2872
+ replaced = await self._replace_proof(request.vdf_info, request.vdf_proof, request.header_hash, field_vdf)
2873
+ if not replaced:
2874
+ self.log.error(f"Could not replace compact proof: {request.height}")
2875
+ return None
2876
+ msg = make_msg(
2877
+ ProtocolMessageTypes.new_compact_vdf,
2878
+ full_node_protocol.NewCompactVDF(request.height, request.header_hash, request.field_vdf, request.vdf_info),
2879
+ )
2880
+ if self._server is not None:
2881
+ await self.server.send_to_all([msg], NodeType.FULL_NODE, peer.peer_node_id)
2882
+
2883
+ def in_bad_peak_cache(self, wp: WeightProof) -> bool:
2884
+ for block in wp.recent_chain_data:
2885
+ if block.header_hash in self.bad_peak_cache.keys():
2886
+ return True
2887
+ return False
2888
+
2889
+ def add_to_bad_peak_cache(self, peak_header_hash: bytes32, peak_height: uint32) -> None:
2890
+ curr_height = self.blockchain.get_peak_height()
2891
+
2892
+ if curr_height is None:
2893
+ self.log.debug(f"add bad peak {peak_header_hash} to cache")
2894
+ self.bad_peak_cache[peak_header_hash] = peak_height
2895
+ return
2896
+ minimum_cache_height = curr_height - (2 * self.constants.SUB_EPOCH_BLOCKS)
2897
+ if peak_height < minimum_cache_height:
2898
+ return
2899
+
2900
+ new_cache = {}
2901
+ self.log.info(f"add bad peak {peak_header_hash} to cache")
2902
+ new_cache[peak_header_hash] = peak_height
2903
+ min_height = peak_height
2904
+ min_block = peak_header_hash
2905
+ for header_hash, height in self.bad_peak_cache.items():
2906
+ if height < minimum_cache_height:
2907
+ self.log.debug(f"remove bad peak {peak_header_hash} from cache")
2908
+ continue
2909
+ if height < min_height:
2910
+ min_block = header_hash
2911
+ new_cache[header_hash] = height
2912
+
2913
+ if len(new_cache) > self.config.get("bad_peak_cache_size", 100):
2914
+ del new_cache[min_block]
2915
+
2916
+ self.bad_peak_cache = new_cache
2917
+
2918
    async def broadcast_uncompact_blocks(
        self, uncompact_interval_scan: int, target_uncompact_proofs: int, sanitize_weight_proof_only: bool
    ) -> None:
        """
        Background task: periodically pick random blocks that still carry non-compact VDF
        proofs and ask connected (bluebox) timelords to compactify them.

        Args:
            uncompact_interval_scan: seconds to sleep between scans.
            target_uncompact_proofs: number of proof requests to send per connected timelord.
            sanitize_weight_proof_only: if True, only request proofs relevant to weight
                proofs (end-of-slot VDFs, plus SP/IP VDFs of challenge blocks).
        """
        try:
            while not self._shut_down:
                # Don't interfere with syncing; poll until any sync finishes.
                while self.sync_store.get_sync_mode() or self.sync_store.get_long_sync():
                    if self._shut_down:
                        return None
                    await asyncio.sleep(30)

                broadcast_list: List[timelord_protocol.RequestCompactProofOfTime] = []

                self.log.info("Getting random heights for bluebox to compact")

                if self._server is None:
                    self.log.info("Not broadcasting uncompact blocks, no server found")
                    await asyncio.sleep(uncompact_interval_scan)
                    continue
                connected_timelords = self.server.get_connections(NodeType.TIMELORD)

                total_target_uncompact_proofs = target_uncompact_proofs * max(1, len(connected_timelords))
                heights = await self.block_store.get_random_not_compactified(total_target_uncompact_proofs)
                self.log.info("Heights found for bluebox to compact: [%s]", ", ".join(map(str, heights)))

                for h in heights:
                    headers = await self.blockchain.get_header_blocks_in_range(h, h, tx_filter=False)
                    records: Dict[bytes32, BlockRecord] = {}
                    if sanitize_weight_proof_only:
                        records = await self.blockchain.get_block_records_in_range(h, h)
                    for header in headers.values():
                        expected_header_hash = self.blockchain.height_to_hash(header.height)
                        # Skip orphans: only compactify blocks on the canonical chain.
                        if header.header_hash != expected_header_hash:
                            continue
                        if sanitize_weight_proof_only:
                            assert header.header_hash in records
                            record = records[header.header_hash]
                        for sub_slot in header.finished_sub_slots:
                            # Request a compact CC end-of-slot proof if ours is non-compact.
                            if (
                                sub_slot.proofs.challenge_chain_slot_proof.witness_type > 0
                                or not sub_slot.proofs.challenge_chain_slot_proof.normalized_to_identity
                            ):
                                broadcast_list.append(
                                    timelord_protocol.RequestCompactProofOfTime(
                                        sub_slot.challenge_chain.challenge_chain_end_of_slot_vdf,
                                        header.header_hash,
                                        header.height,
                                        uint8(CompressibleVDFField.CC_EOS_VDF),
                                    )
                                )
                            # Same for the infused challenge chain end-of-slot proof, if present.
                            if sub_slot.proofs.infused_challenge_chain_slot_proof is not None and (
                                sub_slot.proofs.infused_challenge_chain_slot_proof.witness_type > 0
                                or not sub_slot.proofs.infused_challenge_chain_slot_proof.normalized_to_identity
                            ):
                                assert sub_slot.infused_challenge_chain is not None
                                broadcast_list.append(
                                    timelord_protocol.RequestCompactProofOfTime(
                                        sub_slot.infused_challenge_chain.infused_challenge_chain_end_of_slot_vdf,
                                        header.header_hash,
                                        header.height,
                                        uint8(CompressibleVDFField.ICC_EOS_VDF),
                                    )
                                )
                        # Running in 'sanitize_weight_proof_only' ignores CC_SP_VDF and CC_IP_VDF
                        # unless this is a challenge block.
                        if sanitize_weight_proof_only:
                            if not record.is_challenge_block(self.constants):
                                continue
                        if header.challenge_chain_sp_proof is not None and (
                            header.challenge_chain_sp_proof.witness_type > 0
                            or not header.challenge_chain_sp_proof.normalized_to_identity
                        ):
                            assert header.reward_chain_block.challenge_chain_sp_vdf is not None
                            broadcast_list.append(
                                timelord_protocol.RequestCompactProofOfTime(
                                    header.reward_chain_block.challenge_chain_sp_vdf,
                                    header.header_hash,
                                    header.height,
                                    uint8(CompressibleVDFField.CC_SP_VDF),
                                )
                            )

                        if (
                            header.challenge_chain_ip_proof.witness_type > 0
                            or not header.challenge_chain_ip_proof.normalized_to_identity
                        ):
                            broadcast_list.append(
                                timelord_protocol.RequestCompactProofOfTime(
                                    header.reward_chain_block.challenge_chain_ip_vdf,
                                    header.header_hash,
                                    header.height,
                                    uint8(CompressibleVDFField.CC_IP_VDF),
                                )
                            )

                # Split the requests into chunks, nominally one chunk per timelord.
                broadcast_list_chunks: List[List[timelord_protocol.RequestCompactProofOfTime]] = []
                for index in range(0, len(broadcast_list), target_uncompact_proofs):
                    broadcast_list_chunks.append(broadcast_list[index : index + target_uncompact_proofs])
                if len(broadcast_list_chunks) == 0:
                    self.log.info("Did not find any uncompact blocks.")
                    await asyncio.sleep(uncompact_interval_scan)
                    continue
                if self.sync_store.get_sync_mode() or self.sync_store.get_long_sync():
                    await asyncio.sleep(uncompact_interval_scan)
                    continue
                if self._server is not None:
                    self.log.info(f"Broadcasting {len(broadcast_list)} items to the bluebox")
                    connected_timelords = self.server.get_connections(NodeType.TIMELORD)
                    chunk_index = 0
                    for connection in connected_timelords:
                        peer_node_id = connection.peer_node_id
                        msgs = []
                        # Hand each timelord the next chunk, wrapping around if there are
                        # more timelords than chunks.
                        broadcast_list = broadcast_list_chunks[chunk_index]
                        chunk_index = (chunk_index + 1) % len(broadcast_list_chunks)
                        for new_pot in broadcast_list:
                            msg = make_msg(ProtocolMessageTypes.request_compact_proof_of_time, new_pot)
                            msgs.append(msg)
                        await self.server.send_to_specific(msgs, peer_node_id)
                await asyncio.sleep(uncompact_interval_scan)
        except Exception as e:
            error_stack = traceback.format_exc()
            self.log.error(f"Exception in broadcast_uncompact_blocks: {e}")
            self.log.error(f"Exception Stack: {error_stack}")
3040
+
3041
+
3042
async def node_next_block_check(
    peer: WSChiaConnection, potential_peek: uint32, blockchain: BlockchainInterface
) -> bool:
    """Ask `peer` for the block at height `potential_peek` and report whether it extends our peak."""
    response: Optional[Any] = await peer.call_api(
        FullNodeAPI.request_block, full_node_protocol.RequestBlock(potential_peek, True)
    )
    # isinstance also rejects a None (timed-out) response.
    if not isinstance(response, full_node_protocol.RespondBlock):
        return False
    peak = blockchain.get_peak()
    return peak is not None and response.block.prev_header_hash == peak.header_hash