chia_blockchain-2.5.1rc1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- chia/__init__.py +10 -0
- chia/__main__.py +5 -0
- chia/_tests/README.md +53 -0
- chia/_tests/__init__.py +0 -0
- chia/_tests/blockchain/__init__.py +0 -0
- chia/_tests/blockchain/blockchain_test_utils.py +195 -0
- chia/_tests/blockchain/config.py +4 -0
- chia/_tests/blockchain/test_augmented_chain.py +145 -0
- chia/_tests/blockchain/test_blockchain.py +4202 -0
- chia/_tests/blockchain/test_blockchain_transactions.py +1031 -0
- chia/_tests/blockchain/test_build_chains.py +59 -0
- chia/_tests/blockchain/test_get_block_generator.py +72 -0
- chia/_tests/blockchain/test_lookup_fork_chain.py +194 -0
- chia/_tests/build-init-files.py +92 -0
- chia/_tests/build-job-matrix.py +204 -0
- chia/_tests/check_pytest_monitor_output.py +34 -0
- chia/_tests/check_sql_statements.py +72 -0
- chia/_tests/chia-start-sim +42 -0
- chia/_tests/clvm/__init__.py +0 -0
- chia/_tests/clvm/benchmark_costs.py +23 -0
- chia/_tests/clvm/coin_store.py +149 -0
- chia/_tests/clvm/test_chialisp_deserialization.py +101 -0
- chia/_tests/clvm/test_clvm_step.py +37 -0
- chia/_tests/clvm/test_condition_codes.py +13 -0
- chia/_tests/clvm/test_curry_and_treehash.py +55 -0
- chia/_tests/clvm/test_message_conditions.py +184 -0
- chia/_tests/clvm/test_program.py +150 -0
- chia/_tests/clvm/test_puzzle_compression.py +143 -0
- chia/_tests/clvm/test_puzzle_drivers.py +45 -0
- chia/_tests/clvm/test_puzzles.py +242 -0
- chia/_tests/clvm/test_singletons.py +540 -0
- chia/_tests/clvm/test_spend_sim.py +181 -0
- chia/_tests/cmds/__init__.py +0 -0
- chia/_tests/cmds/cmd_test_utils.py +469 -0
- chia/_tests/cmds/config.py +3 -0
- chia/_tests/cmds/conftest.py +23 -0
- chia/_tests/cmds/test_click_types.py +200 -0
- chia/_tests/cmds/test_cmd_framework.py +620 -0
- chia/_tests/cmds/test_cmds_util.py +97 -0
- chia/_tests/cmds/test_daemon.py +92 -0
- chia/_tests/cmds/test_dev_gh.py +131 -0
- chia/_tests/cmds/test_farm_cmd.py +66 -0
- chia/_tests/cmds/test_show.py +116 -0
- chia/_tests/cmds/test_sim.py +207 -0
- chia/_tests/cmds/test_timelock_args.py +75 -0
- chia/_tests/cmds/test_tx_config_args.py +154 -0
- chia/_tests/cmds/testing_classes.py +59 -0
- chia/_tests/cmds/wallet/__init__.py +0 -0
- chia/_tests/cmds/wallet/test_consts.py +47 -0
- chia/_tests/cmds/wallet/test_dao.py +565 -0
- chia/_tests/cmds/wallet/test_did.py +403 -0
- chia/_tests/cmds/wallet/test_nft.py +471 -0
- chia/_tests/cmds/wallet/test_notifications.py +124 -0
- chia/_tests/cmds/wallet/test_offer.toffer +1 -0
- chia/_tests/cmds/wallet/test_tx_decorators.py +27 -0
- chia/_tests/cmds/wallet/test_vcs.py +400 -0
- chia/_tests/cmds/wallet/test_wallet.py +1125 -0
- chia/_tests/cmds/wallet/test_wallet_check.py +109 -0
- chia/_tests/conftest.py +1419 -0
- chia/_tests/connection_utils.py +125 -0
- chia/_tests/core/__init__.py +0 -0
- chia/_tests/core/cmds/__init__.py +0 -0
- chia/_tests/core/cmds/test_beta.py +382 -0
- chia/_tests/core/cmds/test_keys.py +1734 -0
- chia/_tests/core/cmds/test_wallet.py +126 -0
- chia/_tests/core/config.py +3 -0
- chia/_tests/core/consensus/__init__.py +0 -0
- chia/_tests/core/consensus/test_block_creation.py +54 -0
- chia/_tests/core/consensus/test_pot_iterations.py +117 -0
- chia/_tests/core/custom_types/__init__.py +0 -0
- chia/_tests/core/custom_types/test_coin.py +107 -0
- chia/_tests/core/custom_types/test_proof_of_space.py +144 -0
- chia/_tests/core/custom_types/test_spend_bundle.py +70 -0
- chia/_tests/core/daemon/__init__.py +0 -0
- chia/_tests/core/daemon/config.py +4 -0
- chia/_tests/core/daemon/test_daemon.py +2128 -0
- chia/_tests/core/daemon/test_daemon_register.py +109 -0
- chia/_tests/core/daemon/test_keychain_proxy.py +101 -0
- chia/_tests/core/data_layer/__init__.py +0 -0
- chia/_tests/core/data_layer/config.py +5 -0
- chia/_tests/core/data_layer/conftest.py +106 -0
- chia/_tests/core/data_layer/test_data_cli.py +56 -0
- chia/_tests/core/data_layer/test_data_layer.py +83 -0
- chia/_tests/core/data_layer/test_data_layer_util.py +218 -0
- chia/_tests/core/data_layer/test_data_rpc.py +3847 -0
- chia/_tests/core/data_layer/test_data_store.py +2424 -0
- chia/_tests/core/data_layer/test_data_store_schema.py +381 -0
- chia/_tests/core/data_layer/test_plugin.py +91 -0
- chia/_tests/core/data_layer/util.py +233 -0
- chia/_tests/core/farmer/__init__.py +0 -0
- chia/_tests/core/farmer/config.py +3 -0
- chia/_tests/core/farmer/test_farmer_api.py +103 -0
- chia/_tests/core/full_node/__init__.py +0 -0
- chia/_tests/core/full_node/config.py +4 -0
- chia/_tests/core/full_node/dos/__init__.py +0 -0
- chia/_tests/core/full_node/dos/config.py +3 -0
- chia/_tests/core/full_node/full_sync/__init__.py +0 -0
- chia/_tests/core/full_node/full_sync/config.py +4 -0
- chia/_tests/core/full_node/full_sync/test_full_sync.py +443 -0
- chia/_tests/core/full_node/ram_db.py +27 -0
- chia/_tests/core/full_node/stores/__init__.py +0 -0
- chia/_tests/core/full_node/stores/config.py +4 -0
- chia/_tests/core/full_node/stores/test_block_store.py +590 -0
- chia/_tests/core/full_node/stores/test_coin_store.py +897 -0
- chia/_tests/core/full_node/stores/test_full_node_store.py +1219 -0
- chia/_tests/core/full_node/stores/test_hint_store.py +229 -0
- chia/_tests/core/full_node/stores/test_sync_store.py +135 -0
- chia/_tests/core/full_node/test_address_manager.py +588 -0
- chia/_tests/core/full_node/test_block_height_map.py +556 -0
- chia/_tests/core/full_node/test_conditions.py +556 -0
- chia/_tests/core/full_node/test_full_node.py +2700 -0
- chia/_tests/core/full_node/test_generator_tools.py +82 -0
- chia/_tests/core/full_node/test_hint_management.py +104 -0
- chia/_tests/core/full_node/test_node_load.py +34 -0
- chia/_tests/core/full_node/test_performance.py +179 -0
- chia/_tests/core/full_node/test_subscriptions.py +492 -0
- chia/_tests/core/full_node/test_transactions.py +203 -0
- chia/_tests/core/full_node/test_tx_processing_queue.py +155 -0
- chia/_tests/core/large_block.py +2388 -0
- chia/_tests/core/make_block_generator.py +70 -0
- chia/_tests/core/mempool/__init__.py +0 -0
- chia/_tests/core/mempool/config.py +4 -0
- chia/_tests/core/mempool/test_mempool.py +3255 -0
- chia/_tests/core/mempool/test_mempool_fee_estimator.py +104 -0
- chia/_tests/core/mempool/test_mempool_fee_protocol.py +55 -0
- chia/_tests/core/mempool/test_mempool_item_queries.py +190 -0
- chia/_tests/core/mempool/test_mempool_manager.py +2084 -0
- chia/_tests/core/mempool/test_mempool_performance.py +64 -0
- chia/_tests/core/mempool/test_singleton_fast_forward.py +567 -0
- chia/_tests/core/node_height.py +28 -0
- chia/_tests/core/server/__init__.py +0 -0
- chia/_tests/core/server/config.py +3 -0
- chia/_tests/core/server/flood.py +84 -0
- chia/_tests/core/server/serve.py +135 -0
- chia/_tests/core/server/test_api_protocol.py +21 -0
- chia/_tests/core/server/test_capabilities.py +66 -0
- chia/_tests/core/server/test_dos.py +319 -0
- chia/_tests/core/server/test_event_loop.py +109 -0
- chia/_tests/core/server/test_loop.py +294 -0
- chia/_tests/core/server/test_node_discovery.py +73 -0
- chia/_tests/core/server/test_rate_limits.py +482 -0
- chia/_tests/core/server/test_server.py +226 -0
- chia/_tests/core/server/test_upnp.py +8 -0
- chia/_tests/core/services/__init__.py +0 -0
- chia/_tests/core/services/config.py +3 -0
- chia/_tests/core/services/test_services.py +188 -0
- chia/_tests/core/ssl/__init__.py +0 -0
- chia/_tests/core/ssl/config.py +3 -0
- chia/_tests/core/ssl/test_ssl.py +202 -0
- chia/_tests/core/test_coins.py +33 -0
- chia/_tests/core/test_cost_calculation.py +313 -0
- chia/_tests/core/test_crawler.py +175 -0
- chia/_tests/core/test_crawler_rpc.py +53 -0
- chia/_tests/core/test_daemon_rpc.py +24 -0
- chia/_tests/core/test_db_conversion.py +130 -0
- chia/_tests/core/test_db_validation.py +162 -0
- chia/_tests/core/test_farmer_harvester_rpc.py +505 -0
- chia/_tests/core/test_filter.py +35 -0
- chia/_tests/core/test_full_node_rpc.py +768 -0
- chia/_tests/core/test_merkle_set.py +343 -0
- chia/_tests/core/test_program.py +47 -0
- chia/_tests/core/test_rpc_util.py +86 -0
- chia/_tests/core/test_seeder.py +420 -0
- chia/_tests/core/test_setproctitle.py +13 -0
- chia/_tests/core/util/__init__.py +0 -0
- chia/_tests/core/util/config.py +4 -0
- chia/_tests/core/util/test_block_cache.py +44 -0
- chia/_tests/core/util/test_cached_bls.py +57 -0
- chia/_tests/core/util/test_config.py +337 -0
- chia/_tests/core/util/test_file_keyring_synchronization.py +105 -0
- chia/_tests/core/util/test_files.py +391 -0
- chia/_tests/core/util/test_jsonify.py +146 -0
- chia/_tests/core/util/test_keychain.py +522 -0
- chia/_tests/core/util/test_keyring_wrapper.py +491 -0
- chia/_tests/core/util/test_lockfile.py +380 -0
- chia/_tests/core/util/test_log_exceptions.py +187 -0
- chia/_tests/core/util/test_lru_cache.py +56 -0
- chia/_tests/core/util/test_significant_bits.py +40 -0
- chia/_tests/core/util/test_streamable.py +883 -0
- chia/_tests/db/__init__.py +0 -0
- chia/_tests/db/test_db_wrapper.py +566 -0
- chia/_tests/environments/__init__.py +0 -0
- chia/_tests/environments/common.py +35 -0
- chia/_tests/environments/full_node.py +47 -0
- chia/_tests/environments/wallet.py +429 -0
- chia/_tests/ether.py +19 -0
- chia/_tests/farmer_harvester/__init__.py +0 -0
- chia/_tests/farmer_harvester/config.py +3 -0
- chia/_tests/farmer_harvester/test_farmer.py +1264 -0
- chia/_tests/farmer_harvester/test_farmer_harvester.py +292 -0
- chia/_tests/farmer_harvester/test_filter_prefix_bits.py +131 -0
- chia/_tests/farmer_harvester/test_third_party_harvesters.py +528 -0
- chia/_tests/farmer_harvester/test_third_party_harvesters_data.json +29 -0
- chia/_tests/fee_estimation/__init__.py +0 -0
- chia/_tests/fee_estimation/config.py +3 -0
- chia/_tests/fee_estimation/test_fee_estimation_integration.py +262 -0
- chia/_tests/fee_estimation/test_fee_estimation_rpc.py +287 -0
- chia/_tests/fee_estimation/test_fee_estimation_unit_tests.py +144 -0
- chia/_tests/fee_estimation/test_mempoolitem_height_added.py +146 -0
- chia/_tests/generator/__init__.py +0 -0
- chia/_tests/generator/puzzles/__init__.py +0 -0
- chia/_tests/generator/puzzles/test_generator_deserialize.clsp +3 -0
- chia/_tests/generator/puzzles/test_generator_deserialize.clsp.hex +1 -0
- chia/_tests/generator/puzzles/test_multiple_generator_input_arguments.clsp +19 -0
- chia/_tests/generator/puzzles/test_multiple_generator_input_arguments.clsp.hex +1 -0
- chia/_tests/generator/test_compression.py +201 -0
- chia/_tests/generator/test_generator_types.py +44 -0
- chia/_tests/generator/test_rom.py +180 -0
- chia/_tests/plot_sync/__init__.py +0 -0
- chia/_tests/plot_sync/config.py +3 -0
- chia/_tests/plot_sync/test_delta.py +101 -0
- chia/_tests/plot_sync/test_plot_sync.py +618 -0
- chia/_tests/plot_sync/test_receiver.py +451 -0
- chia/_tests/plot_sync/test_sender.py +116 -0
- chia/_tests/plot_sync/test_sync_simulated.py +451 -0
- chia/_tests/plot_sync/util.py +68 -0
- chia/_tests/plotting/__init__.py +0 -0
- chia/_tests/plotting/config.py +3 -0
- chia/_tests/plotting/test_plot_manager.py +781 -0
- chia/_tests/plotting/util.py +12 -0
- chia/_tests/pools/__init__.py +0 -0
- chia/_tests/pools/config.py +5 -0
- chia/_tests/pools/test_pool_cli_parsing.py +128 -0
- chia/_tests/pools/test_pool_cmdline.py +1001 -0
- chia/_tests/pools/test_pool_config.py +42 -0
- chia/_tests/pools/test_pool_puzzles_lifecycle.py +397 -0
- chia/_tests/pools/test_pool_rpc.py +1123 -0
- chia/_tests/pools/test_pool_wallet.py +205 -0
- chia/_tests/pools/test_wallet_pool_store.py +161 -0
- chia/_tests/process_junit.py +348 -0
- chia/_tests/rpc/__init__.py +0 -0
- chia/_tests/rpc/test_rpc_client.py +138 -0
- chia/_tests/rpc/test_rpc_server.py +183 -0
- chia/_tests/simulation/__init__.py +0 -0
- chia/_tests/simulation/config.py +6 -0
- chia/_tests/simulation/test_simulation.py +501 -0
- chia/_tests/simulation/test_simulator.py +232 -0
- chia/_tests/simulation/test_start_simulator.py +107 -0
- chia/_tests/testconfig.py +13 -0
- chia/_tests/timelord/__init__.py +0 -0
- chia/_tests/timelord/config.py +3 -0
- chia/_tests/timelord/test_new_peak.py +437 -0
- chia/_tests/timelord/test_timelord.py +11 -0
- chia/_tests/tools/1315537.json +170 -0
- chia/_tests/tools/1315544.json +160 -0
- chia/_tests/tools/1315630.json +150 -0
- chia/_tests/tools/300000.json +105 -0
- chia/_tests/tools/442734.json +140 -0
- chia/_tests/tools/466212.json +130 -0
- chia/_tests/tools/__init__.py +0 -0
- chia/_tests/tools/config.py +5 -0
- chia/_tests/tools/test-blockchain-db.sqlite +0 -0
- chia/_tests/tools/test_full_sync.py +30 -0
- chia/_tests/tools/test_legacy_keyring.py +82 -0
- chia/_tests/tools/test_run_block.py +128 -0
- chia/_tests/tools/test_virtual_project.py +591 -0
- chia/_tests/util/__init__.py +0 -0
- chia/_tests/util/benchmark_cost.py +170 -0
- chia/_tests/util/benchmarks.py +153 -0
- chia/_tests/util/bip39_test_vectors.json +148 -0
- chia/_tests/util/blockchain.py +134 -0
- chia/_tests/util/blockchain_mock.py +132 -0
- chia/_tests/util/build_network_protocol_files.py +302 -0
- chia/_tests/util/clvm_generator.bin +0 -0
- chia/_tests/util/config.py +3 -0
- chia/_tests/util/constants.py +20 -0
- chia/_tests/util/db_connection.py +37 -0
- chia/_tests/util/full_sync.py +253 -0
- chia/_tests/util/gen_ssl_certs.py +114 -0
- chia/_tests/util/generator_tools_testing.py +45 -0
- chia/_tests/util/get_name_puzzle_conditions.py +52 -0
- chia/_tests/util/key_tool.py +36 -0
- chia/_tests/util/misc.py +675 -0
- chia/_tests/util/network_protocol_data.py +1072 -0
- chia/_tests/util/protocol_messages_bytes-v1.0 +0 -0
- chia/_tests/util/protocol_messages_json.py +2701 -0
- chia/_tests/util/rpc.py +26 -0
- chia/_tests/util/run_block.py +163 -0
- chia/_tests/util/setup_nodes.py +481 -0
- chia/_tests/util/spend_sim.py +492 -0
- chia/_tests/util/split_managers.py +102 -0
- chia/_tests/util/temp_file.py +14 -0
- chia/_tests/util/test_action_scope.py +144 -0
- chia/_tests/util/test_async_pool.py +366 -0
- chia/_tests/util/test_build_job_matrix.py +42 -0
- chia/_tests/util/test_build_network_protocol_files.py +7 -0
- chia/_tests/util/test_chia_version.py +50 -0
- chia/_tests/util/test_collection.py +11 -0
- chia/_tests/util/test_condition_tools.py +229 -0
- chia/_tests/util/test_config.py +426 -0
- chia/_tests/util/test_dump_keyring.py +60 -0
- chia/_tests/util/test_errors.py +10 -0
- chia/_tests/util/test_full_block_utils.py +279 -0
- chia/_tests/util/test_installed.py +20 -0
- chia/_tests/util/test_limited_semaphore.py +53 -0
- chia/_tests/util/test_logging_filter.py +42 -0
- chia/_tests/util/test_misc.py +445 -0
- chia/_tests/util/test_network.py +73 -0
- chia/_tests/util/test_network_protocol_files.py +578 -0
- chia/_tests/util/test_network_protocol_json.py +267 -0
- chia/_tests/util/test_network_protocol_test.py +256 -0
- chia/_tests/util/test_paginator.py +71 -0
- chia/_tests/util/test_pprint.py +17 -0
- chia/_tests/util/test_priority_mutex.py +488 -0
- chia/_tests/util/test_recursive_replace.py +116 -0
- chia/_tests/util/test_replace_str_to_bytes.py +137 -0
- chia/_tests/util/test_service_groups.py +15 -0
- chia/_tests/util/test_ssl_check.py +31 -0
- chia/_tests/util/test_testnet_overrides.py +19 -0
- chia/_tests/util/test_tests_misc.py +38 -0
- chia/_tests/util/test_timing.py +37 -0
- chia/_tests/util/test_trusted_peer.py +51 -0
- chia/_tests/util/time_out_assert.py +191 -0
- chia/_tests/wallet/__init__.py +0 -0
- chia/_tests/wallet/cat_wallet/__init__.py +0 -0
- chia/_tests/wallet/cat_wallet/config.py +4 -0
- chia/_tests/wallet/cat_wallet/test_cat_lifecycle.py +468 -0
- chia/_tests/wallet/cat_wallet/test_cat_outer_puzzle.py +69 -0
- chia/_tests/wallet/cat_wallet/test_cat_wallet.py +1826 -0
- chia/_tests/wallet/cat_wallet/test_offer_lifecycle.py +291 -0
- chia/_tests/wallet/cat_wallet/test_trades.py +2600 -0
- chia/_tests/wallet/clawback/__init__.py +0 -0
- chia/_tests/wallet/clawback/config.py +3 -0
- chia/_tests/wallet/clawback/test_clawback_decorator.py +78 -0
- chia/_tests/wallet/clawback/test_clawback_lifecycle.py +292 -0
- chia/_tests/wallet/clawback/test_clawback_metadata.py +50 -0
- chia/_tests/wallet/config.py +4 -0
- chia/_tests/wallet/conftest.py +278 -0
- chia/_tests/wallet/dao_wallet/__init__.py +0 -0
- chia/_tests/wallet/dao_wallet/config.py +3 -0
- chia/_tests/wallet/dao_wallet/test_dao_clvm.py +1330 -0
- chia/_tests/wallet/dao_wallet/test_dao_wallets.py +3488 -0
- chia/_tests/wallet/db_wallet/__init__.py +0 -0
- chia/_tests/wallet/db_wallet/config.py +3 -0
- chia/_tests/wallet/db_wallet/test_db_graftroot.py +141 -0
- chia/_tests/wallet/db_wallet/test_dl_offers.py +491 -0
- chia/_tests/wallet/db_wallet/test_dl_wallet.py +823 -0
- chia/_tests/wallet/did_wallet/__init__.py +0 -0
- chia/_tests/wallet/did_wallet/config.py +4 -0
- chia/_tests/wallet/did_wallet/test_did.py +2284 -0
- chia/_tests/wallet/nft_wallet/__init__.py +0 -0
- chia/_tests/wallet/nft_wallet/config.py +4 -0
- chia/_tests/wallet/nft_wallet/test_nft_1_offers.py +1493 -0
- chia/_tests/wallet/nft_wallet/test_nft_bulk_mint.py +1024 -0
- chia/_tests/wallet/nft_wallet/test_nft_lifecycle.py +375 -0
- chia/_tests/wallet/nft_wallet/test_nft_offers.py +1209 -0
- chia/_tests/wallet/nft_wallet/test_nft_puzzles.py +172 -0
- chia/_tests/wallet/nft_wallet/test_nft_wallet.py +2584 -0
- chia/_tests/wallet/nft_wallet/test_ownership_outer_puzzle.py +70 -0
- chia/_tests/wallet/rpc/__init__.py +0 -0
- chia/_tests/wallet/rpc/config.py +4 -0
- chia/_tests/wallet/rpc/test_dl_wallet_rpc.py +285 -0
- chia/_tests/wallet/rpc/test_wallet_rpc.py +3153 -0
- chia/_tests/wallet/simple_sync/__init__.py +0 -0
- chia/_tests/wallet/simple_sync/config.py +3 -0
- chia/_tests/wallet/simple_sync/test_simple_sync_protocol.py +718 -0
- chia/_tests/wallet/sync/__init__.py +0 -0
- chia/_tests/wallet/sync/config.py +4 -0
- chia/_tests/wallet/sync/test_wallet_sync.py +1692 -0
- chia/_tests/wallet/test_address_type.py +189 -0
- chia/_tests/wallet/test_bech32m.py +45 -0
- chia/_tests/wallet/test_clvm_streamable.py +244 -0
- chia/_tests/wallet/test_coin_management.py +354 -0
- chia/_tests/wallet/test_coin_selection.py +588 -0
- chia/_tests/wallet/test_conditions.py +400 -0
- chia/_tests/wallet/test_debug_spend_bundle.py +218 -0
- chia/_tests/wallet/test_new_wallet_protocol.py +1174 -0
- chia/_tests/wallet/test_nft_store.py +192 -0
- chia/_tests/wallet/test_notifications.py +196 -0
- chia/_tests/wallet/test_offer_parsing_performance.py +48 -0
- chia/_tests/wallet/test_puzzle_store.py +132 -0
- chia/_tests/wallet/test_sign_coin_spends.py +159 -0
- chia/_tests/wallet/test_signer_protocol.py +947 -0
- chia/_tests/wallet/test_singleton.py +122 -0
- chia/_tests/wallet/test_singleton_lifecycle_fast.py +772 -0
- chia/_tests/wallet/test_singleton_store.py +152 -0
- chia/_tests/wallet/test_taproot.py +19 -0
- chia/_tests/wallet/test_transaction_store.py +945 -0
- chia/_tests/wallet/test_util.py +185 -0
- chia/_tests/wallet/test_wallet.py +2139 -0
- chia/_tests/wallet/test_wallet_action_scope.py +85 -0
- chia/_tests/wallet/test_wallet_blockchain.py +111 -0
- chia/_tests/wallet/test_wallet_coin_store.py +1002 -0
- chia/_tests/wallet/test_wallet_interested_store.py +43 -0
- chia/_tests/wallet/test_wallet_key_val_store.py +40 -0
- chia/_tests/wallet/test_wallet_node.py +780 -0
- chia/_tests/wallet/test_wallet_retry.py +95 -0
- chia/_tests/wallet/test_wallet_state_manager.py +259 -0
- chia/_tests/wallet/test_wallet_test_framework.py +275 -0
- chia/_tests/wallet/test_wallet_trade_store.py +218 -0
- chia/_tests/wallet/test_wallet_user_store.py +34 -0
- chia/_tests/wallet/test_wallet_utils.py +156 -0
- chia/_tests/wallet/vc_wallet/__init__.py +0 -0
- chia/_tests/wallet/vc_wallet/config.py +3 -0
- chia/_tests/wallet/vc_wallet/test_cr_outer_puzzle.py +70 -0
- chia/_tests/wallet/vc_wallet/test_vc_lifecycle.py +883 -0
- chia/_tests/wallet/vc_wallet/test_vc_wallet.py +830 -0
- chia/_tests/wallet/wallet_block_tools.py +327 -0
- chia/_tests/weight_proof/__init__.py +0 -0
- chia/_tests/weight_proof/config.py +3 -0
- chia/_tests/weight_proof/test_weight_proof.py +528 -0
- chia/apis.py +19 -0
- chia/clvm/__init__.py +0 -0
- chia/cmds/__init__.py +0 -0
- chia/cmds/beta.py +184 -0
- chia/cmds/beta_funcs.py +137 -0
- chia/cmds/check_wallet_db.py +420 -0
- chia/cmds/chia.py +151 -0
- chia/cmds/cmd_classes.py +323 -0
- chia/cmds/cmd_helpers.py +242 -0
- chia/cmds/cmds_util.py +488 -0
- chia/cmds/coin_funcs.py +275 -0
- chia/cmds/coins.py +182 -0
- chia/cmds/completion.py +49 -0
- chia/cmds/configure.py +332 -0
- chia/cmds/dao.py +1064 -0
- chia/cmds/dao_funcs.py +598 -0
- chia/cmds/data.py +708 -0
- chia/cmds/data_funcs.py +385 -0
- chia/cmds/db.py +87 -0
- chia/cmds/db_backup_func.py +77 -0
- chia/cmds/db_upgrade_func.py +452 -0
- chia/cmds/db_validate_func.py +184 -0
- chia/cmds/dev.py +18 -0
- chia/cmds/farm.py +100 -0
- chia/cmds/farm_funcs.py +200 -0
- chia/cmds/gh.py +275 -0
- chia/cmds/init.py +63 -0
- chia/cmds/init_funcs.py +367 -0
- chia/cmds/installers.py +131 -0
- chia/cmds/keys.py +527 -0
- chia/cmds/keys_funcs.py +863 -0
- chia/cmds/netspace.py +50 -0
- chia/cmds/netspace_funcs.py +54 -0
- chia/cmds/options.py +32 -0
- chia/cmds/param_types.py +238 -0
- chia/cmds/passphrase.py +131 -0
- chia/cmds/passphrase_funcs.py +292 -0
- chia/cmds/peer.py +51 -0
- chia/cmds/peer_funcs.py +129 -0
- chia/cmds/plotnft.py +260 -0
- chia/cmds/plotnft_funcs.py +405 -0
- chia/cmds/plots.py +230 -0
- chia/cmds/plotters.py +18 -0
- chia/cmds/rpc.py +208 -0
- chia/cmds/show.py +72 -0
- chia/cmds/show_funcs.py +215 -0
- chia/cmds/signer.py +296 -0
- chia/cmds/sim.py +225 -0
- chia/cmds/sim_funcs.py +509 -0
- chia/cmds/start.py +24 -0
- chia/cmds/start_funcs.py +109 -0
- chia/cmds/stop.py +62 -0
- chia/cmds/units.py +9 -0
- chia/cmds/wallet.py +1901 -0
- chia/cmds/wallet_funcs.py +1874 -0
- chia/consensus/__init__.py +0 -0
- chia/consensus/block_body_validation.py +562 -0
- chia/consensus/block_creation.py +546 -0
- chia/consensus/block_header_validation.py +1059 -0
- chia/consensus/block_record.py +31 -0
- chia/consensus/block_rewards.py +53 -0
- chia/consensus/blockchain.py +1087 -0
- chia/consensus/blockchain_interface.py +56 -0
- chia/consensus/coinbase.py +30 -0
- chia/consensus/condition_costs.py +9 -0
- chia/consensus/constants.py +49 -0
- chia/consensus/cost_calculator.py +15 -0
- chia/consensus/default_constants.py +89 -0
- chia/consensus/deficit.py +55 -0
- chia/consensus/difficulty_adjustment.py +412 -0
- chia/consensus/find_fork_point.py +111 -0
- chia/consensus/full_block_to_block_record.py +167 -0
- chia/consensus/get_block_challenge.py +106 -0
- chia/consensus/get_block_generator.py +27 -0
- chia/consensus/make_sub_epoch_summary.py +210 -0
- chia/consensus/multiprocess_validation.py +268 -0
- chia/consensus/pos_quality.py +19 -0
- chia/consensus/pot_iterations.py +67 -0
- chia/consensus/puzzles/__init__.py +0 -0
- chia/consensus/puzzles/chialisp_deserialisation.clsp +69 -0
- chia/consensus/puzzles/chialisp_deserialisation.clsp.hex +1 -0
- chia/consensus/puzzles/rom_bootstrap_generator.clsp +37 -0
- chia/consensus/puzzles/rom_bootstrap_generator.clsp.hex +1 -0
- chia/consensus/vdf_info_computation.py +156 -0
- chia/daemon/__init__.py +0 -0
- chia/daemon/client.py +252 -0
- chia/daemon/keychain_proxy.py +502 -0
- chia/daemon/keychain_server.py +365 -0
- chia/daemon/server.py +1606 -0
- chia/daemon/windows_signal.py +56 -0
- chia/data_layer/__init__.py +0 -0
- chia/data_layer/data_layer.py +1291 -0
- chia/data_layer/data_layer_api.py +33 -0
- chia/data_layer/data_layer_errors.py +50 -0
- chia/data_layer/data_layer_server.py +170 -0
- chia/data_layer/data_layer_util.py +985 -0
- chia/data_layer/data_layer_wallet.py +1311 -0
- chia/data_layer/data_store.py +2267 -0
- chia/data_layer/dl_wallet_store.py +407 -0
- chia/data_layer/download_data.py +389 -0
- chia/data_layer/puzzles/__init__.py +0 -0
- chia/data_layer/puzzles/graftroot_dl_offers.clsp +100 -0
- chia/data_layer/puzzles/graftroot_dl_offers.clsp.hex +1 -0
- chia/data_layer/s3_plugin_config.yml +33 -0
- chia/data_layer/s3_plugin_service.py +468 -0
- chia/data_layer/util/__init__.py +0 -0
- chia/data_layer/util/benchmark.py +107 -0
- chia/data_layer/util/plugin.py +40 -0
- chia/farmer/__init__.py +0 -0
- chia/farmer/farmer.py +923 -0
- chia/farmer/farmer_api.py +820 -0
- chia/full_node/__init__.py +0 -0
- chia/full_node/bitcoin_fee_estimator.py +85 -0
- chia/full_node/block_height_map.py +271 -0
- chia/full_node/block_store.py +576 -0
- chia/full_node/bundle_tools.py +19 -0
- chia/full_node/coin_store.py +647 -0
- chia/full_node/fee_estimate.py +54 -0
- chia/full_node/fee_estimate_store.py +24 -0
- chia/full_node/fee_estimation.py +92 -0
- chia/full_node/fee_estimator.py +90 -0
- chia/full_node/fee_estimator_constants.py +38 -0
- chia/full_node/fee_estimator_interface.py +42 -0
- chia/full_node/fee_history.py +25 -0
- chia/full_node/fee_tracker.py +564 -0
- chia/full_node/full_node.py +3327 -0
- chia/full_node/full_node_api.py +2025 -0
- chia/full_node/full_node_store.py +1033 -0
- chia/full_node/hint_management.py +56 -0
- chia/full_node/hint_store.py +93 -0
- chia/full_node/mempool.py +589 -0
- chia/full_node/mempool_check_conditions.py +146 -0
- chia/full_node/mempool_manager.py +853 -0
- chia/full_node/pending_tx_cache.py +112 -0
- chia/full_node/puzzles/__init__.py +0 -0
- chia/full_node/puzzles/block_program_zero.clsp +14 -0
- chia/full_node/puzzles/block_program_zero.clsp.hex +1 -0
- chia/full_node/puzzles/decompress_coin_spend_entry.clsp +5 -0
- chia/full_node/puzzles/decompress_coin_spend_entry.clsp.hex +1 -0
- chia/full_node/puzzles/decompress_coin_spend_entry_with_prefix.clsp +7 -0
- chia/full_node/puzzles/decompress_coin_spend_entry_with_prefix.clsp.hex +1 -0
- chia/full_node/puzzles/decompress_puzzle.clsp +6 -0
- chia/full_node/puzzles/decompress_puzzle.clsp.hex +1 -0
- chia/full_node/signage_point.py +16 -0
- chia/full_node/subscriptions.py +247 -0
- chia/full_node/sync_store.py +146 -0
- chia/full_node/tx_processing_queue.py +78 -0
- chia/full_node/util/__init__.py +0 -0
- chia/full_node/weight_proof.py +1720 -0
- chia/harvester/__init__.py +0 -0
- chia/harvester/harvester.py +272 -0
- chia/harvester/harvester_api.py +380 -0
- chia/introducer/__init__.py +0 -0
- chia/introducer/introducer.py +122 -0
- chia/introducer/introducer_api.py +70 -0
- chia/legacy/__init__.py +0 -0
- chia/legacy/keyring.py +155 -0
- chia/plot_sync/__init__.py +0 -0
- chia/plot_sync/delta.py +61 -0
- chia/plot_sync/exceptions.py +56 -0
- chia/plot_sync/receiver.py +386 -0
- chia/plot_sync/sender.py +340 -0
- chia/plot_sync/util.py +43 -0
- chia/plotters/__init__.py +0 -0
- chia/plotters/bladebit.py +388 -0
- chia/plotters/chiapos.py +63 -0
- chia/plotters/madmax.py +224 -0
- chia/plotters/plotters.py +577 -0
- chia/plotters/plotters_util.py +133 -0
- chia/plotting/__init__.py +0 -0
- chia/plotting/cache.py +213 -0
- chia/plotting/check_plots.py +283 -0
- chia/plotting/create_plots.py +278 -0
- chia/plotting/manager.py +436 -0
- chia/plotting/util.py +336 -0
- chia/pools/__init__.py +0 -0
- chia/pools/pool_config.py +110 -0
- chia/pools/pool_puzzles.py +459 -0
- chia/pools/pool_wallet.py +933 -0
- chia/pools/pool_wallet_info.py +118 -0
- chia/pools/puzzles/__init__.py +0 -0
- chia/pools/puzzles/pool_member_innerpuz.clsp +70 -0
- chia/pools/puzzles/pool_member_innerpuz.clsp.hex +1 -0
- chia/pools/puzzles/pool_waitingroom_innerpuz.clsp +69 -0
- chia/pools/puzzles/pool_waitingroom_innerpuz.clsp.hex +1 -0
- chia/protocols/__init__.py +0 -0
- chia/protocols/farmer_protocol.py +102 -0
- chia/protocols/full_node_protocol.py +219 -0
- chia/protocols/harvester_protocol.py +216 -0
- chia/protocols/introducer_protocol.py +25 -0
- chia/protocols/pool_protocol.py +177 -0
- chia/protocols/protocol_message_types.py +139 -0
- chia/protocols/protocol_state_machine.py +87 -0
- chia/protocols/protocol_timing.py +8 -0
- chia/protocols/shared_protocol.py +86 -0
- chia/protocols/timelord_protocol.py +93 -0
- chia/protocols/wallet_protocol.py +401 -0
- chia/py.typed +0 -0
- chia/rpc/__init__.py +0 -0
- chia/rpc/crawler_rpc_api.py +80 -0
- chia/rpc/data_layer_rpc_api.py +644 -0
- chia/rpc/data_layer_rpc_client.py +188 -0
- chia/rpc/data_layer_rpc_util.py +58 -0
- chia/rpc/farmer_rpc_api.py +365 -0
- chia/rpc/farmer_rpc_client.py +86 -0
- chia/rpc/full_node_rpc_api.py +959 -0
- chia/rpc/full_node_rpc_client.py +292 -0
- chia/rpc/harvester_rpc_api.py +141 -0
- chia/rpc/harvester_rpc_client.py +54 -0
- chia/rpc/rpc_client.py +164 -0
- chia/rpc/rpc_server.py +521 -0
- chia/rpc/timelord_rpc_api.py +32 -0
- chia/rpc/util.py +93 -0
- chia/rpc/wallet_request_types.py +904 -0
- chia/rpc/wallet_rpc_api.py +4943 -0
- chia/rpc/wallet_rpc_client.py +1814 -0
- chia/seeder/__init__.py +0 -0
- chia/seeder/crawl_store.py +425 -0
- chia/seeder/crawler.py +410 -0
- chia/seeder/crawler_api.py +135 -0
- chia/seeder/dns_server.py +593 -0
- chia/seeder/peer_record.py +146 -0
- chia/seeder/start_crawler.py +92 -0
- chia/server/__init__.py +0 -0
- chia/server/address_manager.py +658 -0
- chia/server/address_manager_store.py +237 -0
- chia/server/api_protocol.py +116 -0
- chia/server/capabilities.py +24 -0
- chia/server/chia_policy.py +346 -0
- chia/server/introducer_peers.py +76 -0
- chia/server/node_discovery.py +714 -0
- chia/server/outbound_message.py +33 -0
- chia/server/rate_limit_numbers.py +214 -0
- chia/server/rate_limits.py +153 -0
- chia/server/server.py +741 -0
- chia/server/signal_handlers.py +120 -0
- chia/server/ssl_context.py +32 -0
- chia/server/start_data_layer.py +151 -0
- chia/server/start_farmer.py +98 -0
- chia/server/start_full_node.py +112 -0
- chia/server/start_harvester.py +93 -0
- chia/server/start_introducer.py +81 -0
- chia/server/start_service.py +316 -0
- chia/server/start_timelord.py +89 -0
- chia/server/start_wallet.py +113 -0
- chia/server/upnp.py +118 -0
- chia/server/ws_connection.py +766 -0
- chia/simulator/__init__.py +0 -0
- chia/simulator/add_blocks_in_batches.py +54 -0
- chia/simulator/block_tools.py +2054 -0
- chia/simulator/full_node_simulator.py +794 -0
- chia/simulator/keyring.py +128 -0
- chia/simulator/setup_services.py +506 -0
- chia/simulator/simulator_constants.py +13 -0
- chia/simulator/simulator_full_node_rpc_api.py +99 -0
- chia/simulator/simulator_full_node_rpc_client.py +60 -0
- chia/simulator/simulator_protocol.py +29 -0
- chia/simulator/simulator_test_tools.py +164 -0
- chia/simulator/socket.py +24 -0
- chia/simulator/ssl_certs.py +114 -0
- chia/simulator/ssl_certs_1.py +697 -0
- chia/simulator/ssl_certs_10.py +697 -0
- chia/simulator/ssl_certs_2.py +697 -0
- chia/simulator/ssl_certs_3.py +697 -0
- chia/simulator/ssl_certs_4.py +697 -0
- chia/simulator/ssl_certs_5.py +697 -0
- chia/simulator/ssl_certs_6.py +697 -0
- chia/simulator/ssl_certs_7.py +697 -0
- chia/simulator/ssl_certs_8.py +697 -0
- chia/simulator/ssl_certs_9.py +697 -0
- chia/simulator/start_simulator.py +143 -0
- chia/simulator/wallet_tools.py +246 -0
- chia/ssl/__init__.py +0 -0
- chia/ssl/chia_ca.crt +19 -0
- chia/ssl/chia_ca.key +28 -0
- chia/ssl/create_ssl.py +249 -0
- chia/ssl/dst_root_ca.pem +20 -0
- chia/timelord/__init__.py +0 -0
- chia/timelord/iters_from_block.py +50 -0
- chia/timelord/timelord.py +1226 -0
- chia/timelord/timelord_api.py +138 -0
- chia/timelord/timelord_launcher.py +190 -0
- chia/timelord/timelord_state.py +244 -0
- chia/timelord/types.py +22 -0
- chia/types/__init__.py +0 -0
- chia/types/aliases.py +35 -0
- chia/types/block_protocol.py +20 -0
- chia/types/blockchain_format/__init__.py +0 -0
- chia/types/blockchain_format/classgroup.py +5 -0
- chia/types/blockchain_format/coin.py +28 -0
- chia/types/blockchain_format/foliage.py +8 -0
- chia/types/blockchain_format/pool_target.py +5 -0
- chia/types/blockchain_format/program.py +269 -0
- chia/types/blockchain_format/proof_of_space.py +135 -0
- chia/types/blockchain_format/reward_chain_block.py +6 -0
- chia/types/blockchain_format/serialized_program.py +5 -0
- chia/types/blockchain_format/sized_bytes.py +11 -0
- chia/types/blockchain_format/slots.py +9 -0
- chia/types/blockchain_format/sub_epoch_summary.py +5 -0
- chia/types/blockchain_format/tree_hash.py +72 -0
- chia/types/blockchain_format/vdf.py +86 -0
- chia/types/clvm_cost.py +13 -0
- chia/types/coin_record.py +43 -0
- chia/types/coin_spend.py +115 -0
- chia/types/condition_opcodes.py +73 -0
- chia/types/condition_with_args.py +16 -0
- chia/types/eligible_coin_spends.py +365 -0
- chia/types/end_of_slot_bundle.py +5 -0
- chia/types/fee_rate.py +38 -0
- chia/types/full_block.py +5 -0
- chia/types/generator_types.py +13 -0
- chia/types/header_block.py +5 -0
- chia/types/internal_mempool_item.py +18 -0
- chia/types/mempool_inclusion_status.py +9 -0
- chia/types/mempool_item.py +85 -0
- chia/types/mempool_submission_status.py +30 -0
- chia/types/mojos.py +7 -0
- chia/types/peer_info.py +64 -0
- chia/types/signing_mode.py +29 -0
- chia/types/spend_bundle.py +30 -0
- chia/types/spend_bundle_conditions.py +7 -0
- chia/types/transaction_queue_entry.py +55 -0
- chia/types/unfinished_block.py +5 -0
- chia/types/unfinished_header_block.py +37 -0
- chia/types/validation_state.py +14 -0
- chia/types/weight_proof.py +49 -0
- chia/util/__init__.py +0 -0
- chia/util/action_scope.py +168 -0
- chia/util/async_pool.py +226 -0
- chia/util/augmented_chain.py +134 -0
- chia/util/batches.py +42 -0
- chia/util/bech32m.py +126 -0
- chia/util/beta_metrics.py +119 -0
- chia/util/block_cache.py +56 -0
- chia/util/byte_types.py +12 -0
- chia/util/check_fork_next_block.py +33 -0
- chia/util/chia_logging.py +144 -0
- chia/util/chia_version.py +33 -0
- chia/util/collection.py +17 -0
- chia/util/condition_tools.py +201 -0
- chia/util/config.py +367 -0
- chia/util/cpu.py +22 -0
- chia/util/db_synchronous.py +23 -0
- chia/util/db_version.py +32 -0
- chia/util/db_wrapper.py +430 -0
- chia/util/default_root.py +27 -0
- chia/util/dump_keyring.py +93 -0
- chia/util/english.txt +2048 -0
- chia/util/errors.py +353 -0
- chia/util/file_keyring.py +469 -0
- chia/util/files.py +97 -0
- chia/util/full_block_utils.py +345 -0
- chia/util/generator_tools.py +72 -0
- chia/util/hash.py +31 -0
- chia/util/initial-config.yaml +694 -0
- chia/util/inline_executor.py +26 -0
- chia/util/ints.py +19 -0
- chia/util/ip_address.py +39 -0
- chia/util/json_util.py +37 -0
- chia/util/keychain.py +676 -0
- chia/util/keyring_wrapper.py +327 -0
- chia/util/limited_semaphore.py +41 -0
- chia/util/lock.py +49 -0
- chia/util/log_exceptions.py +32 -0
- chia/util/logging.py +36 -0
- chia/util/lru_cache.py +31 -0
- chia/util/math.py +20 -0
- chia/util/network.py +182 -0
- chia/util/paginator.py +48 -0
- chia/util/path.py +31 -0
- chia/util/permissions.py +20 -0
- chia/util/prev_transaction_block.py +21 -0
- chia/util/priority_mutex.py +95 -0
- chia/util/profiler.py +197 -0
- chia/util/recursive_replace.py +24 -0
- chia/util/safe_cancel_task.py +16 -0
- chia/util/service_groups.py +47 -0
- chia/util/setproctitle.py +22 -0
- chia/util/significant_bits.py +32 -0
- chia/util/ssl_check.py +213 -0
- chia/util/streamable.py +642 -0
- chia/util/task_referencer.py +59 -0
- chia/util/task_timing.py +382 -0
- chia/util/timing.py +67 -0
- chia/util/vdf_prover.py +30 -0
- chia/util/virtual_project_analysis.py +540 -0
- chia/util/ws_message.py +66 -0
- chia/wallet/__init__.py +0 -0
- chia/wallet/cat_wallet/__init__.py +0 -0
- chia/wallet/cat_wallet/cat_constants.py +75 -0
- chia/wallet/cat_wallet/cat_info.py +47 -0
- chia/wallet/cat_wallet/cat_outer_puzzle.py +120 -0
- chia/wallet/cat_wallet/cat_utils.py +164 -0
- chia/wallet/cat_wallet/cat_wallet.py +855 -0
- chia/wallet/cat_wallet/dao_cat_info.py +28 -0
- chia/wallet/cat_wallet/dao_cat_wallet.py +669 -0
- chia/wallet/cat_wallet/lineage_store.py +74 -0
- chia/wallet/cat_wallet/puzzles/__init__.py +0 -0
- chia/wallet/cat_wallet/puzzles/cat_truths.clib +31 -0
- chia/wallet/cat_wallet/puzzles/cat_v2.clsp +397 -0
- chia/wallet/cat_wallet/puzzles/cat_v2.clsp.hex +1 -0
- chia/wallet/cat_wallet/puzzles/delegated_tail.clsp +25 -0
- chia/wallet/cat_wallet/puzzles/delegated_tail.clsp.hex +1 -0
- chia/wallet/cat_wallet/puzzles/everything_with_signature.clsp +15 -0
- chia/wallet/cat_wallet/puzzles/everything_with_signature.clsp.hex +1 -0
- chia/wallet/cat_wallet/puzzles/genesis_by_coin_id.clsp +26 -0
- chia/wallet/cat_wallet/puzzles/genesis_by_coin_id.clsp.hex +1 -0
- chia/wallet/cat_wallet/puzzles/genesis_by_coin_id_or_singleton.clsp +42 -0
- chia/wallet/cat_wallet/puzzles/genesis_by_coin_id_or_singleton.clsp.hex +1 -0
- chia/wallet/cat_wallet/puzzles/genesis_by_puzzle_hash.clsp +24 -0
- chia/wallet/cat_wallet/puzzles/genesis_by_puzzle_hash.clsp.hex +1 -0
- chia/wallet/coin_selection.py +188 -0
- chia/wallet/conditions.py +1512 -0
- chia/wallet/dao_wallet/__init__.py +0 -0
- chia/wallet/dao_wallet/dao_info.py +61 -0
- chia/wallet/dao_wallet/dao_utils.py +811 -0
- chia/wallet/dao_wallet/dao_wallet.py +2119 -0
- chia/wallet/db_wallet/__init__.py +0 -0
- chia/wallet/db_wallet/db_wallet_puzzles.py +111 -0
- chia/wallet/derivation_record.py +30 -0
- chia/wallet/derive_keys.py +146 -0
- chia/wallet/did_wallet/__init__.py +0 -0
- chia/wallet/did_wallet/did_info.py +39 -0
- chia/wallet/did_wallet/did_wallet.py +1494 -0
- chia/wallet/did_wallet/did_wallet_puzzles.py +221 -0
- chia/wallet/did_wallet/puzzles/__init__.py +0 -0
- chia/wallet/did_wallet/puzzles/did_innerpuz.clsp +135 -0
- chia/wallet/did_wallet/puzzles/did_innerpuz.clsp.hex +1 -0
- chia/wallet/driver_protocol.py +26 -0
- chia/wallet/key_val_store.py +55 -0
- chia/wallet/lineage_proof.py +58 -0
- chia/wallet/nft_wallet/__init__.py +0 -0
- chia/wallet/nft_wallet/metadata_outer_puzzle.py +92 -0
- chia/wallet/nft_wallet/nft_info.py +120 -0
- chia/wallet/nft_wallet/nft_puzzles.py +305 -0
- chia/wallet/nft_wallet/nft_wallet.py +1687 -0
- chia/wallet/nft_wallet/ownership_outer_puzzle.py +101 -0
- chia/wallet/nft_wallet/puzzles/__init__.py +0 -0
- chia/wallet/nft_wallet/puzzles/create_nft_launcher_from_did.clsp +6 -0
- chia/wallet/nft_wallet/puzzles/create_nft_launcher_from_did.clsp.hex +1 -0
- chia/wallet/nft_wallet/puzzles/nft_intermediate_launcher.clsp +6 -0
- chia/wallet/nft_wallet/puzzles/nft_intermediate_launcher.clsp.hex +1 -0
- chia/wallet/nft_wallet/puzzles/nft_metadata_updater_default.clsp +30 -0
- chia/wallet/nft_wallet/puzzles/nft_metadata_updater_default.clsp.hex +1 -0
- chia/wallet/nft_wallet/puzzles/nft_metadata_updater_updateable.clsp +28 -0
- chia/wallet/nft_wallet/puzzles/nft_metadata_updater_updateable.clsp.hex +1 -0
- chia/wallet/nft_wallet/puzzles/nft_ownership_layer.clsp +100 -0
- chia/wallet/nft_wallet/puzzles/nft_ownership_layer.clsp.hex +1 -0
- chia/wallet/nft_wallet/puzzles/nft_ownership_transfer_program_one_way_claim_with_royalties.clsp +78 -0
- chia/wallet/nft_wallet/puzzles/nft_ownership_transfer_program_one_way_claim_with_royalties.clsp.hex +1 -0
- chia/wallet/nft_wallet/puzzles/nft_state_layer.clsp +74 -0
- chia/wallet/nft_wallet/puzzles/nft_state_layer.clsp.hex +1 -0
- chia/wallet/nft_wallet/singleton_outer_puzzle.py +101 -0
- chia/wallet/nft_wallet/transfer_program_puzzle.py +82 -0
- chia/wallet/nft_wallet/uncurry_nft.py +217 -0
- chia/wallet/notification_manager.py +117 -0
- chia/wallet/notification_store.py +178 -0
- chia/wallet/outer_puzzles.py +84 -0
- chia/wallet/payment.py +33 -0
- chia/wallet/puzzle_drivers.py +118 -0
- chia/wallet/puzzles/__init__.py +0 -0
- chia/wallet/puzzles/augmented_condition.clsp +13 -0
- chia/wallet/puzzles/augmented_condition.clsp.hex +1 -0
- chia/wallet/puzzles/clawback/__init__.py +0 -0
- chia/wallet/puzzles/clawback/drivers.py +188 -0
- chia/wallet/puzzles/clawback/metadata.py +38 -0
- chia/wallet/puzzles/clawback/puzzle_decorator.py +67 -0
- chia/wallet/puzzles/condition_codes.clib +77 -0
- chia/wallet/puzzles/curry-and-treehash.clib +102 -0
- chia/wallet/puzzles/curry.clib +135 -0
- chia/wallet/puzzles/curry_by_index.clib +16 -0
- chia/wallet/puzzles/dao_cat_eve.clsp +17 -0
- chia/wallet/puzzles/dao_cat_eve.clsp.hex +1 -0
- chia/wallet/puzzles/dao_cat_launcher.clsp +36 -0
- chia/wallet/puzzles/dao_cat_launcher.clsp.hex +1 -0
- chia/wallet/puzzles/dao_finished_state.clsp +35 -0
- chia/wallet/puzzles/dao_finished_state.clsp.hex +1 -0
- chia/wallet/puzzles/dao_finished_state.clsp.hex.sha256tree +1 -0
- chia/wallet/puzzles/dao_lockup.clsp +288 -0
- chia/wallet/puzzles/dao_lockup.clsp.hex +1 -0
- chia/wallet/puzzles/dao_lockup.clsp.hex.sha256tree +1 -0
- chia/wallet/puzzles/dao_proposal.clsp +377 -0
- chia/wallet/puzzles/dao_proposal.clsp.hex +1 -0
- chia/wallet/puzzles/dao_proposal.clsp.hex.sha256tree +1 -0
- chia/wallet/puzzles/dao_proposal_timer.clsp +78 -0
- chia/wallet/puzzles/dao_proposal_timer.clsp.hex +1 -0
- chia/wallet/puzzles/dao_proposal_timer.clsp.hex.sha256tree +1 -0
- chia/wallet/puzzles/dao_proposal_validator.clsp +87 -0
- chia/wallet/puzzles/dao_proposal_validator.clsp.hex +1 -0
- chia/wallet/puzzles/dao_proposal_validator.clsp.hex.sha256tree +1 -0
- chia/wallet/puzzles/dao_spend_p2_singleton_v2.clsp +240 -0
- chia/wallet/puzzles/dao_spend_p2_singleton_v2.clsp.hex +1 -0
- chia/wallet/puzzles/dao_spend_p2_singleton_v2.clsp.hex.sha256tree +1 -0
- chia/wallet/puzzles/dao_treasury.clsp +115 -0
- chia/wallet/puzzles/dao_treasury.clsp.hex +1 -0
- chia/wallet/puzzles/dao_update_proposal.clsp +44 -0
- chia/wallet/puzzles/dao_update_proposal.clsp.hex +1 -0
- chia/wallet/puzzles/deployed_puzzle_hashes.json +67 -0
- chia/wallet/puzzles/json.clib +25 -0
- chia/wallet/puzzles/load_clvm.py +161 -0
- chia/wallet/puzzles/merkle_utils.clib +18 -0
- chia/wallet/puzzles/notification.clsp +7 -0
- chia/wallet/puzzles/notification.clsp.hex +1 -0
- chia/wallet/puzzles/p2_1_of_n.clsp +22 -0
- chia/wallet/puzzles/p2_1_of_n.clsp.hex +1 -0
- chia/wallet/puzzles/p2_conditions.clsp +3 -0
- chia/wallet/puzzles/p2_conditions.clsp.hex +1 -0
- chia/wallet/puzzles/p2_conditions.py +26 -0
- chia/wallet/puzzles/p2_delegated_conditions.clsp +18 -0
- chia/wallet/puzzles/p2_delegated_conditions.clsp.hex +1 -0
- chia/wallet/puzzles/p2_delegated_conditions.py +21 -0
- chia/wallet/puzzles/p2_delegated_puzzle.clsp +19 -0
- chia/wallet/puzzles/p2_delegated_puzzle.clsp.hex +1 -0
- chia/wallet/puzzles/p2_delegated_puzzle.py +34 -0
- chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.clsp +91 -0
- chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.clsp.hex +1 -0
- chia/wallet/puzzles/p2_delegated_puzzle_or_hidden_puzzle.py +160 -0
- chia/wallet/puzzles/p2_m_of_n_delegate_direct.clsp +108 -0
- chia/wallet/puzzles/p2_m_of_n_delegate_direct.clsp.hex +1 -0
- chia/wallet/puzzles/p2_m_of_n_delegate_direct.py +21 -0
- chia/wallet/puzzles/p2_parent.clsp +19 -0
- chia/wallet/puzzles/p2_parent.clsp.hex +1 -0
- chia/wallet/puzzles/p2_puzzle_hash.clsp +18 -0
- chia/wallet/puzzles/p2_puzzle_hash.clsp.hex +1 -0
- chia/wallet/puzzles/p2_puzzle_hash.py +27 -0
- chia/wallet/puzzles/p2_singleton.clsp +30 -0
- chia/wallet/puzzles/p2_singleton.clsp.hex +1 -0
- chia/wallet/puzzles/p2_singleton_aggregator.clsp +81 -0
- chia/wallet/puzzles/p2_singleton_aggregator.clsp.hex +1 -0
- chia/wallet/puzzles/p2_singleton_or_delayed_puzhash.clsp +50 -0
- chia/wallet/puzzles/p2_singleton_or_delayed_puzhash.clsp.hex +1 -0
- chia/wallet/puzzles/p2_singleton_via_delegated_puzzle.clsp +47 -0
- chia/wallet/puzzles/p2_singleton_via_delegated_puzzle.clsp.hex +1 -0
- chia/wallet/puzzles/puzzle_utils.py +34 -0
- chia/wallet/puzzles/settlement_payments.clsp +49 -0
- chia/wallet/puzzles/settlement_payments.clsp.hex +1 -0
- chia/wallet/puzzles/sha256tree.clib +11 -0
- chia/wallet/puzzles/singleton_launcher.clsp +16 -0
- chia/wallet/puzzles/singleton_launcher.clsp.hex +1 -0
- chia/wallet/puzzles/singleton_top_layer.clsp +177 -0
- chia/wallet/puzzles/singleton_top_layer.clsp.hex +1 -0
- chia/wallet/puzzles/singleton_top_layer.py +296 -0
- chia/wallet/puzzles/singleton_top_layer_v1_1.clsp +107 -0
- chia/wallet/puzzles/singleton_top_layer_v1_1.clsp.hex +1 -0
- chia/wallet/puzzles/singleton_top_layer_v1_1.py +345 -0
- chia/wallet/puzzles/singleton_truths.clib +21 -0
- chia/wallet/puzzles/tails.py +348 -0
- chia/wallet/puzzles/utility_macros.clib +48 -0
- chia/wallet/signer_protocol.py +125 -0
- chia/wallet/singleton.py +106 -0
- chia/wallet/singleton_record.py +30 -0
- chia/wallet/trade_manager.py +1102 -0
- chia/wallet/trade_record.py +67 -0
- chia/wallet/trading/__init__.py +0 -0
- chia/wallet/trading/offer.py +702 -0
- chia/wallet/trading/trade_status.py +13 -0
- chia/wallet/trading/trade_store.py +526 -0
- chia/wallet/transaction_record.py +158 -0
- chia/wallet/transaction_sorting.py +14 -0
- chia/wallet/uncurried_puzzle.py +17 -0
- chia/wallet/util/__init__.py +0 -0
- chia/wallet/util/address_type.py +55 -0
- chia/wallet/util/blind_signer_tl.py +164 -0
- chia/wallet/util/clvm_streamable.py +203 -0
- chia/wallet/util/compute_hints.py +66 -0
- chia/wallet/util/compute_memos.py +43 -0
- chia/wallet/util/curry_and_treehash.py +91 -0
- chia/wallet/util/debug_spend_bundle.py +232 -0
- chia/wallet/util/merkle_tree.py +100 -0
- chia/wallet/util/merkle_utils.py +102 -0
- chia/wallet/util/new_peak_queue.py +82 -0
- chia/wallet/util/notifications.py +12 -0
- chia/wallet/util/peer_request_cache.py +174 -0
- chia/wallet/util/pprint.py +39 -0
- chia/wallet/util/puzzle_compression.py +95 -0
- chia/wallet/util/puzzle_decorator.py +100 -0
- chia/wallet/util/puzzle_decorator_type.py +7 -0
- chia/wallet/util/query_filter.py +59 -0
- chia/wallet/util/transaction_type.py +23 -0
- chia/wallet/util/tx_config.py +158 -0
- chia/wallet/util/wallet_sync_utils.py +351 -0
- chia/wallet/util/wallet_types.py +72 -0
- chia/wallet/vc_wallet/__init__.py +0 -0
- chia/wallet/vc_wallet/cr_cat_drivers.py +664 -0
- chia/wallet/vc_wallet/cr_cat_wallet.py +877 -0
- chia/wallet/vc_wallet/cr_outer_puzzle.py +102 -0
- chia/wallet/vc_wallet/cr_puzzles/__init__.py +0 -0
- chia/wallet/vc_wallet/cr_puzzles/conditions_w_fee_announce.clsp +3 -0
- chia/wallet/vc_wallet/cr_puzzles/conditions_w_fee_announce.clsp.hex +1 -0
- chia/wallet/vc_wallet/cr_puzzles/credential_restriction.clsp +304 -0
- chia/wallet/vc_wallet/cr_puzzles/credential_restriction.clsp.hex +1 -0
- chia/wallet/vc_wallet/cr_puzzles/flag_proofs_checker.clsp +45 -0
- chia/wallet/vc_wallet/cr_puzzles/flag_proofs_checker.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_drivers.py +838 -0
- chia/wallet/vc_wallet/vc_puzzles/__init__.py +0 -0
- chia/wallet/vc_wallet/vc_puzzles/covenant_layer.clsp +30 -0
- chia/wallet/vc_wallet/vc_puzzles/covenant_layer.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/eml_covenant_morpher.clsp +75 -0
- chia/wallet/vc_wallet/vc_puzzles/eml_covenant_morpher.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/eml_transfer_program_covenant_adapter.clsp +32 -0
- chia/wallet/vc_wallet/vc_puzzles/eml_transfer_program_covenant_adapter.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/eml_update_metadata_with_DID.clsp +80 -0
- chia/wallet/vc_wallet/vc_puzzles/eml_update_metadata_with_DID.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/exigent_metadata_layer.clsp +163 -0
- chia/wallet/vc_wallet/vc_puzzles/exigent_metadata_layer.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/p2_announced_delegated_puzzle.clsp +16 -0
- chia/wallet/vc_wallet/vc_puzzles/p2_announced_delegated_puzzle.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/standard_vc_backdoor_puzzle.clsp +74 -0
- chia/wallet/vc_wallet/vc_puzzles/standard_vc_backdoor_puzzle.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/std_parent_morpher.clsp +23 -0
- chia/wallet/vc_wallet/vc_puzzles/std_parent_morpher.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_puzzles/viral_backdoor.clsp +64 -0
- chia/wallet/vc_wallet/vc_puzzles/viral_backdoor.clsp.hex +1 -0
- chia/wallet/vc_wallet/vc_store.py +263 -0
- chia/wallet/vc_wallet/vc_wallet.py +638 -0
- chia/wallet/wallet.py +698 -0
- chia/wallet/wallet_action_scope.py +96 -0
- chia/wallet/wallet_blockchain.py +244 -0
- chia/wallet/wallet_coin_record.py +72 -0
- chia/wallet/wallet_coin_store.py +351 -0
- chia/wallet/wallet_info.py +35 -0
- chia/wallet/wallet_interested_store.py +188 -0
- chia/wallet/wallet_nft_store.py +279 -0
- chia/wallet/wallet_node.py +1765 -0
- chia/wallet/wallet_node_api.py +207 -0
- chia/wallet/wallet_pool_store.py +119 -0
- chia/wallet/wallet_protocol.py +90 -0
- chia/wallet/wallet_puzzle_store.py +396 -0
- chia/wallet/wallet_retry_store.py +70 -0
- chia/wallet/wallet_singleton_store.py +259 -0
- chia/wallet/wallet_spend_bundle.py +25 -0
- chia/wallet/wallet_state_manager.py +2819 -0
- chia/wallet/wallet_transaction_store.py +496 -0
- chia/wallet/wallet_user_store.py +110 -0
- chia/wallet/wallet_weight_proof_handler.py +126 -0
- chia_blockchain-2.5.1rc1.dist-info/LICENSE +201 -0
- chia_blockchain-2.5.1rc1.dist-info/METADATA +156 -0
- chia_blockchain-2.5.1rc1.dist-info/RECORD +1042 -0
- chia_blockchain-2.5.1rc1.dist-info/WHEEL +4 -0
- chia_blockchain-2.5.1rc1.dist-info/entry_points.txt +17 -0
- mozilla-ca/cacert.pem +3611 -0
chia/_tests/core/data_layer/test_data_store.py
@@ -0,0 +1,2424 @@
+from __future__ import annotations
+
+import itertools
+import logging
+import os
+import random
+import re
+import statistics
+import time
+from collections.abc import Awaitable
+from dataclasses import dataclass
+from pathlib import Path
+from random import Random
+from typing import Any, Callable, Optional, cast
+
+import aiohttp
+import aiosqlite
+import pytest
+
+from chia._tests.core.data_layer.util import Example, add_0123_example, add_01234567_example
+from chia._tests.util.misc import BenchmarkRunner, Marks, boolean_datacases, datacases
+from chia.data_layer.data_layer_errors import KeyNotFoundError, NodeHashError, TreeGenerationIncrementingError
+from chia.data_layer.data_layer_util import (
+    DiffData,
+    InternalNode,
+    Node,
+    NodeType,
+    OperationType,
+    ProofOfInclusion,
+    ProofOfInclusionLayer,
+    Root,
+    ServerInfo,
+    Side,
+    Status,
+    Subscription,
+    TerminalNode,
+    _debug_dump,
+    leaf_hash,
+)
+from chia.data_layer.data_store import DataStore
+from chia.data_layer.download_data import (
+    get_delta_filename_path,
+    get_full_tree_filename_path,
+    insert_from_delta_file,
+    insert_into_data_store_from_file,
+    write_files_for_root,
+)
+from chia.types.blockchain_format.program import Program
+from chia.types.blockchain_format.sized_bytes import bytes32
+from chia.util.byte_types import hexstr_to_bytes
+from chia.util.db_wrapper import DBWrapper2, generate_in_memory_db_uri
+
+log = logging.getLogger(__name__)
+
+
+pytestmark = pytest.mark.data_layer
+
+
+table_columns: dict[str, list[str]] = {
+    "node": ["hash", "node_type", "left", "right", "key", "value"],
+    "root": ["tree_id", "generation", "node_hash", "status"],
+}
+
+
+# TODO: Someday add tests for malformed DB data to make sure we handle it gracefully
+# and with good error messages.
+
+
+@pytest.mark.anyio
+async def test_valid_node_values_fixture_are_valid(data_store: DataStore, valid_node_values: dict[str, Any]) -> None:
+    async with data_store.db_wrapper.writer() as writer:
+        await writer.execute(
+            """
+            INSERT INTO node(hash, node_type, left, right, key, value)
+            VALUES(:hash, :node_type, :left, :right, :key, :value)
+            """,
+            valid_node_values,
+        )
+
+
+@pytest.mark.parametrize(argnames=["table_name", "expected_columns"], argvalues=table_columns.items())
+@pytest.mark.anyio
+async def test_create_creates_tables_and_columns(
+    database_uri: str, table_name: str, expected_columns: list[str]
+) -> None:
+    # Never string-interpolate sql queries... Except maybe in tests when it does not
+    # allow you to parametrize the query.
+    query = f"pragma table_info({table_name});"
+
+    async with DBWrapper2.managed(database=database_uri, uri=True, reader_count=1) as db_wrapper:
+        async with db_wrapper.reader() as reader:
+            cursor = await reader.execute(query)
+            columns = await cursor.fetchall()
+            assert columns == []
+
+        async with DataStore.managed(database=database_uri, uri=True):
+            async with db_wrapper.reader() as reader:
+                cursor = await reader.execute(query)
+                columns = await cursor.fetchall()
+                assert [column[1] for column in columns] == expected_columns
+
+
+@pytest.mark.anyio
+async def test_create_tree_accepts_bytes32(raw_data_store: DataStore) -> None:
+    store_id = bytes32.zeros
+
+    await raw_data_store.create_tree(store_id=store_id)
+
+
+@pytest.mark.parametrize(argnames=["length"], argvalues=[[length] for length in [*range(0, 32), *range(33, 48)]])
+@pytest.mark.anyio
+async def test_create_store_fails_for_not_bytes32(raw_data_store: DataStore, length: int) -> None:
+    bad_store_id = b"\0" * length
+
+    # TODO: require a more specific exception
+    with pytest.raises(Exception):
+        # type ignore since we are trying to intentionally pass a bad argument
+        await raw_data_store.create_tree(store_id=bad_store_id)  # type: ignore[arg-type]
+
+
+@pytest.mark.anyio
+async def test_get_trees(raw_data_store: DataStore) -> None:
+    expected_store_ids = set()
+
+    for n in range(10):
+        store_id = bytes32(b"\0" * 31 + bytes([n]))
+        await raw_data_store.create_tree(store_id=store_id)
+        expected_store_ids.add(store_id)
+
+    store_ids = await raw_data_store.get_store_ids()
+
+    assert store_ids == expected_store_ids
+
+
+@pytest.mark.anyio
+async def test_table_is_empty(data_store: DataStore, store_id: bytes32) -> None:
+    is_empty = await data_store.table_is_empty(store_id=store_id)
+    assert is_empty
+
+
+@pytest.mark.anyio
+async def test_table_is_not_empty(data_store: DataStore, store_id: bytes32) -> None:
+    key = b"\x01\x02"
+    value = b"abc"
+
+    await data_store.insert(
+        key=key,
+        value=value,
+        store_id=store_id,
+        reference_node_hash=None,
+        side=None,
+        status=Status.COMMITTED,
+    )
+
+    is_empty = await data_store.table_is_empty(store_id=store_id)
+    assert not is_empty
+
+
+# @pytest.mark.anyio
+# async def test_create_root_provides_bytes32(raw_data_store: DataStore, store_id: bytes32) -> None:
+#     await raw_data_store.create_tree(store_id=store_id)
+#     # TODO: catchup with the node_hash=
+#     root_hash = await raw_data_store.create_root(store_id=store_id, node_hash=23)
+#
+#     assert isinstance(root_hash, bytes32)
+
+
+@pytest.mark.anyio
+async def test_insert_over_empty(data_store: DataStore, store_id: bytes32) -> None:
+    key = b"\x01\x02"
+    value = b"abc"
+
+    insert_result = await data_store.insert(
+        key=key, value=value, store_id=store_id, reference_node_hash=None, side=None
+    )
+    assert insert_result.node_hash == leaf_hash(key=key, value=value)
+
+
+@pytest.mark.anyio
+async def test_insert_increments_generation(data_store: DataStore, store_id: bytes32) -> None:
+    keys = [b"a", b"b", b"c", b"d"]  # efghijklmnopqrstuvwxyz")
+    value = b"\x01\x02\x03"
+
+    generations = []
+    expected = []
+
+    node_hash = None
+    for key, expected_generation in zip(keys, itertools.count(start=1)):
+        insert_result = await data_store.insert(
+            key=key,
+            value=value,
+            store_id=store_id,
+            reference_node_hash=node_hash,
+            side=None if node_hash is None else Side.LEFT,
+            status=Status.COMMITTED,
+        )
+        node_hash = insert_result.node_hash
+        generation = await data_store.get_tree_generation(store_id=store_id)
+        generations.append(generation)
+        expected.append(expected_generation)
+
+    assert generations == expected
+
+
+@pytest.mark.anyio
+async def test_get_tree_generation_returns_none_when_none_available(
+    raw_data_store: DataStore,
+    store_id: bytes32,
+) -> None:
+    with pytest.raises(Exception, match=re.escape(f"No generations found for store ID: {store_id.hex()}")):
+        await raw_data_store.get_tree_generation(store_id=store_id)
+
+
+@pytest.mark.anyio
+async def test_insert_internal_node_does_nothing_if_matching(data_store: DataStore, store_id: bytes32) -> None:
+    await add_01234567_example(data_store=data_store, store_id=store_id)
+
+    kv_node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
+    ancestors = await data_store.get_ancestors(node_hash=kv_node.hash, store_id=store_id)
+    parent = ancestors[0]
+
+    async with data_store.db_wrapper.reader() as reader:
+        cursor = await reader.execute("SELECT * FROM node")
+        before = await cursor.fetchall()
+
+    await data_store._insert_internal_node(left_hash=parent.left_hash, right_hash=parent.right_hash)
+
+    async with data_store.db_wrapper.reader() as reader:
+        cursor = await reader.execute("SELECT * FROM node")
+        after = await cursor.fetchall()
+
+    assert after == before
+
+
+@pytest.mark.anyio
+async def test_insert_terminal_node_does_nothing_if_matching(data_store: DataStore, store_id: bytes32) -> None:
+    await add_01234567_example(data_store=data_store, store_id=store_id)
+
+    kv_node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
+
+    async with data_store.db_wrapper.reader() as reader:
+        cursor = await reader.execute("SELECT * FROM node")
+        before = await cursor.fetchall()
+
+    await data_store._insert_terminal_node(key=kv_node.key, value=kv_node.value)
+
+    async with data_store.db_wrapper.reader() as reader:
+        cursor = await reader.execute("SELECT * FROM node")
+        after = await cursor.fetchall()
+
+    assert after == before
+
+
+@pytest.mark.anyio
+async def test_build_a_tree(
+    data_store: DataStore,
+    store_id: bytes32,
+    create_example: Callable[[DataStore, bytes32], Awaitable[Example]],
+) -> None:
+    example = await create_example(data_store, store_id)
+
+    await _debug_dump(db=data_store.db_wrapper, description="final")
+    actual = await data_store.get_tree_as_nodes(store_id=store_id)
+    # print("actual  ", actual.as_python())
+    # print("expected", example.expected.as_python())
+    assert actual == example.expected
+
+
+@pytest.mark.anyio
+async def test_get_node_by_key(data_store: DataStore, store_id: bytes32) -> None:
|
|
271
|
+
example = await add_0123_example(data_store=data_store, store_id=store_id)
|
|
272
|
+
|
|
273
|
+
key_node_hash = example.terminal_nodes[2]
|
|
274
|
+
|
|
275
|
+
# TODO: make a nicer relationship between the hash and the key
|
|
276
|
+
|
|
277
|
+
actual = await data_store.get_node_by_key(key=b"\x02", store_id=store_id)
|
|
278
|
+
assert actual.hash == key_node_hash
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
@pytest.mark.anyio
|
|
282
|
+
async def test_get_ancestors(data_store: DataStore, store_id: bytes32) -> None:
|
|
283
|
+
example = await add_0123_example(data_store=data_store, store_id=store_id)
|
|
284
|
+
|
|
285
|
+
reference_node_hash = example.terminal_nodes[2]
|
|
286
|
+
|
|
287
|
+
ancestors = await data_store.get_ancestors(node_hash=reference_node_hash, store_id=store_id)
|
|
288
|
+
hashes = [node.hash.hex() for node in ancestors]
|
|
289
|
+
|
|
290
|
+
# TODO: reverify these are correct
|
|
291
|
+
assert hashes == [
|
|
292
|
+
"3ab212e30b0e746d81a993e39f2cb4ba843412d44b402c1117a500d6451309e3",
|
|
293
|
+
"c852ecd8fb61549a0a42f9eb9dde65e6c94a01934dbd9c1d35ab94e2a0ae58e2",
|
|
294
|
+
]
|
|
295
|
+
|
|
296
|
+
ancestors_2 = await data_store.get_ancestors_optimized(node_hash=reference_node_hash, store_id=store_id)
|
|
297
|
+
assert ancestors == ancestors_2
|
|
298
|
+
|
|
299
|
+
|
|
300
|
+
@pytest.mark.anyio
|
|
301
|
+
async def test_get_ancestors_optimized(data_store: DataStore, store_id: bytes32) -> None:
|
|
302
|
+
ancestors: list[tuple[int, bytes32, list[InternalNode]]] = []
|
|
303
|
+
random = Random()
|
|
304
|
+
random.seed(100, version=2)
|
|
305
|
+
|
|
306
|
+
first_insertions = [True, False, True, False, True, True, False, True, False, True, True, False, False, True, False]
|
|
307
|
+
deleted_all = False
|
|
308
|
+
node_count = 0
|
|
309
|
+
for i in range(1000):
|
|
310
|
+
is_insert = False
|
|
311
|
+
if i <= 14:
|
|
312
|
+
is_insert = first_insertions[i]
|
|
313
|
+
if i > 14 and i <= 25:
|
|
314
|
+
is_insert = True
|
|
315
|
+
if i > 25 and i <= 200 and random.randint(0, 4):
|
|
316
|
+
is_insert = True
|
|
317
|
+
if i > 200:
|
|
318
|
+
if not deleted_all:
|
|
319
|
+
while node_count > 0:
|
|
320
|
+
node_count -= 1
|
|
321
|
+
seed = bytes32(b"0" * 32)
|
|
322
|
+
node_hash = await data_store.get_terminal_node_for_seed(store_id, seed)
|
|
323
|
+
assert node_hash is not None
|
|
324
|
+
node = await data_store.get_node(node_hash)
|
|
325
|
+
assert isinstance(node, TerminalNode)
|
|
326
|
+
await data_store.delete(key=node.key, store_id=store_id, status=Status.COMMITTED)
|
|
327
|
+
deleted_all = True
|
|
328
|
+
is_insert = True
|
|
329
|
+
else:
|
|
330
|
+
assert node_count <= 4
|
|
331
|
+
if node_count == 0:
|
|
332
|
+
is_insert = True
|
|
333
|
+
elif node_count < 4 and random.randint(0, 2):
|
|
334
|
+
is_insert = True
|
|
335
|
+
key = (i % 200).to_bytes(4, byteorder="big")
|
|
336
|
+
value = (i % 200).to_bytes(4, byteorder="big")
|
|
337
|
+
seed = Program.to((key, value)).get_tree_hash()
|
|
338
|
+
node_hash = await data_store.get_terminal_node_for_seed(store_id, seed)
|
|
339
|
+
if is_insert:
|
|
340
|
+
node_count += 1
|
|
341
|
+
side = None if node_hash is None else data_store.get_side_for_seed(seed)
|
|
342
|
+
|
|
343
|
+
insert_result = await data_store.insert(
|
|
344
|
+
key=key,
|
|
345
|
+
value=value,
|
|
346
|
+
store_id=store_id,
|
|
347
|
+
reference_node_hash=node_hash,
|
|
348
|
+
side=side,
|
|
349
|
+
use_optimized=False,
|
|
350
|
+
status=Status.COMMITTED,
|
|
351
|
+
)
|
|
352
|
+
node_hash = insert_result.node_hash
|
|
353
|
+
if node_hash is not None:
|
|
354
|
+
generation = await data_store.get_tree_generation(store_id=store_id)
|
|
355
|
+
current_ancestors = await data_store.get_ancestors(node_hash=node_hash, store_id=store_id)
|
|
356
|
+
ancestors.append((generation, node_hash, current_ancestors))
|
|
357
|
+
else:
|
|
358
|
+
node_count -= 1
|
|
359
|
+
assert node_hash is not None
|
|
360
|
+
node = await data_store.get_node(node_hash)
|
|
361
|
+
assert isinstance(node, TerminalNode)
|
|
362
|
+
await data_store.delete(key=node.key, store_id=store_id, use_optimized=False, status=Status.COMMITTED)
|
|
363
|
+
|
|
364
|
+
for generation, node_hash, expected_ancestors in ancestors:
|
|
365
|
+
current_ancestors = await data_store.get_ancestors_optimized(
|
|
366
|
+
node_hash=node_hash, store_id=store_id, generation=generation
|
|
367
|
+
)
|
|
368
|
+
assert current_ancestors == expected_ancestors
|
|
369
|
+
|
|
370
|
+
|
|
371
|
+
@pytest.mark.anyio
|
|
372
|
+
@pytest.mark.parametrize(
|
|
373
|
+
"use_optimized",
|
|
374
|
+
[True, False],
|
|
375
|
+
)
|
|
376
|
+
@pytest.mark.parametrize(
|
|
377
|
+
"num_batches",
|
|
378
|
+
[1, 5, 10, 25],
|
|
379
|
+
)
|
|
380
|
+
async def test_batch_update(
|
|
381
|
+
data_store: DataStore,
|
|
382
|
+
store_id: bytes32,
|
|
383
|
+
use_optimized: bool,
|
|
384
|
+
tmp_path: Path,
|
|
385
|
+
num_batches: int,
|
|
386
|
+
) -> None:
|
|
387
|
+
total_operations = 1000 if use_optimized else 100
|
|
388
|
+
num_ops_per_batch = total_operations // num_batches
|
|
389
|
+
saved_batches: list[list[dict[str, Any]]] = []
|
|
390
|
+
saved_kv: list[list[TerminalNode]] = []
|
|
391
|
+
db_uri = generate_in_memory_db_uri()
|
|
392
|
+
async with DataStore.managed(database=db_uri, uri=True) as single_op_data_store:
|
|
393
|
+
await single_op_data_store.create_tree(store_id, status=Status.COMMITTED)
|
|
394
|
+
random = Random()
|
|
395
|
+
random.seed(100, version=2)
|
|
396
|
+
|
|
397
|
+
batch: list[dict[str, Any]] = []
|
|
398
|
+
keys_values: dict[bytes, bytes] = {}
|
|
399
|
+
for operation in range(num_batches * num_ops_per_batch):
|
|
400
|
+
[op_type] = random.choices(
|
|
401
|
+
["insert", "upsert-insert", "upsert-update", "delete"],
|
|
402
|
+
[0.4, 0.2, 0.2, 0.2],
|
|
403
|
+
k=1,
|
|
404
|
+
)
|
|
405
|
+
if op_type in {"insert", "upsert-insert"} or len(keys_values) == 0:
|
|
406
|
+
if len(keys_values) == 0:
|
|
407
|
+
op_type = "insert"
|
|
408
|
+
key = operation.to_bytes(4, byteorder="big")
|
|
409
|
+
value = (2 * operation).to_bytes(4, byteorder="big")
|
|
410
|
+
if op_type == "insert":
|
|
411
|
+
await single_op_data_store.autoinsert(
|
|
412
|
+
key=key,
|
|
413
|
+
value=value,
|
|
414
|
+
store_id=store_id,
|
|
415
|
+
use_optimized=use_optimized,
|
|
416
|
+
status=Status.COMMITTED,
|
|
417
|
+
)
|
|
418
|
+
else:
|
|
419
|
+
await single_op_data_store.upsert(
|
|
420
|
+
key=key,
|
|
421
|
+
new_value=value,
|
|
422
|
+
store_id=store_id,
|
|
423
|
+
use_optimized=use_optimized,
|
|
424
|
+
status=Status.COMMITTED,
|
|
425
|
+
)
|
|
426
|
+
action = "insert" if op_type == "insert" else "upsert"
|
|
427
|
+
batch.append({"action": action, "key": key, "value": value})
|
|
428
|
+
keys_values[key] = value
|
|
429
|
+
elif op_type == "delete":
|
|
430
|
+
key = random.choice(list(keys_values.keys()))
|
|
431
|
+
del keys_values[key]
|
|
432
|
+
await single_op_data_store.delete(
|
|
433
|
+
key=key,
|
|
434
|
+
store_id=store_id,
|
|
435
|
+
use_optimized=use_optimized,
|
|
436
|
+
status=Status.COMMITTED,
|
|
437
|
+
)
|
|
438
|
+
batch.append({"action": "delete", "key": key})
|
|
439
|
+
else:
|
|
440
|
+
assert op_type == "upsert-update"
|
|
441
|
+
key = random.choice(list(keys_values.keys()))
|
|
442
|
+
old_value = keys_values[key]
|
|
443
|
+
new_value_int = int.from_bytes(old_value, byteorder="big") + 1
|
|
444
|
+
new_value = new_value_int.to_bytes(4, byteorder="big")
|
|
445
|
+
await single_op_data_store.upsert(
|
|
446
|
+
key=key,
|
|
447
|
+
new_value=new_value,
|
|
448
|
+
store_id=store_id,
|
|
449
|
+
use_optimized=use_optimized,
|
|
450
|
+
status=Status.COMMITTED,
|
|
451
|
+
)
|
|
452
|
+
keys_values[key] = new_value
|
|
453
|
+
batch.append({"action": "upsert", "key": key, "value": new_value})
|
|
454
|
+
if (operation + 1) % num_ops_per_batch == 0:
|
|
455
|
+
saved_batches.append(batch)
|
|
456
|
+
batch = []
|
|
457
|
+
current_kv = await single_op_data_store.get_keys_values(store_id=store_id)
|
|
458
|
+
assert {kv.key: kv.value for kv in current_kv} == keys_values
|
|
459
|
+
saved_kv.append(current_kv)
|
|
460
|
+
|
|
461
|
+
for batch_number, batch in enumerate(saved_batches):
|
|
462
|
+
assert len(batch) == num_ops_per_batch
|
|
463
|
+
await data_store.insert_batch(store_id, batch, status=Status.COMMITTED)
|
|
464
|
+
root = await data_store.get_tree_root(store_id)
|
|
465
|
+
assert root.generation == batch_number + 1
|
|
466
|
+
assert root.node_hash is not None
|
|
467
|
+
current_kv = await data_store.get_keys_values(store_id=store_id)
|
|
468
|
+
# Get the same keys/values, but possibly stored in other order.
|
|
469
|
+
assert {node.key: node.value for node in current_kv} == {
|
|
470
|
+
node.key: node.value for node in saved_kv[batch_number]
|
|
471
|
+
}
|
|
472
|
+
queue: list[bytes32] = [root.node_hash]
|
|
473
|
+
ancestors: dict[bytes32, bytes32] = {}
|
|
474
|
+
while len(queue) > 0:
|
|
475
|
+
node_hash = queue.pop(0)
|
|
476
|
+
expected_ancestors = []
|
|
477
|
+
ancestor = node_hash
|
|
478
|
+
while ancestor in ancestors:
|
|
479
|
+
ancestor = ancestors[ancestor]
|
|
480
|
+
expected_ancestors.append(ancestor)
|
|
481
|
+
result_ancestors = await data_store.get_ancestors_optimized(node_hash, store_id)
|
|
482
|
+
assert [node.hash for node in result_ancestors] == expected_ancestors
|
|
483
|
+
node = await data_store.get_node(node_hash)
|
|
484
|
+
if isinstance(node, InternalNode):
|
|
485
|
+
queue.append(node.left_hash)
|
|
486
|
+
queue.append(node.right_hash)
|
|
487
|
+
ancestors[node.left_hash] = node_hash
|
|
488
|
+
ancestors[node.right_hash] = node_hash
|
|
489
|
+
|
|
490
|
+
all_kv = await data_store.get_keys_values(store_id)
|
|
491
|
+
assert {node.key: node.value for node in all_kv} == keys_values
|
|
492
|
+
|
|
493
|
+
|
|
494
|
+
@pytest.mark.anyio
|
|
495
|
+
@pytest.mark.parametrize(
|
|
496
|
+
"use_optimized",
|
|
497
|
+
[True, False],
|
|
498
|
+
)
|
|
499
|
+
async def test_upsert_ignores_existing_arguments(
|
|
500
|
+
data_store: DataStore,
|
|
501
|
+
store_id: bytes32,
|
|
502
|
+
use_optimized: bool,
|
|
503
|
+
) -> None:
|
|
504
|
+
key = b"key"
|
|
505
|
+
value = b"value1"
|
|
506
|
+
|
|
507
|
+
await data_store.autoinsert(
|
|
508
|
+
key=key,
|
|
509
|
+
value=value,
|
|
510
|
+
store_id=store_id,
|
|
511
|
+
use_optimized=use_optimized,
|
|
512
|
+
status=Status.COMMITTED,
|
|
513
|
+
)
|
|
514
|
+
node = await data_store.get_node_by_key(key, store_id)
|
|
515
|
+
assert node.value == value
|
|
516
|
+
|
|
517
|
+
new_value = b"value2"
|
|
518
|
+
await data_store.upsert(
|
|
519
|
+
key=key,
|
|
520
|
+
new_value=new_value,
|
|
521
|
+
store_id=store_id,
|
|
522
|
+
use_optimized=use_optimized,
|
|
523
|
+
status=Status.COMMITTED,
|
|
524
|
+
)
|
|
525
|
+
node = await data_store.get_node_by_key(key, store_id)
|
|
526
|
+
assert node.value == new_value
|
|
527
|
+
|
|
528
|
+
await data_store.upsert(
|
|
529
|
+
key=key,
|
|
530
|
+
new_value=new_value,
|
|
531
|
+
store_id=store_id,
|
|
532
|
+
use_optimized=use_optimized,
|
|
533
|
+
status=Status.COMMITTED,
|
|
534
|
+
)
|
|
535
|
+
node = await data_store.get_node_by_key(key, store_id)
|
|
536
|
+
assert node.value == new_value
|
|
537
|
+
|
|
538
|
+
key2 = b"key2"
|
|
539
|
+
await data_store.upsert(
|
|
540
|
+
key=key2,
|
|
541
|
+
new_value=value,
|
|
542
|
+
store_id=store_id,
|
|
543
|
+
use_optimized=use_optimized,
|
|
544
|
+
status=Status.COMMITTED,
|
|
545
|
+
)
|
|
546
|
+
node = await data_store.get_node_by_key(key2, store_id)
|
|
547
|
+
assert node.value == value
|
|
548
|
+
|
|
549
|
+
|
|
550
|
+
@pytest.mark.parametrize(argnames="side", argvalues=list(Side))
|
|
551
|
+
@pytest.mark.anyio
|
|
552
|
+
async def test_insert_batch_reference_and_side(
|
|
553
|
+
data_store: DataStore,
|
|
554
|
+
store_id: bytes32,
|
|
555
|
+
side: Side,
|
|
556
|
+
) -> None:
|
|
557
|
+
insert_result = await data_store.autoinsert(
|
|
558
|
+
key=b"key1",
|
|
559
|
+
value=b"value1",
|
|
560
|
+
store_id=store_id,
|
|
561
|
+
status=Status.COMMITTED,
|
|
562
|
+
)
|
|
563
|
+
|
|
564
|
+
new_root_hash = await data_store.insert_batch(
|
|
565
|
+
store_id=store_id,
|
|
566
|
+
changelist=[
|
|
567
|
+
{
|
|
568
|
+
"action": "insert",
|
|
569
|
+
"key": b"key2",
|
|
570
|
+
"value": b"value2",
|
|
571
|
+
"reference_node_hash": insert_result.node_hash,
|
|
572
|
+
"side": side,
|
|
573
|
+
},
|
|
574
|
+
],
|
|
575
|
+
)
|
|
576
|
+
assert new_root_hash is not None, "batch insert failed or failed to update root"
|
|
577
|
+
|
|
578
|
+
parent = await data_store.get_node(new_root_hash)
|
|
579
|
+
assert isinstance(parent, InternalNode)
|
|
580
|
+
if side == Side.LEFT:
|
|
581
|
+
child = await data_store.get_node(parent.left_hash)
|
|
582
|
+
assert parent.left_hash == child.hash
|
|
583
|
+
elif side == Side.RIGHT:
|
|
584
|
+
child = await data_store.get_node(parent.right_hash)
|
|
585
|
+
assert parent.right_hash == child.hash
|
|
586
|
+
else: # pragma: no cover
|
|
587
|
+
raise Exception("invalid side for test")
|
|
588
|
+
|
|
589
|
+
|
|
590
|
+
@pytest.mark.anyio
|
|
591
|
+
async def test_ancestor_table_unique_inserts(data_store: DataStore, store_id: bytes32) -> None:
|
|
592
|
+
await add_0123_example(data_store=data_store, store_id=store_id)
|
|
593
|
+
hash_1 = bytes32.from_hexstr("0763561814685fbf92f6ca71fbb1cb11821951450d996375c239979bd63e9535")
|
|
594
|
+
hash_2 = bytes32.from_hexstr("924be8ff27e84cba17f5bc918097f8410fab9824713a4668a21c8e060a8cab40")
|
|
595
|
+
await data_store._insert_ancestor_table(hash_1, hash_2, store_id, 2)
|
|
596
|
+
await data_store._insert_ancestor_table(hash_1, hash_2, store_id, 2)
|
|
597
|
+
with pytest.raises(Exception, match="^Requested insertion of ancestor"):
|
|
598
|
+
await data_store._insert_ancestor_table(hash_1, hash_1, store_id, 2)
|
|
599
|
+
await data_store._insert_ancestor_table(hash_1, hash_2, store_id, 2)
|
|
600
|
+
|
|
601
|
+
|
|
602
|
+
@pytest.mark.anyio
|
|
603
|
+
async def test_get_pairs(
|
|
604
|
+
data_store: DataStore,
|
|
605
|
+
store_id: bytes32,
|
|
606
|
+
create_example: Callable[[DataStore, bytes32], Awaitable[Example]],
|
|
607
|
+
) -> None:
|
|
608
|
+
example = await create_example(data_store, store_id)
|
|
609
|
+
|
|
610
|
+
pairs = await data_store.get_keys_values(store_id=store_id)
|
|
611
|
+
|
|
612
|
+
assert [node.hash for node in pairs] == example.terminal_nodes
|
|
613
|
+
|
|
614
|
+
|
|
615
|
+
@pytest.mark.anyio
|
|
616
|
+
async def test_get_pairs_when_empty(data_store: DataStore, store_id: bytes32) -> None:
|
|
617
|
+
pairs = await data_store.get_keys_values(store_id=store_id)
|
|
618
|
+
|
|
619
|
+
assert pairs == []
|
|
620
|
+
|
|
621
|
+
|
|
622
|
+
@pytest.mark.parametrize(
|
|
623
|
+
argnames=["first_value", "second_value"],
|
|
624
|
+
argvalues=[[b"\x06", b"\x06"], [b"\x06", b"\x07"]],
|
|
625
|
+
ids=["same values", "different values"],
|
|
626
|
+
)
|
|
627
|
+
@pytest.mark.anyio()
|
|
628
|
+
async def test_inserting_duplicate_key_fails(
|
|
629
|
+
data_store: DataStore,
|
|
630
|
+
store_id: bytes32,
|
|
631
|
+
first_value: bytes,
|
|
632
|
+
second_value: bytes,
|
|
633
|
+
) -> None:
|
|
634
|
+
key = b"\x05"
|
|
635
|
+
|
|
636
|
+
insert_result = await data_store.insert(
|
|
637
|
+
key=key,
|
|
638
|
+
value=first_value,
|
|
639
|
+
store_id=store_id,
|
|
640
|
+
reference_node_hash=None,
|
|
641
|
+
side=None,
|
|
642
|
+
)
|
|
643
|
+
|
|
644
|
+
# TODO: more specific exception
|
|
645
|
+
with pytest.raises(Exception):
|
|
646
|
+
await data_store.insert(
|
|
647
|
+
key=key,
|
|
648
|
+
value=second_value,
|
|
649
|
+
store_id=store_id,
|
|
650
|
+
reference_node_hash=insert_result.node_hash,
|
|
651
|
+
side=Side.RIGHT,
|
|
652
|
+
)
|
|
653
|
+
|
|
654
|
+
# TODO: more specific exception
|
|
655
|
+
with pytest.raises(Exception):
|
|
656
|
+
await data_store.insert(
|
|
657
|
+
key=key,
|
|
658
|
+
value=second_value,
|
|
659
|
+
store_id=store_id,
|
|
660
|
+
reference_node_hash=insert_result.node_hash,
|
|
661
|
+
side=Side.RIGHT,
|
|
662
|
+
)
|
|
663
|
+
|
|
664
|
+
|
|
665
|
+
@pytest.mark.anyio()
|
|
666
|
+
async def test_inserting_invalid_length_hash_raises_original_exception(
|
|
667
|
+
data_store: DataStore,
|
|
668
|
+
) -> None:
|
|
669
|
+
with pytest.raises(aiosqlite.IntegrityError):
|
|
670
|
+
# casting since we are testing an invalid case
|
|
671
|
+
await data_store._insert_node(
|
|
672
|
+
node_hash=cast(bytes32, b"\x05"),
|
|
673
|
+
node_type=NodeType.TERMINAL,
|
|
674
|
+
left_hash=None,
|
|
675
|
+
right_hash=None,
|
|
676
|
+
key=b"\x06",
|
|
677
|
+
value=b"\x07",
|
|
678
|
+
)
|
|
679
|
+
|
|
680
|
+
|
|
681
|
+
@pytest.mark.anyio()
|
|
682
|
+
async def test_inserting_invalid_length_ancestor_hash_raises_original_exception(
|
|
683
|
+
data_store: DataStore,
|
|
684
|
+
store_id: bytes32,
|
|
685
|
+
) -> None:
|
|
686
|
+
with pytest.raises(aiosqlite.IntegrityError):
|
|
687
|
+
# casting since we are testing an invalid case
|
|
688
|
+
await data_store._insert_ancestor_table(
|
|
689
|
+
left_hash=bytes32(b"\x01" * 32),
|
|
690
|
+
right_hash=bytes32(b"\x02" * 32),
|
|
691
|
+
store_id=store_id,
|
|
692
|
+
generation=0,
|
|
693
|
+
)
|
|
694
|
+
|
|
695
|
+
|
|
696
|
+
@pytest.mark.anyio()
|
|
697
|
+
async def test_autoinsert_balances_from_scratch(data_store: DataStore, store_id: bytes32) -> None:
|
|
698
|
+
random = Random()
|
|
699
|
+
random.seed(100, version=2)
|
|
700
|
+
hashes = []
|
|
701
|
+
|
|
702
|
+
for i in range(2000):
|
|
703
|
+
key = (i + 100).to_bytes(4, byteorder="big")
|
|
704
|
+
value = (i + 200).to_bytes(4, byteorder="big")
|
|
705
|
+
insert_result = await data_store.autoinsert(key, value, store_id, status=Status.COMMITTED)
|
|
706
|
+
hashes.append(insert_result.node_hash)
|
|
707
|
+
|
|
708
|
+
heights = {node_hash: len(await data_store.get_ancestors_optimized(node_hash, store_id)) for node_hash in hashes}
|
|
709
|
+
too_tall = {hash: height for hash, height in heights.items() if height > 14}
|
|
710
|
+
assert too_tall == {}
|
|
711
|
+
assert 11 <= statistics.mean(heights.values()) <= 12
|
|
712
|
+
|
|
713
|
+
|
|
714
|
+
@pytest.mark.anyio()
|
|
715
|
+
async def test_autoinsert_balances_gaps(data_store: DataStore, store_id: bytes32) -> None:
|
|
716
|
+
random = Random()
|
|
717
|
+
random.seed(101, version=2)
|
|
718
|
+
hashes = []
|
|
719
|
+
|
|
720
|
+
for i in range(2000):
|
|
721
|
+
key = (i + 100).to_bytes(4, byteorder="big")
|
|
722
|
+
value = (i + 200).to_bytes(4, byteorder="big")
|
|
723
|
+
if i == 0 or i > 10:
|
|
724
|
+
insert_result = await data_store.autoinsert(key, value, store_id, status=Status.COMMITTED)
|
|
725
|
+
else:
|
|
726
|
+
reference_node_hash = await data_store.get_terminal_node_for_seed(store_id, bytes32.zeros)
|
|
727
|
+
insert_result = await data_store.insert(
|
|
728
|
+
key=key,
|
|
729
|
+
value=value,
|
|
730
|
+
store_id=store_id,
|
|
731
|
+
reference_node_hash=reference_node_hash,
|
|
732
|
+
side=Side.LEFT,
|
|
733
|
+
status=Status.COMMITTED,
|
|
734
|
+
)
|
|
735
|
+
ancestors = await data_store.get_ancestors_optimized(insert_result.node_hash, store_id)
|
|
736
|
+
assert len(ancestors) == i
|
|
737
|
+
hashes.append(insert_result.node_hash)
|
|
738
|
+
|
|
739
|
+
heights = {node_hash: len(await data_store.get_ancestors_optimized(node_hash, store_id)) for node_hash in hashes}
|
|
740
|
+
too_tall = {hash: height for hash, height in heights.items() if height > 14}
|
|
741
|
+
assert too_tall == {}
|
|
742
|
+
assert 11 <= statistics.mean(heights.values()) <= 12
|
|
743
|
+
|
|
744
|
+
|
|
745
|
+
@pytest.mark.anyio()
|
|
746
|
+
async def test_delete_from_left_both_terminal(data_store: DataStore, store_id: bytes32) -> None:
|
|
747
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
748
|
+
|
|
749
|
+
expected = InternalNode.from_child_nodes(
|
|
750
|
+
left=InternalNode.from_child_nodes(
|
|
751
|
+
left=InternalNode.from_child_nodes(
|
|
752
|
+
left=TerminalNode.from_key_value(key=b"\x00", value=b"\x10\x00"),
|
|
753
|
+
right=TerminalNode.from_key_value(key=b"\x01", value=b"\x11\x01"),
|
|
754
|
+
),
|
|
755
|
+
right=InternalNode.from_child_nodes(
|
|
756
|
+
left=TerminalNode.from_key_value(key=b"\x02", value=b"\x12\x02"),
|
|
757
|
+
right=TerminalNode.from_key_value(key=b"\x03", value=b"\x13\x03"),
|
|
758
|
+
),
|
|
759
|
+
),
|
|
760
|
+
right=InternalNode.from_child_nodes(
|
|
761
|
+
left=TerminalNode.from_key_value(key=b"\x05", value=b"\x15\x05"),
|
|
762
|
+
right=InternalNode.from_child_nodes(
|
|
763
|
+
left=TerminalNode.from_key_value(key=b"\x06", value=b"\x16\x06"),
|
|
764
|
+
right=TerminalNode.from_key_value(key=b"\x07", value=b"\x17\x07"),
|
|
765
|
+
),
|
|
766
|
+
),
|
|
767
|
+
)
|
|
768
|
+
|
|
769
|
+
await data_store.delete(key=b"\x04", store_id=store_id, status=Status.COMMITTED)
|
|
770
|
+
result = await data_store.get_tree_as_nodes(store_id=store_id)
|
|
771
|
+
|
|
772
|
+
assert result == expected
|
|
773
|
+
|
|
774
|
+
|
|
775
|
+
@pytest.mark.anyio()
|
|
776
|
+
async def test_delete_from_left_other_not_terminal(data_store: DataStore, store_id: bytes32) -> None:
|
|
777
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
778
|
+
|
|
779
|
+
expected = InternalNode.from_child_nodes(
|
|
780
|
+
left=InternalNode.from_child_nodes(
|
|
781
|
+
left=InternalNode.from_child_nodes(
|
|
782
|
+
left=TerminalNode.from_key_value(key=b"\x00", value=b"\x10\x00"),
|
|
783
|
+
right=TerminalNode.from_key_value(key=b"\x01", value=b"\x11\x01"),
|
|
784
|
+
),
|
|
785
|
+
right=InternalNode.from_child_nodes(
|
|
786
|
+
left=TerminalNode.from_key_value(key=b"\x02", value=b"\x12\x02"),
|
|
787
|
+
right=TerminalNode.from_key_value(key=b"\x03", value=b"\x13\x03"),
|
|
788
|
+
),
|
|
789
|
+
),
|
|
790
|
+
right=InternalNode.from_child_nodes(
|
|
791
|
+
left=TerminalNode.from_key_value(key=b"\x06", value=b"\x16\x06"),
|
|
792
|
+
right=TerminalNode.from_key_value(key=b"\x07", value=b"\x17\x07"),
|
|
793
|
+
),
|
|
794
|
+
)
|
|
795
|
+
|
|
796
|
+
await data_store.delete(key=b"\x04", store_id=store_id, status=Status.COMMITTED)
|
|
797
|
+
await data_store.delete(key=b"\x05", store_id=store_id, status=Status.COMMITTED)
|
|
798
|
+
result = await data_store.get_tree_as_nodes(store_id=store_id)
|
|
799
|
+
|
|
800
|
+
assert result == expected
|
|
801
|
+
|
|
802
|
+
|
|
803
|
+
@pytest.mark.anyio()
|
|
804
|
+
async def test_delete_from_right_both_terminal(data_store: DataStore, store_id: bytes32) -> None:
|
|
805
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
806
|
+
|
|
807
|
+
expected = InternalNode.from_child_nodes(
|
|
808
|
+
left=InternalNode.from_child_nodes(
|
|
809
|
+
left=InternalNode.from_child_nodes(
|
|
810
|
+
left=TerminalNode.from_key_value(key=b"\x00", value=b"\x10\x00"),
|
|
811
|
+
right=TerminalNode.from_key_value(key=b"\x01", value=b"\x11\x01"),
|
|
812
|
+
),
|
|
813
|
+
right=TerminalNode.from_key_value(key=b"\x02", value=b"\x12\x02"),
|
|
814
|
+
),
|
|
815
|
+
right=InternalNode.from_child_nodes(
|
|
816
|
+
left=InternalNode.from_child_nodes(
|
|
817
|
+
left=TerminalNode.from_key_value(key=b"\x04", value=b"\x14\x04"),
|
|
818
|
+
right=TerminalNode.from_key_value(key=b"\x05", value=b"\x15\x05"),
|
|
819
|
+
),
|
|
820
|
+
right=InternalNode.from_child_nodes(
|
|
821
|
+
left=TerminalNode.from_key_value(key=b"\x06", value=b"\x16\x06"),
|
|
822
|
+
right=TerminalNode.from_key_value(key=b"\x07", value=b"\x17\x07"),
|
|
823
|
+
),
|
|
824
|
+
),
|
|
825
|
+
)
|
|
826
|
+
|
|
827
|
+
await data_store.delete(key=b"\x03", store_id=store_id, status=Status.COMMITTED)
|
|
828
|
+
result = await data_store.get_tree_as_nodes(store_id=store_id)
|
|
829
|
+
|
|
830
|
+
assert result == expected
|
|
831
|
+
|
|
832
|
+
|
|
833
|
+
@pytest.mark.anyio()
|
|
834
|
+
async def test_delete_from_right_other_not_terminal(data_store: DataStore, store_id: bytes32) -> None:
|
|
835
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
836
|
+
|
|
837
|
+
expected = InternalNode.from_child_nodes(
|
|
838
|
+
left=InternalNode.from_child_nodes(
|
|
839
|
+
left=TerminalNode.from_key_value(key=b"\x00", value=b"\x10\x00"),
|
|
840
|
+
right=TerminalNode.from_key_value(key=b"\x01", value=b"\x11\x01"),
|
|
841
|
+
),
|
|
842
|
+
right=InternalNode.from_child_nodes(
|
|
843
|
+
left=InternalNode.from_child_nodes(
|
|
844
|
+
left=TerminalNode.from_key_value(key=b"\x04", value=b"\x14\x04"),
|
|
845
|
+
right=TerminalNode.from_key_value(key=b"\x05", value=b"\x15\x05"),
|
|
846
|
+
),
|
|
847
|
+
right=InternalNode.from_child_nodes(
|
|
848
|
+
left=TerminalNode.from_key_value(key=b"\x06", value=b"\x16\x06"),
|
|
849
|
+
right=TerminalNode.from_key_value(key=b"\x07", value=b"\x17\x07"),
|
|
850
|
+
),
|
|
851
|
+
),
|
|
852
|
+
)
|
|
853
|
+
|
|
854
|
+
await data_store.delete(key=b"\x03", store_id=store_id, status=Status.COMMITTED)
|
|
855
|
+
await data_store.delete(key=b"\x02", store_id=store_id, status=Status.COMMITTED)
|
|
856
|
+
result = await data_store.get_tree_as_nodes(store_id=store_id)
|
|
857
|
+
|
|
858
|
+
assert result == expected
|
|
859
|
+
|
|
860
|
+
|
|
861
|
+
@pytest.mark.anyio
|
|
862
|
+
async def test_proof_of_inclusion_by_hash(data_store: DataStore, store_id: bytes32) -> None:
|
|
863
|
+
"""A proof of inclusion contains the expected sibling side, sibling hash, combined
|
|
864
|
+
hash, key, value, and root hash values.
|
|
865
|
+
"""
|
|
866
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
867
|
+
root = await data_store.get_tree_root(store_id=store_id)
|
|
868
|
+
assert root.node_hash is not None
|
|
869
|
+
node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
|
|
870
|
+
|
|
871
|
+
proof = await data_store.get_proof_of_inclusion_by_hash(node_hash=node.hash, store_id=store_id)
|
|
872
|
+
|
|
873
|
+
print(node)
|
|
874
|
+
await _debug_dump(db=data_store.db_wrapper)
|
|
875
|
+
|
|
876
|
+
expected_layers = [
|
|
877
|
+
ProofOfInclusionLayer(
|
|
878
|
+
other_hash_side=Side.RIGHT,
|
|
879
|
+
other_hash=bytes32.fromhex("fb66fe539b3eb2020dfbfadfd601fa318521292b41f04c2057c16fca6b947ca1"),
|
|
880
|
+
combined_hash=bytes32.fromhex("36cb1fc56017944213055da8cb0178fb0938c32df3ec4472f5edf0dff85ba4a3"),
|
|
881
|
+
),
|
|
882
|
+
ProofOfInclusionLayer(
|
|
883
|
+
other_hash_side=Side.RIGHT,
|
|
884
|
+
other_hash=bytes32.fromhex("6d3af8d93db948e8b6aa4386958e137c6be8bab726db86789594b3588b35adcd"),
|
|
885
|
+
combined_hash=bytes32.fromhex("5f67a0ab1976e090b834bf70e5ce2a0f0a9cd474e19a905348c44ae12274d30b"),
|
|
886
|
+
),
|
|
887
|
+
ProofOfInclusionLayer(
|
|
888
|
+
other_hash_side=Side.LEFT,
|
|
889
|
+
other_hash=bytes32.fromhex("c852ecd8fb61549a0a42f9eb9dde65e6c94a01934dbd9c1d35ab94e2a0ae58e2"),
|
|
890
|
+
combined_hash=bytes32.fromhex("7a5193a4e31a0a72f6623dfeb2876022ab74a48abb5966088a1c6f5451cc5d81"),
|
|
891
|
+
),
|
|
892
|
+
]
|
|
893
|
+
|
|
894
|
+
assert proof == ProofOfInclusion(node_hash=node.hash, layers=expected_layers)
|
|
895
|
+
|
|
896
|
+
|
|
897
|
+
@pytest.mark.anyio
|
|
898
|
+
async def test_proof_of_inclusion_by_hash_no_ancestors(data_store: DataStore, store_id: bytes32) -> None:
|
|
899
|
+
"""Check proper proof of inclusion creation when the node being proved is the root."""
|
|
900
|
+
await data_store.autoinsert(key=b"\x04", value=b"\x03", store_id=store_id, status=Status.COMMITTED)
|
|
901
|
+
root = await data_store.get_tree_root(store_id=store_id)
|
|
902
|
+
assert root.node_hash is not None
|
|
903
|
+
node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
|
|
904
|
+
|
|
905
|
+
proof = await data_store.get_proof_of_inclusion_by_hash(node_hash=node.hash, store_id=store_id)
|
|
906
|
+
|
|
907
|
+
assert proof == ProofOfInclusion(node_hash=node.hash, layers=[])
|
|
908
|
+
|
|
909
|
+
|
|
910
|
+
@pytest.mark.anyio
|
|
911
|
+
async def test_proof_of_inclusion_by_hash_program(data_store: DataStore, store_id: bytes32) -> None:
|
|
912
|
+
"""The proof of inclusion program has the expected Python equivalence."""
|
|
913
|
+
|
|
914
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
915
|
+
node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
|
|
916
|
+
|
|
917
|
+
proof = await data_store.get_proof_of_inclusion_by_hash(node_hash=node.hash, store_id=store_id)
|
|
918
|
+
|
|
919
|
+
assert proof.as_program() == [
|
|
920
|
+
b"\x04",
|
|
921
|
+
[
|
|
922
|
+
bytes32.fromhex("fb66fe539b3eb2020dfbfadfd601fa318521292b41f04c2057c16fca6b947ca1"),
|
|
923
|
+
bytes32.fromhex("6d3af8d93db948e8b6aa4386958e137c6be8bab726db86789594b3588b35adcd"),
|
|
924
|
+
bytes32.fromhex("c852ecd8fb61549a0a42f9eb9dde65e6c94a01934dbd9c1d35ab94e2a0ae58e2"),
|
|
925
|
+
],
|
|
926
|
+
]
|
|
927
|
+
|
|
928
|
+
|
|
929
|
+
@pytest.mark.anyio
|
|
930
|
+
async def test_proof_of_inclusion_by_hash_equals_by_key(data_store: DataStore, store_id: bytes32) -> None:
|
|
931
|
+
"""The proof of inclusion is equal between hash and key requests."""
|
|
932
|
+
|
|
933
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
934
|
+
node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
|
|
935
|
+
|
|
936
|
+
proof_by_hash = await data_store.get_proof_of_inclusion_by_hash(node_hash=node.hash, store_id=store_id)
|
|
937
|
+
proof_by_key = await data_store.get_proof_of_inclusion_by_key(key=b"\x04", store_id=store_id)
|
|
938
|
+
|
|
939
|
+
assert proof_by_hash == proof_by_key
|
|
940
|
+
|
|
941
|
+
|
|
942
|
+
@pytest.mark.anyio
|
|
943
|
+
async def test_proof_of_inclusion_by_hash_bytes(data_store: DataStore, store_id: bytes32) -> None:
|
|
944
|
+
"""The proof of inclusion provided by the data store is able to be converted to a
|
|
945
|
+
program and subsequently to bytes.
|
|
946
|
+
"""
|
|
947
|
+
await add_01234567_example(data_store=data_store, store_id=store_id)
|
|
948
|
+
node = await data_store.get_node_by_key(key=b"\x04", store_id=store_id)
|
|
949
|
+
|
|
950
|
+
proof = await data_store.get_proof_of_inclusion_by_hash(node_hash=node.hash, store_id=store_id)
|
|
951
|
+
|
|
952
|
+
expected = (
|
|
953
|
+
b"\xff\x04\xff\xff\xa0\xfbf\xfeS\x9b>\xb2\x02\r\xfb\xfa\xdf\xd6\x01\xfa1\x85!)"
|
|
954
|
+
b"+A\xf0L W\xc1o\xcak\x94|\xa1\xff\xa0m:\xf8\xd9=\xb9H\xe8\xb6\xaaC\x86\x95"
|
|
955
|
+
b"\x8e\x13|k\xe8\xba\xb7&\xdb\x86x\x95\x94\xb3X\x8b5\xad\xcd\xff\xa0\xc8R\xec"
|
|
956
|
+
b"\xd8\xfbaT\x9a\nB\xf9\xeb\x9d\xdee\xe6\xc9J\x01\x93M\xbd\x9c\x1d5\xab\x94"
|
|
957
|
+
b"\xe2\xa0\xaeX\xe2\x80\x80"
|
|
958
|
+
)
|
|
959
|
+
|
|
960
|
+
assert bytes(proof.as_program()) == expected
|
|
961
|
+
|
|
962
|
+
|
|
963
|
+
# @pytest.mark.anyio
|
|
964
|
+
# async def test_create_first_pair(data_store: DataStore, store_id: bytes) -> None:
|
|
965
|
+
# key = SExp.to([1, 2])
|
|
966
|
+
# value = SExp.to(b'abc')
|
|
967
|
+
#
|
|
968
|
+
# root_hash = await data_store.create_root(store_id=store_id)
|
|
969
|
+
#
|
|
970
|
+
#
|
|
971
|
+
# await data_store.create_pair(key=key, value=value)
|
|
972
|
+
|
|
973
|
+
|
|
974
|
+
def test_all_checks_collected() -> None:
|
|
975
|
+
expected = {value for name, value in vars(DataStore).items() if name.startswith("_check_") and callable(value)}
|
|
976
|
+
|
|
977
|
+
assert set(DataStore._checks) == expected
|
|
978
|
+
|
|
979
|
+
|
|
980
|
+
a_bytes_32 = bytes32(range(32))
|
|
981
|
+
another_bytes_32 = bytes(reversed(a_bytes_32))
|
|
982
|
+
|
|
983
|
+
valid_program_hex = Program.to((b"abc", 2)).as_bin().hex()
|
|
984
|
+
invalid_program_hex = b"\xab\xcd".hex()
|
|
985
|
+
|
|
986
|
+
|
|
987
|
+
@pytest.mark.anyio
|
|
988
|
+
async def test_check_roots_are_incrementing_missing_zero(raw_data_store: DataStore) -> None:
|
|
989
|
+
store_id = hexstr_to_bytes("c954ab71ffaf5b0f129b04b35fdc7c84541f4375167e730e2646bfcfdb7cf2cd")
|
|
990
|
+
|
|
991
|
+
async with raw_data_store.db_wrapper.writer() as writer:
|
|
992
|
+
for generation in range(1, 5):
|
|
993
|
+
await writer.execute(
|
|
994
|
+
"""
|
|
995
|
+
INSERT INTO root(tree_id, generation, node_hash, status)
|
|
996
|
+
VALUES(:tree_id, :generation, :node_hash, :status)
|
|
997
|
+
""",
|
|
998
|
+
{
|
|
999
|
+
"tree_id": store_id,
|
|
1000
|
+
"generation": generation,
|
|
1001
|
+
"node_hash": None,
|
|
1002
|
+
"status": Status.COMMITTED.value,
|
|
1003
|
+
},
|
|
1004
|
+
)
|
|
1005
|
+
|
|
1006
|
+
with pytest.raises(
|
|
1007
|
+
TreeGenerationIncrementingError,
|
|
1008
|
+
match=r"\n +c954ab71ffaf5b0f129b04b35fdc7c84541f4375167e730e2646bfcfdb7cf2cd$",
|
|
1009
|
+
):
|
|
1010
|
+
await raw_data_store._check_roots_are_incrementing()
|
|
1011
|
+
|
|
1012
|
+
|
|
1013
|
+
@pytest.mark.anyio
|
|
1014
|
+
async def test_check_roots_are_incrementing_gap(raw_data_store: DataStore) -> None:
|
|
1015
|
+
store_id = hexstr_to_bytes("c954ab71ffaf5b0f129b04b35fdc7c84541f4375167e730e2646bfcfdb7cf2cd")
|
|
1016
|
+
|
|
1017
|
+
async with raw_data_store.db_wrapper.writer() as writer:
|
|
1018
|
+
for generation in [*range(5), *range(6, 10)]:
|
|
1019
|
+
await writer.execute(
|
|
1020
|
+
"""
|
|
1021
|
+
INSERT INTO root(tree_id, generation, node_hash, status)
|
|
1022
|
+
VALUES(:tree_id, :generation, :node_hash, :status)
|
|
1023
|
+
""",
|
|
1024
|
+
{
|
|
1025
|
+
"tree_id": store_id,
|
|
1026
|
+
"generation": generation,
|
|
1027
|
+
"node_hash": None,
|
|
1028
|
+
"status": Status.COMMITTED.value,
|
|
1029
|
+
},
|
|
1030
|
+
)
|
|
1031
|
+
|
|
1032
|
+
with pytest.raises(
|
|
1033
|
+
TreeGenerationIncrementingError,
|
|
1034
|
+
match=r"\n +c954ab71ffaf5b0f129b04b35fdc7c84541f4375167e730e2646bfcfdb7cf2cd$",
|
|
1035
|
+
):
|
|
1036
|
+
await raw_data_store._check_roots_are_incrementing()
|
|
1037
|
+
|
|
1038
|
+
|
|
1039
|
+
@pytest.mark.anyio
|
|
1040
|
+
async def test_check_hashes_internal(raw_data_store: DataStore) -> None:
|
|
1041
|
+
async with raw_data_store.db_wrapper.writer() as writer:
|
|
1042
|
+
await writer.execute(
|
|
1043
|
+
"INSERT INTO node(hash, node_type, left, right) VALUES(:hash, :node_type, :left, :right)",
|
|
1044
|
+
{
|
|
1045
|
+
"hash": a_bytes_32,
|
|
1046
|
+
"node_type": NodeType.INTERNAL,
|
|
1047
|
+
"left": a_bytes_32,
|
|
1048
|
+
"right": a_bytes_32,
|
|
1049
|
+
},
|
|
1050
|
+
)
|
|
1051
|
+
|
|
1052
|
+
with pytest.raises(
|
|
1053
|
+
NodeHashError,
|
|
1054
|
+
match=r"\n +000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f$",
|
|
1055
|
+
):
|
|
1056
|
+
await raw_data_store._check_hashes()
|
|
1057
|
+
|
|
1058
|
+
|
|
1059
|
+
@pytest.mark.anyio
|
|
1060
|
+
async def test_check_hashes_terminal(raw_data_store: DataStore) -> None:
|
|
1061
|
+
async with raw_data_store.db_wrapper.writer() as writer:
|
|
1062
|
+
await writer.execute(
|
|
1063
|
+
"INSERT INTO node(hash, node_type, key, value) VALUES(:hash, :node_type, :key, :value)",
|
|
1064
|
+
{
|
|
1065
|
+
"hash": a_bytes_32,
|
|
1066
|
+
"node_type": NodeType.TERMINAL,
|
|
1067
|
+
"key": Program.to((1, 2)).as_bin(),
|
|
1068
|
+
"value": Program.to((1, 2)).as_bin(),
|
|
1069
|
+
},
|
|
1070
|
+
)
|
|
1071
|
+
|
|
1072
|
+
with pytest.raises(
|
|
1073
|
+
NodeHashError,
|
|
1074
|
+
match=r"\n +000102030405060708090a0b0c0d0e0f101112131415161718191a1b1c1d1e1f$",
|
|
1075
|
+
):
|
|
1076
|
+
await raw_data_store._check_hashes()
|
|
1077
|
+
|
|
1078
|
+
|
|
1079
|
+
@pytest.mark.anyio
|
|
1080
|
+
async def test_root_state(data_store: DataStore, store_id: bytes32) -> None:
|
|
1081
|
+
key = b"\x01\x02"
|
|
1082
|
+
value = b"abc"
|
|
1083
|
+
await data_store.insert(
|
|
1084
|
+
key=key, value=value, store_id=store_id, reference_node_hash=None, side=None, status=Status.PENDING
|
|
1085
|
+
)
|
|
1086
|
+
is_empty = await data_store.table_is_empty(store_id=store_id)
|
|
1087
|
+
assert is_empty
|
|
1088
|
+
|
|
1089
|
+
|
|
1090
|
+
@pytest.mark.anyio
|
|
1091
|
+
async def test_change_root_state(data_store: DataStore, store_id: bytes32) -> None:
|
|
1092
|
+
key = b"\x01\x02"
|
|
1093
|
+
value = b"abc"
|
|
1094
|
+
await data_store.insert(
|
|
1095
|
+
key=key,
|
|
1096
|
+
value=value,
|
|
1097
|
+
store_id=store_id,
|
|
1098
|
+
reference_node_hash=None,
|
|
1099
|
+
side=None,
|
|
1100
|
+
)
|
|
1101
|
+
root = await data_store.get_pending_root(store_id)
|
|
1102
|
+
assert root is not None
|
|
1103
|
+
assert root.status == Status.PENDING
|
|
1104
|
+
is_empty = await data_store.table_is_empty(store_id=store_id)
|
|
1105
|
+
assert is_empty
|
|
1106
|
+
|
|
1107
|
+
await data_store.change_root_status(root, Status.PENDING_BATCH)
|
|
1108
|
+
root = await data_store.get_pending_root(store_id)
|
|
1109
|
+
assert root is not None
|
|
1110
|
+
assert root.status == Status.PENDING_BATCH
|
|
1111
|
+
is_empty = await data_store.table_is_empty(store_id=store_id)
|
|
1112
|
+
assert is_empty
|
|
1113
|
+
|
|
1114
|
+
await data_store.change_root_status(root, Status.COMMITTED)
|
|
1115
|
+
root = await data_store.get_tree_root(store_id)
|
|
1116
|
+
is_empty = await data_store.table_is_empty(store_id=store_id)
|
|
1117
|
+
assert not is_empty
|
|
1118
|
+
assert root.node_hash is not None
|
|
1119
|
+
root = await data_store.get_pending_root(store_id)
|
|
1120
|
+
assert root is None
|
|
1121
|
+
|
|
1122
|
+
|
|
1123
|
+
@pytest.mark.anyio
|
|
1124
|
+
async def test_kv_diff(data_store: DataStore, store_id: bytes32) -> None:
|
|
1125
|
+
random = Random()
|
|
1126
|
+
random.seed(100, version=2)
|
|
1127
|
+
insertions = 0
|
|
1128
|
+
expected_diff: set[DiffData] = set()
|
|
1129
|
+
root_start = None
|
|
1130
|
+
for i in range(500):
|
|
1131
|
+
key = (i + 100).to_bytes(4, byteorder="big")
|
|
1132
|
+
value = (i + 200).to_bytes(4, byteorder="big")
|
|
1133
|
+
seed = leaf_hash(key=key, value=value)
|
|
1134
|
+
node_hash = await data_store.get_terminal_node_for_seed(store_id, seed)
|
|
1135
|
+
if random.randint(0, 4) > 0 or insertions < 10:
|
|
1136
|
+
insertions += 1
|
|
1137
|
+
side = None if node_hash is None else data_store.get_side_for_seed(seed)
|
|
1138
|
+
|
|
1139
|
+
await data_store.insert(
|
|
1140
|
+
key=key,
|
|
1141
|
+
value=value,
|
|
1142
|
+
store_id=store_id,
|
|
1143
|
+
reference_node_hash=node_hash,
|
|
1144
|
+
side=side,
|
|
1145
|
+
status=Status.COMMITTED,
|
|
1146
|
+
)
|
|
1147
|
+
if i > 200:
|
|
1148
|
+
expected_diff.add(DiffData(OperationType.INSERT, key, value))
|
|
1149
|
+
else:
|
|
1150
|
+
assert node_hash is not None
|
|
1151
|
+
node = await data_store.get_node(node_hash)
|
|
1152
|
+
assert isinstance(node, TerminalNode)
|
|
1153
|
+
await data_store.delete(key=node.key, store_id=store_id, status=Status.COMMITTED)
|
|
1154
|
+
if i > 200:
|
|
1155
|
+
if DiffData(OperationType.INSERT, node.key, node.value) in expected_diff:
|
|
1156
|
+
expected_diff.remove(DiffData(OperationType.INSERT, node.key, node.value))
|
|
1157
|
+
else:
|
|
1158
|
+
expected_diff.add(DiffData(OperationType.DELETE, node.key, node.value))
|
|
1159
|
+
if i == 200:
|
|
1160
|
+
root_start = await data_store.get_tree_root(store_id)
|
|
1161
|
+
|
|
1162
|
+
root_end = await data_store.get_tree_root(store_id)
|
|
1163
|
+
assert root_start is not None
|
|
1164
|
+
assert root_start.node_hash is not None
|
|
1165
|
+
assert root_end.node_hash is not None
|
|
1166
|
+
diffs = await data_store.get_kv_diff(store_id, root_start.node_hash, root_end.node_hash)
|
|
1167
|
+
assert diffs == expected_diff
|
|
1168
|
+
|
|
1169
|
+
|
|
1170
|
+
@pytest.mark.anyio
|
|
1171
|
+
async def test_kv_diff_2(data_store: DataStore, store_id: bytes32) -> None:
|
|
1172
|
+
insert_result = await data_store.insert(
|
|
1173
|
+
key=b"000",
|
|
1174
|
+
value=b"000",
|
|
1175
|
+
store_id=store_id,
|
|
1176
|
+
reference_node_hash=None,
|
|
1177
|
+
side=None,
|
|
1178
|
+
)
|
|
1179
|
+
empty_hash = bytes32.zeros
|
|
1180
|
+
invalid_hash = bytes32([0] * 31 + [1])
|
|
1181
|
+
diff_1 = await data_store.get_kv_diff(store_id, empty_hash, insert_result.node_hash)
|
|
1182
|
+
assert diff_1 == {DiffData(OperationType.INSERT, b"000", b"000")}
|
|
1183
|
+
diff_2 = await data_store.get_kv_diff(store_id, insert_result.node_hash, empty_hash)
|
|
1184
|
+
assert diff_2 == {DiffData(OperationType.DELETE, b"000", b"000")}
|
|
1185
|
+
with pytest.raises(Exception, match=f"Unable to diff: Can't find keys and values for {invalid_hash.hex()}"):
|
|
1186
|
+
await data_store.get_kv_diff(store_id, invalid_hash, insert_result.node_hash)
|
|
1187
|
+
with pytest.raises(Exception, match=f"Unable to diff: Can't find keys and values for {invalid_hash.hex()}"):
|
|
1188
|
+
await data_store.get_kv_diff(store_id, insert_result.node_hash, invalid_hash)
|
|
1189
|
+
|
|
1190
|
+
|
|
1191
|
+
@pytest.mark.anyio
|
|
1192
|
+
async def test_kv_diff_3(data_store: DataStore, store_id: bytes32) -> None:
|
|
1193
|
+
insert_result = await data_store.autoinsert(
|
|
1194
|
+
key=b"000",
|
|
1195
|
+
value=b"000",
|
|
1196
|
+
store_id=store_id,
|
|
1197
|
+
status=Status.COMMITTED,
|
|
1198
|
+
)
|
|
1199
|
+
await data_store.delete(store_id=store_id, key=b"000", status=Status.COMMITTED)
|
|
1200
|
+
insert_result_2 = await data_store.autoinsert(
|
|
1201
|
+
key=b"000",
|
|
1202
|
+
value=b"001",
|
|
1203
|
+
store_id=store_id,
|
|
1204
|
+
status=Status.COMMITTED,
|
|
1205
|
+
)
|
|
1206
|
+
diff_1 = await data_store.get_kv_diff(store_id, insert_result.node_hash, insert_result_2.node_hash)
|
|
1207
|
+
assert diff_1 == {DiffData(OperationType.DELETE, b"000", b"000"), DiffData(OperationType.INSERT, b"000", b"001")}
|
|
1208
|
+
insert_result_3 = await data_store.upsert(
|
|
1209
|
+
key=b"000",
|
|
1210
|
+
new_value=b"002",
|
|
1211
|
+
store_id=store_id,
|
|
1212
|
+
status=Status.COMMITTED,
|
|
1213
|
+
)
|
|
1214
|
+
diff_2 = await data_store.get_kv_diff(store_id, insert_result_2.node_hash, insert_result_3.node_hash)
|
|
1215
|
+
assert diff_2 == {DiffData(OperationType.DELETE, b"000", b"001"), DiffData(OperationType.INSERT, b"000", b"002")}
|
|
1216
|
+
|
|
1217
|
+
|
|
1218
|
+
@pytest.mark.anyio
|
|
1219
|
+
async def test_rollback_to_generation(data_store: DataStore, store_id: bytes32) -> None:
|
|
1220
|
+
await add_0123_example(data_store, store_id)
|
|
1221
|
+
expected_hashes = []
|
|
1222
|
+
roots = await data_store.get_roots_between(store_id, 1, 5)
|
|
1223
|
+
for generation, root in enumerate(roots):
|
|
1224
|
+
expected_hashes.append((generation + 1, root.node_hash))
|
|
1225
|
+
for generation, expected_hash in reversed(expected_hashes):
|
|
1226
|
+
await data_store.rollback_to_generation(store_id, generation)
|
|
1227
|
+
root = await data_store.get_tree_root(store_id)
|
|
1228
|
+
assert root.node_hash == expected_hash
|
|
1229
|
+
|
|
1230
|
+
|
|
1231
|
+
@pytest.mark.anyio
|
|
1232
|
+
async def test_subscribe_unsubscribe(data_store: DataStore, store_id: bytes32) -> None:
|
|
1233
|
+
await data_store.subscribe(Subscription(store_id, [ServerInfo("http://127:0:0:1/8000", 1, 1)]))
|
|
1234
|
+
subscriptions = await data_store.get_subscriptions()
|
|
1235
|
+
urls = [server_info.url for subscription in subscriptions for server_info in subscription.servers_info]
|
|
1236
|
+
assert urls == ["http://127:0:0:1/8000"]
|
|
1237
|
+
|
|
1238
|
+
await data_store.subscribe(Subscription(store_id, [ServerInfo("http://127:0:0:1/8001", 2, 2)]))
|
|
1239
|
+
subscriptions = await data_store.get_subscriptions()
|
|
1240
|
+
urls = [server_info.url for subscription in subscriptions for server_info in subscription.servers_info]
|
|
1241
|
+
assert urls == ["http://127:0:0:1/8000", "http://127:0:0:1/8001"]
|
|
1242
|
+
|
|
1243
|
+
await data_store.subscribe(
|
|
1244
|
+
Subscription(
|
|
1245
|
+
store_id, [ServerInfo("http://127:0:0:1/8000", 100, 100), ServerInfo("http://127:0:0:1/8001", 200, 200)]
|
|
1246
|
+
)
|
|
1247
|
+
)
|
|
1248
|
+
subscriptions = await data_store.get_subscriptions()
|
|
1249
|
+
assert subscriptions == [
|
|
1250
|
+
Subscription(store_id, [ServerInfo("http://127:0:0:1/8000", 1, 1), ServerInfo("http://127:0:0:1/8001", 2, 2)]),
|
|
1251
|
+
]
|
|
1252
|
+
|
|
1253
|
+
await data_store.unsubscribe(store_id)
|
|
1254
|
+
assert await data_store.get_subscriptions() == []
|
|
1255
|
+
store_id2 = bytes32.zeros
|
|
1256
|
+
|
|
1257
|
+
await data_store.subscribe(
|
|
1258
|
+
Subscription(
|
|
1259
|
+
store_id, [ServerInfo("http://127:0:0:1/8000", 100, 100), ServerInfo("http://127:0:0:1/8001", 200, 200)]
|
|
1260
|
+
)
|
|
1261
|
+
)
|
|
1262
|
+
await data_store.subscribe(
|
|
1263
|
+
Subscription(
|
|
1264
|
+
store_id2, [ServerInfo("http://127:0:0:1/8000", 300, 300), ServerInfo("http://127:0:0:1/8001", 400, 400)]
|
|
1265
|
+
)
|
|
1266
|
+
)
|
|
1267
|
+
subscriptions = await data_store.get_subscriptions()
|
|
1268
|
+
assert subscriptions == [
|
|
1269
|
+
Subscription(
|
|
1270
|
+
store_id, [ServerInfo("http://127:0:0:1/8000", 100, 100), ServerInfo("http://127:0:0:1/8001", 200, 200)]
|
|
1271
|
+
),
|
|
1272
|
+
Subscription(
|
|
1273
|
+
store_id2, [ServerInfo("http://127:0:0:1/8000", 300, 300), ServerInfo("http://127:0:0:1/8001", 400, 400)]
|
|
1274
|
+
),
|
|
1275
|
+
]
|
|
1276
|
+
|
|
1277
|
+
|
|
1278
|
+
@pytest.mark.anyio
|
|
1279
|
+
async def test_server_selection(data_store: DataStore, store_id: bytes32) -> None:
|
|
1280
|
+
start_timestamp = 1000
|
|
1281
|
+
await data_store.subscribe(
|
|
1282
|
+
Subscription(store_id, [ServerInfo(f"http://127.0.0.1/{port}", 0, 0) for port in range(8000, 8010)])
|
|
1283
|
+
)
|
|
1284
|
+
|
|
1285
|
+
free_servers = {f"http://127.0.0.1/{port}" for port in range(8000, 8010)}
|
|
1286
|
+
tried_servers = 0
|
|
1287
|
+
random = Random()
|
|
1288
|
+
random.seed(100, version=2)
|
|
1289
|
+
while len(free_servers) > 0:
|
|
1290
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=start_timestamp)
|
|
1291
|
+
random.shuffle(servers_info)
|
|
1292
|
+
assert servers_info != []
|
|
1293
|
+
server_info = servers_info[0]
|
|
1294
|
+
assert server_info.ignore_till == 0
|
|
1295
|
+
await data_store.received_incorrect_file(store_id=store_id, server_info=server_info, timestamp=start_timestamp)
|
|
1296
|
+
assert server_info.url in free_servers
|
|
1297
|
+
tried_servers += 1
|
|
1298
|
+
free_servers.remove(server_info.url)
|
|
1299
|
+
|
|
1300
|
+
assert tried_servers == 10
|
|
1301
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=start_timestamp)
|
|
1302
|
+
assert servers_info == []
|
|
1303
|
+
|
|
1304
|
+
current_timestamp = 2000 + 7 * 24 * 3600
|
|
1305
|
+
selected_servers = set()
|
|
1306
|
+
for _ in range(100):
|
|
1307
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=current_timestamp)
|
|
1308
|
+
random.shuffle(servers_info)
|
|
1309
|
+
assert servers_info != []
|
|
1310
|
+
selected_servers.add(servers_info[0].url)
|
|
1311
|
+
assert selected_servers == {f"http://127.0.0.1/{port}" for port in range(8000, 8010)}
|
|
1312
|
+
|
|
1313
|
+
for _ in range(100):
|
|
1314
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=current_timestamp)
|
|
1315
|
+
random.shuffle(servers_info)
|
|
1316
|
+
assert servers_info != []
|
|
1317
|
+
if servers_info[0].url != "http://127.0.0.1/8000":
|
|
1318
|
+
await data_store.received_incorrect_file(
|
|
1319
|
+
store_id=store_id, server_info=servers_info[0], timestamp=current_timestamp
|
|
1320
|
+
)
|
|
1321
|
+
|
|
1322
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=current_timestamp)
|
|
1323
|
+
random.shuffle(servers_info)
|
|
1324
|
+
assert len(servers_info) == 1
|
|
1325
|
+
assert servers_info[0].url == "http://127.0.0.1/8000"
|
|
1326
|
+
await data_store.received_correct_file(store_id=store_id, server_info=servers_info[0])
|
|
1327
|
+
|
|
1328
|
+
ban_times = [5 * 60] * 3 + [15 * 60] * 3 + [30 * 60] * 2 + [60 * 60] * 10
|
|
1329
|
+
for ban_time in ban_times:
|
|
1330
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=current_timestamp)
|
|
1331
|
+
assert len(servers_info) == 1
|
|
1332
|
+
await data_store.server_misses_file(store_id=store_id, server_info=servers_info[0], timestamp=current_timestamp)
|
|
1333
|
+
current_timestamp += ban_time
|
|
1334
|
+
servers_info = await data_store.get_available_servers_for_store(store_id=store_id, timestamp=current_timestamp)
|
|
1335
|
+
assert servers_info == []
|
|
1336
|
+
current_timestamp += 1
|
|
1337
|
+
|
|
1338
|
+
|
|
1339
|
+
@pytest.mark.parametrize(
|
|
1340
|
+
"error",
|
|
1341
|
+
[True, False],
|
|
1342
|
+
)
|
|
1343
|
+
@pytest.mark.anyio
|
|
1344
|
+
async def test_server_http_ban(
|
|
1345
|
+
data_store: DataStore,
|
|
1346
|
+
store_id: bytes32,
|
|
1347
|
+
error: bool,
|
|
1348
|
+
monkeypatch: Any,
|
|
1349
|
+
tmp_path: Path,
|
|
1350
|
+
seeded_random: random.Random,
|
|
1351
|
+
) -> None:
|
|
1352
|
+
sinfo = ServerInfo("http://127.0.0.1/8003", 0, 0)
|
|
1353
|
+
await data_store.subscribe(Subscription(store_id, [sinfo]))
|
|
1354
|
+
|
|
1355
|
+
async def mock_http_download(
|
|
1356
|
+
target_filename_path: Path,
|
|
1357
|
+
filename: str,
|
|
1358
|
+
proxy_url: str,
|
|
1359
|
+
server_info: ServerInfo,
|
|
1360
|
+
timeout: aiohttp.ClientTimeout,
|
|
1361
|
+
log: logging.Logger,
|
|
1362
|
+
) -> None:
|
|
1363
|
+
if error:
|
|
1364
|
+
raise aiohttp.ClientConnectionError()
|
|
1365
|
+
|
|
1366
|
+
start_timestamp = int(time.time())
|
|
1367
|
+
with monkeypatch.context() as m:
|
|
1368
|
+
m.setattr("chia.data_layer.download_data.http_download", mock_http_download)
|
|
1369
|
+
success = await insert_from_delta_file(
|
|
1370
|
+
data_store=data_store,
|
|
1371
|
+
store_id=store_id,
|
|
1372
|
+
existing_generation=3,
|
|
1373
|
+
target_generation=4,
|
|
1374
|
+
root_hashes=[bytes32.random(seeded_random)],
|
|
1375
|
+
server_info=sinfo,
|
|
1376
|
+
client_foldername=tmp_path,
|
|
1377
|
+
timeout=aiohttp.ClientTimeout(total=15, sock_connect=5),
|
|
1378
|
+
log=log,
|
|
1379
|
+
proxy_url="",
|
|
1380
|
+
downloader=None,
|
|
1381
|
+
)
|
|
1382
|
+
|
|
1383
|
+
assert success is False
|
|
1384
|
+
|
|
1385
|
+
subscriptions = await data_store.get_subscriptions()
|
|
1386
|
+
sinfo = subscriptions[0].servers_info[0]
|
|
1387
|
+
assert sinfo.num_consecutive_failures == 1
|
|
1388
|
+
assert sinfo.ignore_till >= start_timestamp + 5 * 60 # ban for 5 minutes
|
|
1389
|
+
start_timestamp = sinfo.ignore_till
|
|
1390
|
+
|
|
1391
|
+
with monkeypatch.context() as m:
|
|
1392
|
+
m.setattr("chia.data_layer.download_data.http_download", mock_http_download)
|
|
1393
|
+
success = await insert_from_delta_file(
|
|
1394
|
+
data_store=data_store,
|
|
1395
|
+
store_id=store_id,
|
|
1396
|
+
existing_generation=3,
|
|
1397
|
+
target_generation=4,
|
|
1398
|
+
root_hashes=[bytes32.random(seeded_random)],
|
|
1399
|
+
server_info=sinfo,
|
|
1400
|
+
client_foldername=tmp_path,
|
|
1401
|
+
timeout=aiohttp.ClientTimeout(total=15, sock_connect=5),
|
|
1402
|
+
log=log,
|
|
1403
|
+
proxy_url="",
|
|
1404
|
+
downloader=None,
|
|
1405
|
+
)
|
|
1406
|
+
|
|
1407
|
+
subscriptions = await data_store.get_subscriptions()
|
|
1408
|
+
sinfo = subscriptions[0].servers_info[0]
|
|
1409
|
+
assert sinfo.num_consecutive_failures == 2
|
|
1410
|
+
assert sinfo.ignore_till == start_timestamp # we don't increase on second failure
|
|
1411
|
+
|
|
1412
|
+
@pytest.mark.parametrize(
    "test_delta",
    [True, False],
)
@boolean_datacases(name="group_files_by_store", true="group by singleton", false="don't group by singleton")
@pytest.mark.anyio
async def test_data_server_files(
    data_store: DataStore,
    store_id: bytes32,
    test_delta: bool,
    group_files_by_store: bool,
    tmp_path: Path,
) -> None:
    roots: list[Root] = []
    num_batches = 10
    num_ops_per_batch = 100

    db_uri = generate_in_memory_db_uri()
    async with DataStore.managed(database=db_uri, uri=True) as data_store_server:
        await data_store_server.create_tree(store_id, status=Status.COMMITTED)
        random = Random()
        random.seed(100, version=2)

        keys: list[bytes] = []
        counter = 0

        for batch in range(num_batches):
            changelist: list[dict[str, Any]] = []
            for operation in range(num_ops_per_batch):
                if random.randint(0, 4) > 0 or len(keys) == 0:
                    key = counter.to_bytes(4, byteorder="big")
                    value = (2 * counter).to_bytes(4, byteorder="big")
                    keys.append(key)
                    changelist.append({"action": "insert", "key": key, "value": value})
                else:
                    key = random.choice(keys)
                    keys.remove(key)
                    changelist.append({"action": "delete", "key": key})
                counter += 1
            await data_store_server.insert_batch(store_id, changelist, status=Status.COMMITTED)
            root = await data_store_server.get_tree_root(store_id)
            await write_files_for_root(
                data_store_server, store_id, root, tmp_path, 0, group_by_store=group_files_by_store
            )
            roots.append(root)

    generation = 1
    assert len(roots) == num_batches
    for root in roots:
        assert root.node_hash is not None
        if not test_delta:
            filename = get_full_tree_filename_path(tmp_path, store_id, root.node_hash, generation, group_files_by_store)
            assert filename.exists()
        else:
            filename = get_delta_filename_path(tmp_path, store_id, root.node_hash, generation, group_files_by_store)
            assert filename.exists()
        await insert_into_data_store_from_file(data_store, store_id, root.node_hash, tmp_path.joinpath(filename))
        current_root = await data_store.get_tree_root(store_id=store_id)
        assert current_root.node_hash == root.node_hash
        generation += 1


@pytest.mark.anyio
@pytest.mark.parametrize("pending_status", [Status.PENDING, Status.PENDING_BATCH])
async def test_pending_roots(data_store: DataStore, store_id: bytes32, pending_status: Status) -> None:
    key = b"\x01\x02"
    value = b"abc"

    await data_store.insert(
        key=key,
        value=value,
        store_id=store_id,
        reference_node_hash=None,
        side=None,
        status=Status.COMMITTED,
    )

    key = b"\x01\x03"
    value = b"abc"

    await data_store.autoinsert(
        key=key,
        value=value,
        store_id=store_id,
        status=pending_status,
    )
    pending_root = await data_store.get_pending_root(store_id=store_id)
    assert pending_root is not None
    assert pending_root.generation == 2 and pending_root.status == pending_status

    await data_store.clear_pending_roots(store_id=store_id)
    pending_root = await data_store.get_pending_root(store_id=store_id)
    assert pending_root is None


@pytest.mark.anyio
@pytest.mark.parametrize("pending_status", [Status.PENDING, Status.PENDING_BATCH])
async def test_clear_pending_roots_returns_root(
    data_store: DataStore, store_id: bytes32, pending_status: Status
) -> None:
    key = b"\x01\x02"
    value = b"abc"

    await data_store.insert(
        key=key,
        value=value,
        store_id=store_id,
        reference_node_hash=None,
        side=None,
        status=pending_status,
    )

    pending_root = await data_store.get_pending_root(store_id=store_id)
    cleared_root = await data_store.clear_pending_roots(store_id=store_id)
    assert cleared_root == pending_root


@dataclass
class BatchInsertBenchmarkCase:
    pre: int
    count: int
    limit: float
    marks: Marks = ()

    @property
    def id(self) -> str:
        return f"pre={self.pre},count={self.count}"


@dataclass
class BatchesInsertBenchmarkCase:
    count: int
    batch_count: int
    limit: float
    marks: Marks = ()

    @property
    def id(self) -> str:
        return f"count={self.count},batch_count={self.batch_count}"


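# Benchmark cases: `pre` keys are inserted before timing begins, `count` keys are inserted
# inside the timed block, and `limit` is the allowed runtime in seconds.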
@datacases(
    BatchInsertBenchmarkCase(
        pre=0,
        count=100,
        limit=2.2,
    ),
    BatchInsertBenchmarkCase(
        pre=1_000,
        count=100,
        limit=4,
    ),
    BatchInsertBenchmarkCase(
        pre=0,
        count=1_000,
        limit=30,
    ),
    BatchInsertBenchmarkCase(
        pre=1_000,
        count=1_000,
        limit=36,
    ),
    BatchInsertBenchmarkCase(
        pre=10_000,
        count=25_000,
        limit=52,
    ),
)
@pytest.mark.anyio
async def test_benchmark_batch_insert_speed(
    data_store: DataStore,
    store_id: bytes32,
    benchmark_runner: BenchmarkRunner,
    case: BatchInsertBenchmarkCase,
) -> None:
    r = random.Random()
    r.seed("shadowlands", version=2)

    changelist = [
        {
            "action": "insert",
            "key": x.to_bytes(32, byteorder="big", signed=False),
            "value": bytes(r.getrandbits(8) for _ in range(1200)),
        }
        for x in range(case.pre + case.count)
    ]

    pre = changelist[: case.pre]
    batch = changelist[case.pre : case.pre + case.count]

    if case.pre > 0:
        await data_store.insert_batch(
            store_id=store_id,
            changelist=pre,
            status=Status.COMMITTED,
        )

    with benchmark_runner.assert_runtime(seconds=case.limit):
        await data_store.insert_batch(
            store_id=store_id,
            changelist=batch,
        )


@datacases(
    BatchesInsertBenchmarkCase(
        count=50,
        batch_count=200,
        limit=195,
    ),
)
@pytest.mark.anyio
async def test_benchmark_batch_insert_speed_multiple_batches(
    data_store: DataStore,
    store_id: bytes32,
    benchmark_runner: BenchmarkRunner,
    case: BatchesInsertBenchmarkCase,
) -> None:
    r = random.Random()
    r.seed("shadowlands", version=2)

    with benchmark_runner.assert_runtime(seconds=case.limit):
        for batch in range(case.batch_count):
            changelist = [
                {
                    "action": "insert",
                    "key": x.to_bytes(32, byteorder="big", signed=False),
                    "value": bytes(r.getrandbits(8) for _ in range(10000)),
                }
                for x in range(batch * case.count, (batch + 1) * case.count)
            ]
            await data_store.insert_batch(
                store_id=store_id,
                changelist=changelist,
                status=Status.COMMITTED,
            )


@pytest.mark.anyio
async def test_delete_store_data(raw_data_store: DataStore) -> None:
    store_id = bytes32.zeros
    store_id_2 = bytes32(b"\0" * 31 + b"\1")
    await raw_data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
    await raw_data_store.create_tree(store_id=store_id_2, status=Status.COMMITTED)
    total_keys = 4
    keys = [key.to_bytes(4, byteorder="big") for key in range(total_keys)]
    batch1 = [
        {"action": "insert", "key": keys[0], "value": keys[0]},
        {"action": "insert", "key": keys[1], "value": keys[1]},
    ]
    batch2 = batch1.copy()
    batch1.append({"action": "insert", "key": keys[2], "value": keys[2]})
    batch2.append({"action": "insert", "key": keys[3], "value": keys[3]})
    assert batch1 != batch2
    await raw_data_store.insert_batch(store_id, batch1, status=Status.COMMITTED)
    await raw_data_store.insert_batch(store_id_2, batch2, status=Status.COMMITTED)
    keys_values_before = await raw_data_store.get_keys_values(store_id_2)
    async with raw_data_store.db_wrapper.reader() as reader:
        result = await reader.execute("SELECT * FROM node")
        nodes = await result.fetchall()
    kv_nodes_before = {}
    for node in nodes:
        if node["key"] is not None:
            kv_nodes_before[node["key"]] = node["value"]
    assert [kv_nodes_before[key] for key in keys] == keys
    await raw_data_store.delete_store_data(store_id)
    # Deleting from `node` table doesn't alter other stores.
    keys_values_after = await raw_data_store.get_keys_values(store_id_2)
    assert keys_values_before == keys_values_after
    async with raw_data_store.db_wrapper.reader() as reader:
        result = await reader.execute("SELECT * FROM node")
        nodes = await result.fetchall()
    kv_nodes_after = {}
    for node in nodes:
        if node["key"] is not None:
            kv_nodes_after[node["key"]] = node["value"]
    for i in range(total_keys):
        if i != 2:
            assert kv_nodes_after[keys[i]] == keys[i]
        else:
            # `keys[2]` was only present in the first store.
            assert keys[i] not in kv_nodes_after
    assert not await raw_data_store.store_id_exists(store_id)
    await raw_data_store.delete_store_data(store_id_2)
    async with raw_data_store.db_wrapper.reader() as reader:
        async with reader.execute("SELECT COUNT(*) FROM node") as cursor:
            row_count = await cursor.fetchone()
            assert row_count is not None
            assert row_count[0] == 0
    assert not await raw_data_store.store_id_exists(store_id_2)


@pytest.mark.anyio
async def test_delete_store_data_multiple_stores(raw_data_store: DataStore) -> None:
    # Make sure inserting and deleting the same data works
    for repetition in range(2):
        num_stores = 50
        total_keys = 150
        keys_deleted_per_store = 3
        store_ids = [bytes32(i.to_bytes(32, byteorder="big")) for i in range(num_stores)]
        for store_id in store_ids:
            await raw_data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
        original_keys = [key.to_bytes(4, byteorder="big") for key in range(total_keys)]
        batches = []
        for i in range(num_stores):
            batch = [
                {"action": "insert", "key": key, "value": key} for key in original_keys[i * keys_deleted_per_store :]
            ]
            batches.append(batch)

        for store_id, batch in zip(store_ids, batches):
            await raw_data_store.insert_batch(store_id, batch, status=Status.COMMITTED)

        for tree_index in range(num_stores):
            async with raw_data_store.db_wrapper.reader() as reader:
                result = await reader.execute("SELECT * FROM node")
                nodes = await result.fetchall()

            keys = {node["key"] for node in nodes if node["key"] is not None}
            assert len(keys) == total_keys - tree_index * keys_deleted_per_store
            keys_after_index = set(original_keys[tree_index * keys_deleted_per_store :])
            keys_before_index = set(original_keys[: tree_index * keys_deleted_per_store])
            assert keys_after_index.issubset(keys)
            assert keys.isdisjoint(keys_before_index)
            await raw_data_store.delete_store_data(store_ids[tree_index])

        async with raw_data_store.db_wrapper.reader() as reader:
            async with reader.execute("SELECT COUNT(*) FROM node") as cursor:
                row_count = await cursor.fetchone()
                assert row_count is not None
                assert row_count[0] == 0


@pytest.mark.parametrize("common_keys_count", [1, 250, 499])
@pytest.mark.anyio
async def test_delete_store_data_with_common_values(raw_data_store: DataStore, common_keys_count: int) -> None:
    store_id_1 = bytes32(b"\x00" * 31 + b"\x01")
    store_id_2 = bytes32(b"\x00" * 31 + b"\x02")

    await raw_data_store.create_tree(store_id=store_id_1, status=Status.COMMITTED)
    await raw_data_store.create_tree(store_id=store_id_2, status=Status.COMMITTED)

    key_offset = 1000
    total_keys_per_store = 500
    assert common_keys_count < key_offset
    common_keys = {key.to_bytes(4, byteorder="big") for key in range(common_keys_count)}
    unique_keys_1 = {
        (key + key_offset).to_bytes(4, byteorder="big") for key in range(total_keys_per_store - common_keys_count)
    }
    unique_keys_2 = {
        (key + (2 * key_offset)).to_bytes(4, byteorder="big") for key in range(total_keys_per_store - common_keys_count)
    }

    batch1 = [{"action": "insert", "key": key, "value": key} for key in common_keys.union(unique_keys_1)]
    batch2 = [{"action": "insert", "key": key, "value": key} for key in common_keys.union(unique_keys_2)]

    await raw_data_store.insert_batch(store_id_1, batch1, status=Status.COMMITTED)
    await raw_data_store.insert_batch(store_id_2, batch2, status=Status.COMMITTED)

    await raw_data_store.delete_store_data(store_id_1)
    async with raw_data_store.db_wrapper.reader() as reader:
        result = await reader.execute("SELECT * FROM node")
        nodes = await result.fetchall()

    keys = {node["key"] for node in nodes if node["key"] is not None}
    # Since one store got all its keys deleted, we're left only with the keys of the other store.
    assert len(keys) == total_keys_per_store
    assert keys.intersection(unique_keys_1) == set()
    assert keys.symmetric_difference(common_keys.union(unique_keys_2)) == set()


@pytest.mark.anyio
@pytest.mark.parametrize("pending_status", [Status.PENDING, Status.PENDING_BATCH])
async def test_delete_store_data_protects_pending_roots(raw_data_store: DataStore, pending_status: Status) -> None:
    num_stores = 5
    total_keys = 15
    store_ids = [bytes32(i.to_bytes(32, byteorder="big")) for i in range(num_stores)]
    for store_id in store_ids:
        await raw_data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
    original_keys = [key.to_bytes(4, byteorder="big") for key in range(total_keys)]
    batches = []
    keys_per_pending_root = 2

    for i in range(num_stores - 1):
        start_index = i * keys_per_pending_root
        end_index = (i + 1) * keys_per_pending_root
        batch = [{"action": "insert", "key": key, "value": key} for key in original_keys[start_index:end_index]]
        batches.append(batch)
    for store_id, batch in zip(store_ids, batches):
        await raw_data_store.insert_batch(store_id, batch, status=pending_status)

    store_id = store_ids[-1]
    batch = [{"action": "insert", "key": key, "value": key} for key in original_keys]
    await raw_data_store.insert_batch(store_id, batch, status=Status.COMMITTED)

    async with raw_data_store.db_wrapper.reader() as reader:
        result = await reader.execute("SELECT * FROM node")
        nodes = await result.fetchall()

    keys = {node["key"] for node in nodes if node["key"] is not None}
    assert keys == set(original_keys)

    await raw_data_store.delete_store_data(store_id)
    async with raw_data_store.db_wrapper.reader() as reader:
        result = await reader.execute("SELECT * FROM node")
        nodes = await result.fetchall()

    keys = {node["key"] for node in nodes if node["key"] is not None}
    assert keys == set(original_keys[: (num_stores - 1) * keys_per_pending_root])

    for index in range(num_stores - 1):
        store_id = store_ids[index]
        root = await raw_data_store.get_pending_root(store_id)
        assert root is not None
        await raw_data_store.change_root_status(root, Status.COMMITTED)
        kv = await raw_data_store.get_keys_values(store_id=store_id)
        start_index = index * keys_per_pending_root
        end_index = (index + 1) * keys_per_pending_root
        assert {pair.key for pair in kv} == set(original_keys[start_index:end_index])


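# End-to-end delta sync: files are written for every generation, the local full files are
# deleted and the store rolled back to generation 0, then a patched http_download stands in
# for the network to exercise both the failing and the succeeding download paths.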
@pytest.mark.anyio
@boolean_datacases(name="group_files_by_store", true="group by singleton", false="don't group by singleton")
@pytest.mark.parametrize("max_full_files", [1, 2, 5])
async def test_insert_from_delta_file(
    data_store: DataStore,
    store_id: bytes32,
    monkeypatch: Any,
    tmp_path: Path,
    seeded_random: random.Random,
    group_files_by_store: bool,
    max_full_files: int,
) -> None:
    await data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
    num_files = 5
    for generation in range(num_files):
        key = generation.to_bytes(4, byteorder="big")
        value = generation.to_bytes(4, byteorder="big")
        await data_store.autoinsert(
            key=key,
            value=value,
            store_id=store_id,
            status=Status.COMMITTED,
        )

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == num_files + 1
    root_hashes = []

    tmp_path_1 = tmp_path.joinpath("1")
    tmp_path_2 = tmp_path.joinpath("2")

    for generation in range(1, num_files + 2):
        root = await data_store.get_tree_root(store_id=store_id, generation=generation)
        await write_files_for_root(data_store, store_id, root, tmp_path_1, 0, False, group_files_by_store)
        root_hashes.append(bytes32.zeros if root.node_hash is None else root.node_hash)
    store_path = tmp_path_1.joinpath(f"{store_id}") if group_files_by_store else tmp_path_1
    with os.scandir(store_path) as entries:
        filenames = {entry.name for entry in entries}
        assert len(filenames) == 2 * (num_files + 1)
    for filename in filenames:
        if "full" in filename:
            store_path.joinpath(filename).unlink()
    with os.scandir(store_path) as entries:
        filenames = {entry.name for entry in entries}
        assert len(filenames) == num_files + 1
    kv_before = await data_store.get_keys_values(store_id=store_id)
    await data_store.rollback_to_generation(store_id, 0)
    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == 0
    os.rename(store_path, tmp_path_2)

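    # Two stand-ins for http_download: the first fetches nothing, so the sync must fail;
    # the second moves the previously written files back into place, acting like a server
    # that serves every requested file.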
    async def mock_http_download(
        target_filename_path: Path,
        filename: str,
        proxy_url: str,
        server_info: ServerInfo,
        timeout: int,
        log: logging.Logger,
    ) -> None:
        pass

    async def mock_http_download_2(
        target_filename_path: Path,
        filename: str,
        proxy_url: str,
        server_info: ServerInfo,
        timeout: int,
        log: logging.Logger,
    ) -> None:
        try:
            os.rmdir(store_path)
        except OSError:
            pass
        os.rename(tmp_path_2, store_path)

    sinfo = ServerInfo("http://127.0.0.1/8003", 0, 0)
    with monkeypatch.context() as m:
        m.setattr("chia.data_layer.download_data.http_download", mock_http_download)
        success = await insert_from_delta_file(
            data_store=data_store,
            store_id=store_id,
            existing_generation=0,
            target_generation=num_files + 1,
            root_hashes=root_hashes,
            server_info=sinfo,
            client_foldername=tmp_path_1,
            timeout=aiohttp.ClientTimeout(total=15, sock_connect=5),
            log=log,
            proxy_url="",
            downloader=None,
            group_files_by_store=group_files_by_store,
            maximum_full_file_count=max_full_files,
        )
        assert not success

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == 0

    sinfo = ServerInfo("http://127.0.0.1/8003", 0, 0)
    with monkeypatch.context() as m:
        m.setattr("chia.data_layer.download_data.http_download", mock_http_download_2)
        success = await insert_from_delta_file(
            data_store=data_store,
            store_id=store_id,
            existing_generation=0,
            target_generation=num_files + 1,
            root_hashes=root_hashes,
            server_info=sinfo,
            client_foldername=tmp_path_1,
            timeout=aiohttp.ClientTimeout(total=15, sock_connect=5),
            log=log,
            proxy_url="",
            downloader=None,
            group_files_by_store=group_files_by_store,
            maximum_full_file_count=max_full_files,
        )
        assert success

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == num_files + 1
    with os.scandir(store_path) as entries:
        filenames = {entry.name for entry in entries}
        assert len(filenames) == num_files + 1 + max_full_files  # 6 deltas and max_full_files full files
    kv = await data_store.get_keys_values(store_id=store_id)
    assert kv == kv_before


@pytest.mark.anyio
async def test_get_node_by_key_with_overlapping_keys(raw_data_store: DataStore) -> None:
    num_stores = 5
    num_keys = 20
    values_offset = 10000
    repetitions = 25
    random = Random()
    random.seed(100, version=2)

    store_ids = [bytes32(i.to_bytes(32, byteorder="big")) for i in range(num_stores)]
    for store_id in store_ids:
        await raw_data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
    keys = [key.to_bytes(4, byteorder="big") for key in range(num_keys)]
    for repetition in range(repetitions):
        for index, store_id in enumerate(store_ids):
            values = [
                (value + values_offset * repetition).to_bytes(4, byteorder="big")
                for value in range(index * num_keys, (index + 1) * num_keys)
            ]
            batch = []
            for key, value in zip(keys, values):
                batch.append({"action": "upsert", "key": key, "value": value})
            await raw_data_store.insert_batch(store_id, batch, status=Status.COMMITTED)

        for index, store_id in enumerate(store_ids):
            values = [
                (value + values_offset * repetition).to_bytes(4, byteorder="big")
                for value in range(index * num_keys, (index + 1) * num_keys)
            ]
            for key, value in zip(keys, values):
                node = await raw_data_store.get_node_by_key(store_id=store_id, key=key)
                assert node.value == value
                if random.randint(0, 4) == 0:
                    batch = [{"action": "delete", "key": key}]
                    await raw_data_store.insert_batch(store_id, batch, status=Status.COMMITTED)
                    with pytest.raises(KeyNotFoundError, match=f"Key not found: {key.hex()}"):
                        await raw_data_store.get_node_by_key(store_id=store_id, key=key)


@pytest.mark.anyio
@boolean_datacases(name="group_files_by_store", true="group by singleton", false="don't group by singleton")
async def test_insert_from_delta_file_correct_file_exists(
    data_store: DataStore, store_id: bytes32, tmp_path: Path, group_files_by_store: bool
) -> None:
    await data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
    num_files = 5
    for generation in range(num_files):
        key = generation.to_bytes(4, byteorder="big")
        value = generation.to_bytes(4, byteorder="big")
        await data_store.autoinsert(
            key=key,
            value=value,
            store_id=store_id,
            status=Status.COMMITTED,
        )

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == num_files + 1
    root_hashes = []
    for generation in range(1, num_files + 2):
        root = await data_store.get_tree_root(store_id=store_id, generation=generation)
        await write_files_for_root(data_store, store_id, root, tmp_path, 0, group_by_store=group_files_by_store)
        root_hashes.append(bytes32.zeros if root.node_hash is None else root.node_hash)
    store_path = tmp_path.joinpath(f"{store_id}") if group_files_by_store else tmp_path
    with os.scandir(store_path) as entries:
        filenames = {entry.name for entry in entries}
        assert len(filenames) == 2 * (num_files + 1)
    for filename in filenames:
        if "full" in filename:
            store_path.joinpath(filename).unlink()
    with os.scandir(store_path) as entries:
        filenames = {entry.name for entry in entries}
        assert len(filenames) == num_files + 1
    kv_before = await data_store.get_keys_values(store_id=store_id)
    await data_store.rollback_to_generation(store_id, 0)
    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == 0

    sinfo = ServerInfo("http://127.0.0.1/8003", 0, 0)
    success = await insert_from_delta_file(
        data_store=data_store,
        store_id=store_id,
        existing_generation=0,
        target_generation=num_files + 1,
        root_hashes=root_hashes,
        server_info=sinfo,
        client_foldername=tmp_path,
        timeout=aiohttp.ClientTimeout(total=15, sock_connect=5),
        log=log,
        proxy_url="",
        downloader=None,
        group_files_by_store=group_files_by_store,
    )
    assert success

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == num_files + 1
    with os.scandir(store_path) as entries:
        filenames = {entry.name for entry in entries}
        assert len(filenames) == num_files + 2  # 1 full and 6 deltas
    kv = await data_store.get_keys_values(store_id=store_id)
    assert kv == kv_before


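# A cached file named after the wrong root hash must make the sync fail, and the mismatched
# files must be deleted so a later attempt can re-download them.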
@pytest.mark.anyio
@boolean_datacases(name="group_files_by_store", true="group by singleton", false="don't group by singleton")
async def test_insert_from_delta_file_incorrect_file_exists(
    data_store: DataStore, store_id: bytes32, tmp_path: Path, group_files_by_store: bool
) -> None:
    await data_store.create_tree(store_id=store_id, status=Status.COMMITTED)
    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == 1

    key = b"a"
    value = b"a"
    await data_store.autoinsert(
        key=key,
        value=value,
        store_id=store_id,
        status=Status.COMMITTED,
    )

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == 2
    await write_files_for_root(data_store, store_id, root, tmp_path, 0, group_by_store=group_files_by_store)

    incorrect_root_hash = bytes32([0] * 31 + [1])
    store_path = tmp_path.joinpath(f"{store_id}") if group_files_by_store else tmp_path
    with os.scandir(store_path) as entries:
        filenames = [entry.name for entry in entries]
        assert len(filenames) == 2
    os.rename(
        store_path.joinpath(filenames[0]),
        get_delta_filename_path(tmp_path, store_id, incorrect_root_hash, 2, group_files_by_store),
    )
    os.rename(
        store_path.joinpath(filenames[1]),
        get_full_tree_filename_path(tmp_path, store_id, incorrect_root_hash, 2, group_files_by_store),
    )

    await data_store.rollback_to_generation(store_id, 1)
    sinfo = ServerInfo("http://127.0.0.1/8003", 0, 0)
    success = await insert_from_delta_file(
        data_store=data_store,
        store_id=store_id,
        existing_generation=1,
        target_generation=6,
        root_hashes=[incorrect_root_hash],
        server_info=sinfo,
        client_foldername=tmp_path,
        timeout=aiohttp.ClientTimeout(total=15, sock_connect=5),
        log=log,
        proxy_url="",
        downloader=None,
        group_files_by_store=group_files_by_store,
    )
    assert not success

    root = await data_store.get_tree_root(store_id=store_id)
    assert root.generation == 1
    with os.scandir(store_path) as entries:
        filenames = [entry.name for entry in entries]
        assert len(filenames) == 0


@pytest.mark.anyio
async def test_insert_key_already_present(data_store: DataStore, store_id: bytes32) -> None:
    key = b"foo"
    value = b"bar"
    await data_store.insert(
        key=key, value=value, store_id=store_id, reference_node_hash=None, side=None, status=Status.COMMITTED
    )
    with pytest.raises(Exception, match=f"Key already present: {key.hex()}"):
        await data_store.insert(key=key, value=value, store_id=store_id, reference_node_hash=None, side=None)


@pytest.mark.anyio
@boolean_datacases(name="use_batch_autoinsert", false="not optimized batch insert", true="optimized batch insert")
async def test_batch_insert_key_already_present(
    data_store: DataStore,
    store_id: bytes32,
    use_batch_autoinsert: bool,
) -> None:
    key = b"foo"
    value = b"bar"
    changelist = [{"action": "insert", "key": key, "value": value}]
    await data_store.insert_batch(store_id, changelist, Status.COMMITTED, use_batch_autoinsert)
    with pytest.raises(Exception, match=f"Key already present: {key.hex()}"):
        await data_store.insert_batch(store_id, changelist, Status.COMMITTED, use_batch_autoinsert)


@pytest.mark.anyio
@boolean_datacases(name="use_upsert", false="update with delete and insert", true="update with upsert")
async def test_update_keys(data_store: DataStore, store_id: bytes32, use_upsert: bool) -> None:
    num_keys = 10
    missing_keys = 50
    num_values = 10
    new_keys = 10
    for value in range(num_values):
        changelist: list[dict[str, Any]] = []
        bytes_value = value.to_bytes(4, byteorder="big")
        if use_upsert:
            for key in range(num_keys):
                bytes_key = key.to_bytes(4, byteorder="big")
                changelist.append({"action": "upsert", "key": bytes_key, "value": bytes_value})
        else:
            for key in range(num_keys + missing_keys):
                bytes_key = key.to_bytes(4, byteorder="big")
                changelist.append({"action": "delete", "key": bytes_key})
            for key in range(num_keys):
                bytes_key = key.to_bytes(4, byteorder="big")
                changelist.append({"action": "insert", "key": bytes_key, "value": bytes_value})

        await data_store.insert_batch(
            store_id=store_id,
            changelist=changelist,
            status=Status.COMMITTED,
        )
        for key in range(num_keys):
            bytes_key = key.to_bytes(4, byteorder="big")
            node = await data_store.get_node_by_key(bytes_key, store_id)
            assert node.value == bytes_value
        for key in range(num_keys, num_keys + missing_keys):
            bytes_key = key.to_bytes(4, byteorder="big")
            with pytest.raises(KeyNotFoundError, match=f"Key not found: {bytes_key.hex()}"):
                await data_store.get_node_by_key(bytes_key, store_id)
        num_keys += new_keys


@pytest.mark.anyio
async def test_migration_unknown_version(data_store: DataStore) -> None:
    async with data_store.db_wrapper.writer() as writer:
        await writer.execute(
            "INSERT INTO schema(version_id) VALUES(:version_id)",
            {
                "version_id": "unknown version",
            },
        )
    with pytest.raises(Exception, match="Unknown version"):
        await data_store.migrate_db()


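# Helper: walks the tree breadth-first to build the expected child-to-parent map, checks it
# against the store's ancestors table via _get_one_ancestor, and returns the map.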
async def _check_ancestors(
    data_store: DataStore, store_id: bytes32, root_hash: bytes32
) -> dict[bytes32, Optional[bytes32]]:
    ancestors: dict[bytes32, Optional[bytes32]] = {}
    root_node: Node = await data_store.get_node(root_hash)
    queue: list[Node] = [root_node]

    while queue:
        node = queue.pop(0)
        if isinstance(node, InternalNode):
            left_node = await data_store.get_node(node.left_hash)
            right_node = await data_store.get_node(node.right_hash)
            ancestors[left_node.hash] = node.hash
            ancestors[right_node.hash] = node.hash
            queue.append(left_node)
            queue.append(right_node)

    ancestors[root_hash] = None
    for node_hash, ancestor_hash in ancestors.items():
        ancestor_node = await data_store._get_one_ancestor(node_hash, store_id)
        if ancestor_hash is None:
            assert ancestor_node is None
        else:
            assert ancestor_node is not None
            assert ancestor_node.hash == ancestor_hash

    return ancestors


@pytest.mark.anyio
async def test_build_ancestor_table(data_store: DataStore, store_id: bytes32) -> None:
    num_values = 1000
    changelist: list[dict[str, Any]] = []
    for value in range(num_values):
        value_bytes = value.to_bytes(4, byteorder="big")
        changelist.append({"action": "upsert", "key": value_bytes, "value": value_bytes})
    await data_store.insert_batch(
        store_id=store_id,
        changelist=changelist,
        status=Status.PENDING,
    )

    pending_root = await data_store.get_pending_root(store_id=store_id)
    assert pending_root is not None
    assert pending_root.node_hash is not None
    await data_store.change_root_status(pending_root, Status.COMMITTED)
    await data_store.build_ancestor_table_for_latest_root(store_id=store_id)

    assert pending_root.node_hash is not None
    await _check_ancestors(data_store, store_id, pending_root.node_hash)


@pytest.mark.anyio
async def test_sparse_ancestor_table(data_store: DataStore, store_id: bytes32) -> None:
    num_values = 100
    for value in range(num_values):
        value_bytes = value.to_bytes(4, byteorder="big")
        await data_store.autoinsert(
            key=value_bytes,
            value=value_bytes,
            store_id=store_id,
            status=Status.COMMITTED,
        )
    root = await data_store.get_tree_root(store_id=store_id)
    assert root.node_hash is not None
    ancestors = await _check_ancestors(data_store, store_id, root.node_hash)

    # Check the ancestor table is sparse
    root_generation = root.generation
    current_generation_count = 0
    previous_generation_count = 0
    for node_hash, ancestor_hash in ancestors.items():
        async with data_store.db_wrapper.reader() as reader:
            if ancestor_hash is not None:
                cursor = await reader.execute(
                    "SELECT MAX(generation) AS generation FROM ancestors WHERE hash == :hash AND ancestor == :ancestor",
                    {"hash": node_hash, "ancestor": ancestor_hash},
                )
            else:
                cursor = await reader.execute(
                    "SELECT MAX(generation) AS generation FROM ancestors WHERE hash == :hash AND ancestor IS NULL",
                    {"hash": node_hash},
                )
            row = await cursor.fetchone()
        assert row is not None
        generation = row["generation"]
        assert generation <= root_generation
        if generation == root_generation:
            current_generation_count += 1
        else:
            previous_generation_count += 1

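    # With 100 leaves the tree holds 199 nodes, hence 199 ancestor entries; only a small,
    # fixed subset belongs to the latest generation, showing rows are not rewritten for
    # untouched subtrees. The exact split below is pinned by the deterministic insert order.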
    assert current_generation_count == 15
    assert previous_generation_count == 184


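# Helper: breadth-first traversal from the current root, returning every node in the tree.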
async def get_all_nodes(data_store: DataStore, store_id: bytes32) -> list[Node]:
    root = await data_store.get_tree_root(store_id)
    assert root.node_hash is not None
    root_node = await data_store.get_node(root.node_hash)
    nodes: list[Node] = []
    queue: list[Node] = [root_node]

    while len(queue) > 0:
        node = queue.pop(0)
        nodes.append(node)
        if isinstance(node, InternalNode):
            left_node = await data_store.get_node(node.left_hash)
            right_node = await data_store.get_node(node.right_hash)
            queue.append(left_node)
            queue.append(right_node)

    return nodes


@pytest.mark.anyio
async def test_get_nodes(data_store: DataStore, store_id: bytes32) -> None:
    num_values = 50
    changelist: list[dict[str, Any]] = []

    for value in range(num_values):
        value_bytes = value.to_bytes(4, byteorder="big")
        changelist.append({"action": "upsert", "key": value_bytes, "value": value_bytes})
    await data_store.insert_batch(
        store_id=store_id,
        changelist=changelist,
        status=Status.COMMITTED,
    )

    expected_nodes = await get_all_nodes(data_store, store_id)
    nodes = await data_store.get_nodes([node.hash for node in expected_nodes])
    assert nodes == expected_nodes

    node_hash = bytes32.zeros
    node_hash_2 = bytes32([0] * 31 + [1])
    with pytest.raises(Exception, match=f"^Nodes not found for hashes: {node_hash.hex()}, {node_hash_2.hex()}"):
        await data_store.get_nodes([node_hash, node_hash_2] + [node.hash for node in expected_nodes])


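# After every `batch_size` inserts this compares get_leaf_at_minimum_height against a
# brute-force height computation over all nodes, then inserts a 20-deep chain under the
# found leaf so the next check must return a different minimum-height leaf.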
@pytest.mark.anyio
@pytest.mark.parametrize("pre", [0, 2048])
@pytest.mark.parametrize("batch_size", [25, 100, 500])
async def test_get_leaf_at_minimum_height(
    data_store: DataStore,
    store_id: bytes32,
    pre: int,
    batch_size: int,
) -> None:
    num_values = 1000
    value_offset = 1000000
    all_min_leafs: set[TerminalNode] = set()

    if pre > 0:
        # This builds a complete binary tree, in order to test more than one batch in the queue before finding the leaf
        changelist: list[dict[str, Any]] = []

        for value in range(pre):
            value_bytes = (value * value).to_bytes(8, byteorder="big")
            changelist.append({"action": "upsert", "key": value_bytes, "value": value_bytes})
        await data_store.insert_batch(
            store_id=store_id,
            changelist=changelist,
            status=Status.COMMITTED,
        )

    for value in range(num_values):
        value_bytes = value.to_bytes(4, byteorder="big")
        # Use autoinsert instead of `insert_batch` to get a more randomly shaped tree
        await data_store.autoinsert(
            key=value_bytes,
            value=value_bytes,
            store_id=store_id,
            status=Status.COMMITTED,
        )

        if (value + 1) % batch_size == 0:
            hash_to_parent: dict[bytes32, InternalNode] = {}
            root = await data_store.get_tree_root(store_id)
            assert root.node_hash is not None
            min_leaf = await data_store.get_leaf_at_minimum_height(root.node_hash, hash_to_parent)
            all_nodes = await get_all_nodes(data_store, store_id)
            heights: dict[bytes32, int] = {}
            heights[root.node_hash] = 0
            min_leaf_height = None

            for node in all_nodes:
                if isinstance(node, InternalNode):
                    heights[node.left_hash] = heights[node.hash] + 1
                    heights[node.right_hash] = heights[node.hash] + 1
                else:
                    if min_leaf_height is not None:
                        min_leaf_height = min(min_leaf_height, heights[node.hash])
                    else:
                        min_leaf_height = heights[node.hash]

            assert min_leaf_height is not None
            if pre > 0:
                assert min_leaf_height >= 11
            for node in all_nodes:
                if isinstance(node, TerminalNode):
                    assert node == min_leaf
                    assert heights[min_leaf.hash] == min_leaf_height
                    break
                if node.left_hash in hash_to_parent:
                    assert hash_to_parent[node.left_hash] == node
                if node.right_hash in hash_to_parent:
                    assert hash_to_parent[node.right_hash] == node

            # Push down the min height leaf, so on the next iteration we get a different leaf
            pushdown_height = 20
            for repeat in range(pushdown_height):
                value_bytes = (value + (repeat + 1) * value_offset).to_bytes(4, byteorder="big")
                await data_store.insert(
                    key=value_bytes,
                    value=value_bytes,
                    store_id=store_id,
                    reference_node_hash=min_leaf.hash,
                    side=Side.RIGHT,
                    status=Status.COMMITTED,
                )
            assert min_leaf not in all_min_leafs
            all_min_leafs.add(min_leaf)