rucio 37.0.0rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rucio might be problematic. See the package registry page for more details.
- rucio/__init__.py +17 -0
- rucio/alembicrevision.py +15 -0
- rucio/cli/__init__.py +14 -0
- rucio/cli/account.py +216 -0
- rucio/cli/bin_legacy/__init__.py +13 -0
- rucio/cli/bin_legacy/rucio.py +2825 -0
- rucio/cli/bin_legacy/rucio_admin.py +2500 -0
- rucio/cli/command.py +272 -0
- rucio/cli/config.py +72 -0
- rucio/cli/did.py +191 -0
- rucio/cli/download.py +128 -0
- rucio/cli/lifetime_exception.py +33 -0
- rucio/cli/replica.py +162 -0
- rucio/cli/rse.py +293 -0
- rucio/cli/rule.py +158 -0
- rucio/cli/scope.py +40 -0
- rucio/cli/subscription.py +73 -0
- rucio/cli/upload.py +60 -0
- rucio/cli/utils.py +226 -0
- rucio/client/__init__.py +15 -0
- rucio/client/accountclient.py +432 -0
- rucio/client/accountlimitclient.py +183 -0
- rucio/client/baseclient.py +983 -0
- rucio/client/client.py +120 -0
- rucio/client/configclient.py +126 -0
- rucio/client/credentialclient.py +59 -0
- rucio/client/didclient.py +868 -0
- rucio/client/diracclient.py +56 -0
- rucio/client/downloadclient.py +1783 -0
- rucio/client/exportclient.py +44 -0
- rucio/client/fileclient.py +50 -0
- rucio/client/importclient.py +42 -0
- rucio/client/lifetimeclient.py +90 -0
- rucio/client/lockclient.py +109 -0
- rucio/client/metaconventionsclient.py +140 -0
- rucio/client/pingclient.py +44 -0
- rucio/client/replicaclient.py +452 -0
- rucio/client/requestclient.py +125 -0
- rucio/client/richclient.py +317 -0
- rucio/client/rseclient.py +746 -0
- rucio/client/ruleclient.py +294 -0
- rucio/client/scopeclient.py +90 -0
- rucio/client/subscriptionclient.py +173 -0
- rucio/client/touchclient.py +82 -0
- rucio/client/uploadclient.py +969 -0
- rucio/common/__init__.py +13 -0
- rucio/common/bittorrent.py +234 -0
- rucio/common/cache.py +111 -0
- rucio/common/checksum.py +168 -0
- rucio/common/client.py +122 -0
- rucio/common/config.py +788 -0
- rucio/common/constants.py +217 -0
- rucio/common/constraints.py +17 -0
- rucio/common/didtype.py +237 -0
- rucio/common/dumper/__init__.py +342 -0
- rucio/common/dumper/consistency.py +497 -0
- rucio/common/dumper/data_models.py +362 -0
- rucio/common/dumper/path_parsing.py +75 -0
- rucio/common/exception.py +1208 -0
- rucio/common/extra.py +31 -0
- rucio/common/logging.py +420 -0
- rucio/common/pcache.py +1409 -0
- rucio/common/plugins.py +185 -0
- rucio/common/policy.py +93 -0
- rucio/common/schema/__init__.py +200 -0
- rucio/common/schema/generic.py +416 -0
- rucio/common/schema/generic_multi_vo.py +395 -0
- rucio/common/stomp_utils.py +423 -0
- rucio/common/stopwatch.py +55 -0
- rucio/common/test_rucio_server.py +154 -0
- rucio/common/types.py +483 -0
- rucio/common/utils.py +1688 -0
- rucio/core/__init__.py +13 -0
- rucio/core/account.py +496 -0
- rucio/core/account_counter.py +236 -0
- rucio/core/account_limit.py +425 -0
- rucio/core/authentication.py +620 -0
- rucio/core/config.py +437 -0
- rucio/core/credential.py +224 -0
- rucio/core/did.py +3004 -0
- rucio/core/did_meta_plugins/__init__.py +252 -0
- rucio/core/did_meta_plugins/did_column_meta.py +331 -0
- rucio/core/did_meta_plugins/did_meta_plugin_interface.py +165 -0
- rucio/core/did_meta_plugins/elasticsearch_meta.py +407 -0
- rucio/core/did_meta_plugins/filter_engine.py +672 -0
- rucio/core/did_meta_plugins/json_meta.py +240 -0
- rucio/core/did_meta_plugins/mongo_meta.py +229 -0
- rucio/core/did_meta_plugins/postgres_meta.py +352 -0
- rucio/core/dirac.py +237 -0
- rucio/core/distance.py +187 -0
- rucio/core/exporter.py +59 -0
- rucio/core/heartbeat.py +363 -0
- rucio/core/identity.py +301 -0
- rucio/core/importer.py +260 -0
- rucio/core/lifetime_exception.py +377 -0
- rucio/core/lock.py +577 -0
- rucio/core/message.py +288 -0
- rucio/core/meta_conventions.py +203 -0
- rucio/core/monitor.py +448 -0
- rucio/core/naming_convention.py +195 -0
- rucio/core/nongrid_trace.py +136 -0
- rucio/core/oidc.py +1463 -0
- rucio/core/permission/__init__.py +161 -0
- rucio/core/permission/generic.py +1124 -0
- rucio/core/permission/generic_multi_vo.py +1144 -0
- rucio/core/quarantined_replica.py +224 -0
- rucio/core/replica.py +4483 -0
- rucio/core/replica_sorter.py +362 -0
- rucio/core/request.py +3091 -0
- rucio/core/rse.py +2079 -0
- rucio/core/rse_counter.py +185 -0
- rucio/core/rse_expression_parser.py +459 -0
- rucio/core/rse_selector.py +304 -0
- rucio/core/rule.py +4484 -0
- rucio/core/rule_grouping.py +1620 -0
- rucio/core/scope.py +181 -0
- rucio/core/subscription.py +362 -0
- rucio/core/topology.py +490 -0
- rucio/core/trace.py +375 -0
- rucio/core/transfer.py +1531 -0
- rucio/core/vo.py +169 -0
- rucio/core/volatile_replica.py +151 -0
- rucio/daemons/__init__.py +13 -0
- rucio/daemons/abacus/__init__.py +13 -0
- rucio/daemons/abacus/account.py +116 -0
- rucio/daemons/abacus/collection_replica.py +124 -0
- rucio/daemons/abacus/rse.py +117 -0
- rucio/daemons/atropos/__init__.py +13 -0
- rucio/daemons/atropos/atropos.py +242 -0
- rucio/daemons/auditor/__init__.py +289 -0
- rucio/daemons/auditor/hdfs.py +97 -0
- rucio/daemons/auditor/srmdumps.py +355 -0
- rucio/daemons/automatix/__init__.py +13 -0
- rucio/daemons/automatix/automatix.py +304 -0
- rucio/daemons/badreplicas/__init__.py +13 -0
- rucio/daemons/badreplicas/minos.py +322 -0
- rucio/daemons/badreplicas/minos_temporary_expiration.py +171 -0
- rucio/daemons/badreplicas/necromancer.py +196 -0
- rucio/daemons/bb8/__init__.py +13 -0
- rucio/daemons/bb8/bb8.py +353 -0
- rucio/daemons/bb8/common.py +759 -0
- rucio/daemons/bb8/nuclei_background_rebalance.py +153 -0
- rucio/daemons/bb8/t2_background_rebalance.py +153 -0
- rucio/daemons/cache/__init__.py +13 -0
- rucio/daemons/cache/consumer.py +133 -0
- rucio/daemons/common.py +405 -0
- rucio/daemons/conveyor/__init__.py +13 -0
- rucio/daemons/conveyor/common.py +562 -0
- rucio/daemons/conveyor/finisher.py +529 -0
- rucio/daemons/conveyor/poller.py +394 -0
- rucio/daemons/conveyor/preparer.py +205 -0
- rucio/daemons/conveyor/receiver.py +179 -0
- rucio/daemons/conveyor/stager.py +133 -0
- rucio/daemons/conveyor/submitter.py +403 -0
- rucio/daemons/conveyor/throttler.py +532 -0
- rucio/daemons/follower/__init__.py +13 -0
- rucio/daemons/follower/follower.py +101 -0
- rucio/daemons/hermes/__init__.py +13 -0
- rucio/daemons/hermes/hermes.py +534 -0
- rucio/daemons/judge/__init__.py +13 -0
- rucio/daemons/judge/cleaner.py +159 -0
- rucio/daemons/judge/evaluator.py +185 -0
- rucio/daemons/judge/injector.py +162 -0
- rucio/daemons/judge/repairer.py +154 -0
- rucio/daemons/oauthmanager/__init__.py +13 -0
- rucio/daemons/oauthmanager/oauthmanager.py +198 -0
- rucio/daemons/reaper/__init__.py +13 -0
- rucio/daemons/reaper/dark_reaper.py +282 -0
- rucio/daemons/reaper/reaper.py +739 -0
- rucio/daemons/replicarecoverer/__init__.py +13 -0
- rucio/daemons/replicarecoverer/suspicious_replica_recoverer.py +626 -0
- rucio/daemons/rsedecommissioner/__init__.py +13 -0
- rucio/daemons/rsedecommissioner/config.py +81 -0
- rucio/daemons/rsedecommissioner/profiles/__init__.py +24 -0
- rucio/daemons/rsedecommissioner/profiles/atlas.py +60 -0
- rucio/daemons/rsedecommissioner/profiles/generic.py +452 -0
- rucio/daemons/rsedecommissioner/profiles/types.py +93 -0
- rucio/daemons/rsedecommissioner/rse_decommissioner.py +280 -0
- rucio/daemons/storage/__init__.py +13 -0
- rucio/daemons/storage/consistency/__init__.py +13 -0
- rucio/daemons/storage/consistency/actions.py +848 -0
- rucio/daemons/tracer/__init__.py +13 -0
- rucio/daemons/tracer/kronos.py +511 -0
- rucio/daemons/transmogrifier/__init__.py +13 -0
- rucio/daemons/transmogrifier/transmogrifier.py +762 -0
- rucio/daemons/undertaker/__init__.py +13 -0
- rucio/daemons/undertaker/undertaker.py +137 -0
- rucio/db/__init__.py +13 -0
- rucio/db/sqla/__init__.py +52 -0
- rucio/db/sqla/constants.py +206 -0
- rucio/db/sqla/migrate_repo/__init__.py +13 -0
- rucio/db/sqla/migrate_repo/env.py +110 -0
- rucio/db/sqla/migrate_repo/versions/01eaf73ab656_add_new_rule_notification_state_progress.py +70 -0
- rucio/db/sqla/migrate_repo/versions/0437a40dbfd1_add_eol_at_in_rules.py +47 -0
- rucio/db/sqla/migrate_repo/versions/0f1adb7a599a_create_transfer_hops_table.py +59 -0
- rucio/db/sqla/migrate_repo/versions/102efcf145f4_added_stuck_at_column_to_rules.py +43 -0
- rucio/db/sqla/migrate_repo/versions/13d4f70c66a9_introduce_transfer_limits.py +91 -0
- rucio/db/sqla/migrate_repo/versions/140fef722e91_cleanup_distances_table.py +76 -0
- rucio/db/sqla/migrate_repo/versions/14ec5aeb64cf_add_request_external_host.py +43 -0
- rucio/db/sqla/migrate_repo/versions/156fb5b5a14_add_request_type_to_requests_idx.py +50 -0
- rucio/db/sqla/migrate_repo/versions/1677d4d803c8_split_rse_availability_into_multiple.py +68 -0
- rucio/db/sqla/migrate_repo/versions/16a0aca82e12_create_index_on_table_replicas_path.py +40 -0
- rucio/db/sqla/migrate_repo/versions/1803333ac20f_adding_provenance_and_phys_group.py +45 -0
- rucio/db/sqla/migrate_repo/versions/1a29d6a9504c_add_didtype_chck_to_requests.py +60 -0
- rucio/db/sqla/migrate_repo/versions/1a80adff031a_create_index_on_rules_hist_recent.py +40 -0
- rucio/db/sqla/migrate_repo/versions/1c45d9730ca6_increase_identity_length.py +140 -0
- rucio/db/sqla/migrate_repo/versions/1d1215494e95_add_quarantined_replicas_table.py +73 -0
- rucio/db/sqla/migrate_repo/versions/1d96f484df21_asynchronous_rules_and_rule_approval.py +74 -0
- rucio/db/sqla/migrate_repo/versions/1f46c5f240ac_add_bytes_column_to_bad_replicas.py +43 -0
- rucio/db/sqla/migrate_repo/versions/1fc15ab60d43_add_message_history_table.py +50 -0
- rucio/db/sqla/migrate_repo/versions/2190e703eb6e_move_rse_settings_to_rse_attributes.py +134 -0
- rucio/db/sqla/migrate_repo/versions/21d6b9dc9961_add_mismatch_scheme_state_to_requests.py +64 -0
- rucio/db/sqla/migrate_repo/versions/22cf51430c78_add_availability_column_to_table_rses.py +39 -0
- rucio/db/sqla/migrate_repo/versions/22d887e4ec0a_create_sources_table.py +64 -0
- rucio/db/sqla/migrate_repo/versions/25821a8a45a3_remove_unique_constraint_on_requests.py +51 -0
- rucio/db/sqla/migrate_repo/versions/25fc855625cf_added_unique_constraint_to_rules.py +41 -0
- rucio/db/sqla/migrate_repo/versions/269fee20dee9_add_repair_cnt_to_locks.py +43 -0
- rucio/db/sqla/migrate_repo/versions/271a46ea6244_add_ignore_availability_column_to_rules.py +44 -0
- rucio/db/sqla/migrate_repo/versions/277b5fbb41d3_switch_heartbeats_executable.py +53 -0
- rucio/db/sqla/migrate_repo/versions/27e3a68927fb_remove_replicas_tombstone_and_replicas_.py +38 -0
- rucio/db/sqla/migrate_repo/versions/2854cd9e168_added_rule_id_column.py +47 -0
- rucio/db/sqla/migrate_repo/versions/295289b5a800_processed_by_and__at_in_requests.py +45 -0
- rucio/db/sqla/migrate_repo/versions/2962ece31cf4_add_nbaccesses_column_in_the_did_table.py +45 -0
- rucio/db/sqla/migrate_repo/versions/2af3291ec4c_added_replicas_history_table.py +57 -0
- rucio/db/sqla/migrate_repo/versions/2b69addda658_add_columns_for_third_party_copy_read_.py +45 -0
- rucio/db/sqla/migrate_repo/versions/2b8e7bcb4783_add_config_table.py +69 -0
- rucio/db/sqla/migrate_repo/versions/2ba5229cb54c_add_submitted_at_to_requests_table.py +43 -0
- rucio/db/sqla/migrate_repo/versions/2cbee484dcf9_added_column_volume_to_rse_transfer_.py +42 -0
- rucio/db/sqla/migrate_repo/versions/2edee4a83846_add_source_to_requests_and_requests_.py +47 -0
- rucio/db/sqla/migrate_repo/versions/2eef46be23d4_change_tokens_pk.py +46 -0
- rucio/db/sqla/migrate_repo/versions/2f648fc909f3_index_in_rule_history_on_scope_name.py +40 -0
- rucio/db/sqla/migrate_repo/versions/3082b8cef557_add_naming_convention_table_and_closed_.py +67 -0
- rucio/db/sqla/migrate_repo/versions/30d5206e9cad_increase_oauthrequest_redirect_msg_.py +37 -0
- rucio/db/sqla/migrate_repo/versions/30fa38b6434e_add_index_on_service_column_in_the_message_table.py +44 -0
- rucio/db/sqla/migrate_repo/versions/3152492b110b_added_staging_area_column.py +77 -0
- rucio/db/sqla/migrate_repo/versions/32c7d2783f7e_create_bad_replicas_table.py +60 -0
- rucio/db/sqla/migrate_repo/versions/3345511706b8_replicas_table_pk_definition_is_in_.py +72 -0
- rucio/db/sqla/migrate_repo/versions/35ef10d1e11b_change_index_on_table_requests.py +42 -0
- rucio/db/sqla/migrate_repo/versions/379a19b5332d_create_rse_limits_table.py +65 -0
- rucio/db/sqla/migrate_repo/versions/384b96aa0f60_created_rule_history_tables.py +133 -0
- rucio/db/sqla/migrate_repo/versions/3ac1660a1a72_extend_distance_table.py +55 -0
- rucio/db/sqla/migrate_repo/versions/3ad36e2268b0_create_collection_replicas_updates_table.py +76 -0
- rucio/db/sqla/migrate_repo/versions/3c9df354071b_extend_waiting_request_state.py +60 -0
- rucio/db/sqla/migrate_repo/versions/3d9813fab443_add_a_new_state_lost_in_badfilesstatus.py +44 -0
- rucio/db/sqla/migrate_repo/versions/40ad39ce3160_add_transferred_at_to_requests_table.py +43 -0
- rucio/db/sqla/migrate_repo/versions/4207be2fd914_add_notification_column_to_rules.py +64 -0
- rucio/db/sqla/migrate_repo/versions/42db2617c364_create_index_on_requests_external_id.py +40 -0
- rucio/db/sqla/migrate_repo/versions/436827b13f82_added_column_activity_to_table_requests.py +43 -0
- rucio/db/sqla/migrate_repo/versions/44278720f774_update_requests_typ_sta_upd_idx_index.py +44 -0
- rucio/db/sqla/migrate_repo/versions/45378a1e76a8_create_collection_replica_table.py +78 -0
- rucio/db/sqla/migrate_repo/versions/469d262be19_removing_created_at_index.py +41 -0
- rucio/db/sqla/migrate_repo/versions/4783c1f49cb4_create_distance_table.py +59 -0
- rucio/db/sqla/migrate_repo/versions/49a21b4d4357_create_index_on_table_tokens.py +44 -0
- rucio/db/sqla/migrate_repo/versions/4a2cbedda8b9_add_source_replica_expression_column_to_.py +43 -0
- rucio/db/sqla/migrate_repo/versions/4a7182d9578b_added_bytes_length_accessed_at_columns.py +49 -0
- rucio/db/sqla/migrate_repo/versions/4bab9edd01fc_create_index_on_requests_rule_id.py +40 -0
- rucio/db/sqla/migrate_repo/versions/4c3a4acfe006_new_attr_account_table.py +63 -0
- rucio/db/sqla/migrate_repo/versions/4cf0a2e127d4_adding_transient_metadata.py +43 -0
- rucio/db/sqla/migrate_repo/versions/4df2c5ddabc0_remove_temporary_dids.py +55 -0
- rucio/db/sqla/migrate_repo/versions/50280c53117c_add_qos_class_to_rse.py +45 -0
- rucio/db/sqla/migrate_repo/versions/52153819589c_add_rse_id_to_replicas_table.py +43 -0
- rucio/db/sqla/migrate_repo/versions/52fd9f4916fa_added_activity_to_rules.py +43 -0
- rucio/db/sqla/migrate_repo/versions/53b479c3cb0f_fix_did_meta_table_missing_updated_at_.py +45 -0
- rucio/db/sqla/migrate_repo/versions/5673b4b6e843_add_wfms_metadata_to_rule_tables.py +47 -0
- rucio/db/sqla/migrate_repo/versions/575767d9f89_added_source_history_table.py +58 -0
- rucio/db/sqla/migrate_repo/versions/58bff7008037_add_started_at_to_requests.py +45 -0
- rucio/db/sqla/migrate_repo/versions/58c8b78301ab_rename_callback_to_message.py +106 -0
- rucio/db/sqla/migrate_repo/versions/5f139f77382a_added_child_rule_id_column.py +55 -0
- rucio/db/sqla/migrate_repo/versions/688ef1840840_adding_did_meta_table.py +50 -0
- rucio/db/sqla/migrate_repo/versions/6e572a9bfbf3_add_new_split_container_column_to_rules.py +47 -0
- rucio/db/sqla/migrate_repo/versions/70587619328_add_comment_column_for_subscriptions.py +43 -0
- rucio/db/sqla/migrate_repo/versions/739064d31565_remove_history_table_pks.py +41 -0
- rucio/db/sqla/migrate_repo/versions/7541902bf173_add_didsfollowed_and_followevents_table.py +91 -0
- rucio/db/sqla/migrate_repo/versions/7ec22226cdbf_new_replica_state_for_temporary_.py +72 -0
- rucio/db/sqla/migrate_repo/versions/810a41685bc1_added_columns_rse_transfer_limits.py +49 -0
- rucio/db/sqla/migrate_repo/versions/83f991c63a93_correct_rse_expression_length.py +43 -0
- rucio/db/sqla/migrate_repo/versions/8523998e2e76_increase_size_of_extended_attributes_.py +43 -0
- rucio/db/sqla/migrate_repo/versions/8ea9122275b1_adding_missing_function_based_indices.py +53 -0
- rucio/db/sqla/migrate_repo/versions/90f47792bb76_add_clob_payload_to_messages.py +45 -0
- rucio/db/sqla/migrate_repo/versions/914b8f02df38_new_table_for_lifetime_model_exceptions.py +68 -0
- rucio/db/sqla/migrate_repo/versions/94a5961ddbf2_add_estimator_columns.py +45 -0
- rucio/db/sqla/migrate_repo/versions/9a1b149a2044_add_saml_identity_type.py +94 -0
- rucio/db/sqla/migrate_repo/versions/9a45bc4ea66d_add_vp_table.py +54 -0
- rucio/db/sqla/migrate_repo/versions/9eb936a81eb1_true_is_true.py +72 -0
- rucio/db/sqla/migrate_repo/versions/a08fa8de1545_transfer_stats_table.py +55 -0
- rucio/db/sqla/migrate_repo/versions/a118956323f8_added_vo_table_and_vo_col_to_rse.py +76 -0
- rucio/db/sqla/migrate_repo/versions/a193a275255c_add_status_column_in_messages.py +47 -0
- rucio/db/sqla/migrate_repo/versions/a5f6f6e928a7_1_7_0.py +121 -0
- rucio/db/sqla/migrate_repo/versions/a616581ee47_added_columns_to_table_requests.py +59 -0
- rucio/db/sqla/migrate_repo/versions/a6eb23955c28_state_idx_non_functional.py +52 -0
- rucio/db/sqla/migrate_repo/versions/a74275a1ad30_added_global_quota_table.py +54 -0
- rucio/db/sqla/migrate_repo/versions/a93e4e47bda_heartbeats.py +64 -0
- rucio/db/sqla/migrate_repo/versions/ae2a56fcc89_added_comment_column_to_rules.py +49 -0
- rucio/db/sqla/migrate_repo/versions/b0070f3695c8_add_deletedidmeta_table.py +57 -0
- rucio/db/sqla/migrate_repo/versions/b4293a99f344_added_column_identity_to_table_tokens.py +43 -0
- rucio/db/sqla/migrate_repo/versions/b5493606bbf5_fix_primary_key_for_subscription_history.py +41 -0
- rucio/db/sqla/migrate_repo/versions/b7d287de34fd_removal_of_replicastate_source.py +91 -0
- rucio/db/sqla/migrate_repo/versions/b818052fa670_add_index_to_quarantined_replicas.py +40 -0
- rucio/db/sqla/migrate_repo/versions/b8caac94d7f0_add_comments_column_for_subscriptions_.py +43 -0
- rucio/db/sqla/migrate_repo/versions/b96a1c7e1cc4_new_bad_pfns_table_and_bad_replicas_.py +143 -0
- rucio/db/sqla/migrate_repo/versions/bb695f45c04_extend_request_state.py +76 -0
- rucio/db/sqla/migrate_repo/versions/bc68e9946deb_add_staging_timestamps_to_request.py +50 -0
- rucio/db/sqla/migrate_repo/versions/bf3baa1c1474_correct_pk_and_idx_for_history_tables.py +72 -0
- rucio/db/sqla/migrate_repo/versions/c0937668555f_add_qos_policy_map_table.py +55 -0
- rucio/db/sqla/migrate_repo/versions/c129ccdb2d5_add_lumiblocknr_to_dids.py +43 -0
- rucio/db/sqla/migrate_repo/versions/ccdbcd48206e_add_did_type_column_index_on_did_meta_.py +65 -0
- rucio/db/sqla/migrate_repo/versions/cebad904c4dd_new_payload_column_for_heartbeats.py +47 -0
- rucio/db/sqla/migrate_repo/versions/d1189a09c6e0_oauth2_0_and_jwt_feature_support_adding_.py +146 -0
- rucio/db/sqla/migrate_repo/versions/d23453595260_extend_request_state_for_preparer.py +104 -0
- rucio/db/sqla/migrate_repo/versions/d6dceb1de2d_added_purge_column_to_rules.py +44 -0
- rucio/db/sqla/migrate_repo/versions/d6e2c3b2cf26_remove_third_party_copy_column_from_rse.py +43 -0
- rucio/db/sqla/migrate_repo/versions/d91002c5841_new_account_limits_table.py +103 -0
- rucio/db/sqla/migrate_repo/versions/e138c364ebd0_extending_columns_for_filter_and_.py +49 -0
- rucio/db/sqla/migrate_repo/versions/e59300c8b179_support_for_archive.py +104 -0
- rucio/db/sqla/migrate_repo/versions/f1b14a8c2ac1_postgres_use_check_constraints.py +29 -0
- rucio/db/sqla/migrate_repo/versions/f41ffe206f37_oracle_global_temporary_tables.py +74 -0
- rucio/db/sqla/migrate_repo/versions/f85a2962b021_adding_transfertool_column_to_requests_.py +47 -0
- rucio/db/sqla/migrate_repo/versions/fa7a7d78b602_increase_refresh_token_size.py +43 -0
- rucio/db/sqla/migrate_repo/versions/fb28a95fe288_add_replicas_rse_id_tombstone_idx.py +37 -0
- rucio/db/sqla/migrate_repo/versions/fe1a65b176c9_set_third_party_copy_read_and_write_.py +43 -0
- rucio/db/sqla/migrate_repo/versions/fe8ea2fa9788_added_third_party_copy_column_to_rse_.py +43 -0
- rucio/db/sqla/models.py +1743 -0
- rucio/db/sqla/sautils.py +55 -0
- rucio/db/sqla/session.py +529 -0
- rucio/db/sqla/types.py +206 -0
- rucio/db/sqla/util.py +543 -0
- rucio/gateway/__init__.py +13 -0
- rucio/gateway/account.py +345 -0
- rucio/gateway/account_limit.py +363 -0
- rucio/gateway/authentication.py +381 -0
- rucio/gateway/config.py +227 -0
- rucio/gateway/credential.py +70 -0
- rucio/gateway/did.py +987 -0
- rucio/gateway/dirac.py +83 -0
- rucio/gateway/exporter.py +60 -0
- rucio/gateway/heartbeat.py +76 -0
- rucio/gateway/identity.py +189 -0
- rucio/gateway/importer.py +46 -0
- rucio/gateway/lifetime_exception.py +121 -0
- rucio/gateway/lock.py +153 -0
- rucio/gateway/meta_conventions.py +98 -0
- rucio/gateway/permission.py +74 -0
- rucio/gateway/quarantined_replica.py +79 -0
- rucio/gateway/replica.py +538 -0
- rucio/gateway/request.py +330 -0
- rucio/gateway/rse.py +632 -0
- rucio/gateway/rule.py +437 -0
- rucio/gateway/scope.py +100 -0
- rucio/gateway/subscription.py +280 -0
- rucio/gateway/vo.py +126 -0
- rucio/rse/__init__.py +96 -0
- rucio/rse/protocols/__init__.py +13 -0
- rucio/rse/protocols/bittorrent.py +194 -0
- rucio/rse/protocols/cache.py +111 -0
- rucio/rse/protocols/dummy.py +100 -0
- rucio/rse/protocols/gfal.py +708 -0
- rucio/rse/protocols/globus.py +243 -0
- rucio/rse/protocols/http_cache.py +82 -0
- rucio/rse/protocols/mock.py +123 -0
- rucio/rse/protocols/ngarc.py +209 -0
- rucio/rse/protocols/posix.py +250 -0
- rucio/rse/protocols/protocol.py +361 -0
- rucio/rse/protocols/rclone.py +365 -0
- rucio/rse/protocols/rfio.py +145 -0
- rucio/rse/protocols/srm.py +338 -0
- rucio/rse/protocols/ssh.py +414 -0
- rucio/rse/protocols/storm.py +195 -0
- rucio/rse/protocols/webdav.py +594 -0
- rucio/rse/protocols/xrootd.py +302 -0
- rucio/rse/rsemanager.py +881 -0
- rucio/rse/translation.py +260 -0
- rucio/tests/__init__.py +13 -0
- rucio/tests/common.py +280 -0
- rucio/tests/common_server.py +149 -0
- rucio/transfertool/__init__.py +13 -0
- rucio/transfertool/bittorrent.py +200 -0
- rucio/transfertool/bittorrent_driver.py +50 -0
- rucio/transfertool/bittorrent_driver_qbittorrent.py +134 -0
- rucio/transfertool/fts3.py +1600 -0
- rucio/transfertool/fts3_plugins.py +152 -0
- rucio/transfertool/globus.py +201 -0
- rucio/transfertool/globus_library.py +181 -0
- rucio/transfertool/mock.py +89 -0
- rucio/transfertool/transfertool.py +221 -0
- rucio/vcsversion.py +11 -0
- rucio/version.py +45 -0
- rucio/web/__init__.py +13 -0
- rucio/web/rest/__init__.py +13 -0
- rucio/web/rest/flaskapi/__init__.py +13 -0
- rucio/web/rest/flaskapi/authenticated_bp.py +27 -0
- rucio/web/rest/flaskapi/v1/__init__.py +13 -0
- rucio/web/rest/flaskapi/v1/accountlimits.py +236 -0
- rucio/web/rest/flaskapi/v1/accounts.py +1103 -0
- rucio/web/rest/flaskapi/v1/archives.py +102 -0
- rucio/web/rest/flaskapi/v1/auth.py +1644 -0
- rucio/web/rest/flaskapi/v1/common.py +426 -0
- rucio/web/rest/flaskapi/v1/config.py +304 -0
- rucio/web/rest/flaskapi/v1/credentials.py +213 -0
- rucio/web/rest/flaskapi/v1/dids.py +2340 -0
- rucio/web/rest/flaskapi/v1/dirac.py +116 -0
- rucio/web/rest/flaskapi/v1/export.py +75 -0
- rucio/web/rest/flaskapi/v1/heartbeats.py +127 -0
- rucio/web/rest/flaskapi/v1/identities.py +285 -0
- rucio/web/rest/flaskapi/v1/import.py +132 -0
- rucio/web/rest/flaskapi/v1/lifetime_exceptions.py +312 -0
- rucio/web/rest/flaskapi/v1/locks.py +358 -0
- rucio/web/rest/flaskapi/v1/main.py +91 -0
- rucio/web/rest/flaskapi/v1/meta_conventions.py +241 -0
- rucio/web/rest/flaskapi/v1/metrics.py +36 -0
- rucio/web/rest/flaskapi/v1/nongrid_traces.py +97 -0
- rucio/web/rest/flaskapi/v1/ping.py +88 -0
- rucio/web/rest/flaskapi/v1/redirect.py +366 -0
- rucio/web/rest/flaskapi/v1/replicas.py +1894 -0
- rucio/web/rest/flaskapi/v1/requests.py +998 -0
- rucio/web/rest/flaskapi/v1/rses.py +2250 -0
- rucio/web/rest/flaskapi/v1/rules.py +854 -0
- rucio/web/rest/flaskapi/v1/scopes.py +159 -0
- rucio/web/rest/flaskapi/v1/subscriptions.py +650 -0
- rucio/web/rest/flaskapi/v1/templates/auth_crash.html +80 -0
- rucio/web/rest/flaskapi/v1/templates/auth_granted.html +82 -0
- rucio/web/rest/flaskapi/v1/traces.py +137 -0
- rucio/web/rest/flaskapi/v1/types.py +20 -0
- rucio/web/rest/flaskapi/v1/vos.py +278 -0
- rucio/web/rest/main.py +18 -0
- rucio/web/rest/metrics.py +27 -0
- rucio/web/rest/ping.py +27 -0
- rucio-37.0.0rc1.data/data/rucio/etc/alembic.ini.template +71 -0
- rucio-37.0.0rc1.data/data/rucio/etc/alembic_offline.ini.template +74 -0
- rucio-37.0.0rc1.data/data/rucio/etc/globus-config.yml.template +5 -0
- rucio-37.0.0rc1.data/data/rucio/etc/ldap.cfg.template +30 -0
- rucio-37.0.0rc1.data/data/rucio/etc/mail_templates/rule_approval_request.tmpl +38 -0
- rucio-37.0.0rc1.data/data/rucio/etc/mail_templates/rule_approved_admin.tmpl +4 -0
- rucio-37.0.0rc1.data/data/rucio/etc/mail_templates/rule_approved_user.tmpl +17 -0
- rucio-37.0.0rc1.data/data/rucio/etc/mail_templates/rule_denied_admin.tmpl +6 -0
- rucio-37.0.0rc1.data/data/rucio/etc/mail_templates/rule_denied_user.tmpl +17 -0
- rucio-37.0.0rc1.data/data/rucio/etc/mail_templates/rule_ok_notification.tmpl +19 -0
- rucio-37.0.0rc1.data/data/rucio/etc/rse-accounts.cfg.template +25 -0
- rucio-37.0.0rc1.data/data/rucio/etc/rucio.cfg.atlas.client.template +43 -0
- rucio-37.0.0rc1.data/data/rucio/etc/rucio.cfg.template +241 -0
- rucio-37.0.0rc1.data/data/rucio/etc/rucio_multi_vo.cfg.template +217 -0
- rucio-37.0.0rc1.data/data/rucio/requirements.server.txt +297 -0
- rucio-37.0.0rc1.data/data/rucio/tools/bootstrap.py +34 -0
- rucio-37.0.0rc1.data/data/rucio/tools/merge_rucio_configs.py +144 -0
- rucio-37.0.0rc1.data/data/rucio/tools/reset_database.py +40 -0
- rucio-37.0.0rc1.data/scripts/rucio +133 -0
- rucio-37.0.0rc1.data/scripts/rucio-abacus-account +74 -0
- rucio-37.0.0rc1.data/scripts/rucio-abacus-collection-replica +46 -0
- rucio-37.0.0rc1.data/scripts/rucio-abacus-rse +78 -0
- rucio-37.0.0rc1.data/scripts/rucio-admin +97 -0
- rucio-37.0.0rc1.data/scripts/rucio-atropos +60 -0
- rucio-37.0.0rc1.data/scripts/rucio-auditor +206 -0
- rucio-37.0.0rc1.data/scripts/rucio-automatix +50 -0
- rucio-37.0.0rc1.data/scripts/rucio-bb8 +57 -0
- rucio-37.0.0rc1.data/scripts/rucio-cache-client +141 -0
- rucio-37.0.0rc1.data/scripts/rucio-cache-consumer +42 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-finisher +58 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-poller +66 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-preparer +37 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-receiver +44 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-stager +76 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-submitter +139 -0
- rucio-37.0.0rc1.data/scripts/rucio-conveyor-throttler +104 -0
- rucio-37.0.0rc1.data/scripts/rucio-dark-reaper +53 -0
- rucio-37.0.0rc1.data/scripts/rucio-dumper +160 -0
- rucio-37.0.0rc1.data/scripts/rucio-follower +44 -0
- rucio-37.0.0rc1.data/scripts/rucio-hermes +54 -0
- rucio-37.0.0rc1.data/scripts/rucio-judge-cleaner +89 -0
- rucio-37.0.0rc1.data/scripts/rucio-judge-evaluator +137 -0
- rucio-37.0.0rc1.data/scripts/rucio-judge-injector +44 -0
- rucio-37.0.0rc1.data/scripts/rucio-judge-repairer +44 -0
- rucio-37.0.0rc1.data/scripts/rucio-kronos +44 -0
- rucio-37.0.0rc1.data/scripts/rucio-minos +53 -0
- rucio-37.0.0rc1.data/scripts/rucio-minos-temporary-expiration +50 -0
- rucio-37.0.0rc1.data/scripts/rucio-necromancer +120 -0
- rucio-37.0.0rc1.data/scripts/rucio-oauth-manager +63 -0
- rucio-37.0.0rc1.data/scripts/rucio-reaper +83 -0
- rucio-37.0.0rc1.data/scripts/rucio-replica-recoverer +248 -0
- rucio-37.0.0rc1.data/scripts/rucio-rse-decommissioner +66 -0
- rucio-37.0.0rc1.data/scripts/rucio-storage-consistency-actions +74 -0
- rucio-37.0.0rc1.data/scripts/rucio-transmogrifier +77 -0
- rucio-37.0.0rc1.data/scripts/rucio-undertaker +76 -0
- rucio-37.0.0rc1.dist-info/METADATA +92 -0
- rucio-37.0.0rc1.dist-info/RECORD +487 -0
- rucio-37.0.0rc1.dist-info/WHEEL +5 -0
- rucio-37.0.0rc1.dist-info/licenses/AUTHORS.rst +100 -0
- rucio-37.0.0rc1.dist-info/licenses/LICENSE +201 -0
- rucio-37.0.0rc1.dist-info/top_level.txt +1 -0
rucio/core/transfer.py
ADDED
|
@@ -0,0 +1,1531 @@
|
|
|
1
|
+
# Copyright European Organization for Nuclear Research (CERN) since 2012
|
|
2
|
+
#
|
|
3
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
4
|
+
# you may not use this file except in compliance with the License.
|
|
5
|
+
# You may obtain a copy of the License at
|
|
6
|
+
#
|
|
7
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
8
|
+
#
|
|
9
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
10
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
11
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
12
|
+
# See the License for the specific language governing permissions and
|
|
13
|
+
# limitations under the License.
|
|
14
|
+
|
|
15
|
+
import datetime
|
|
16
|
+
import logging
|
|
17
|
+
import operator
|
|
18
|
+
import re
|
|
19
|
+
import sys
|
|
20
|
+
import time
|
|
21
|
+
import traceback
|
|
22
|
+
from collections import defaultdict
|
|
23
|
+
from typing import TYPE_CHECKING, cast
|
|
24
|
+
|
|
25
|
+
from dogpile.cache import make_region
|
|
26
|
+
from dogpile.cache.api import NoValue
|
|
27
|
+
from sqlalchemy import select, update
|
|
28
|
+
from sqlalchemy.exc import IntegrityError
|
|
29
|
+
|
|
30
|
+
from rucio.common import constants
|
|
31
|
+
from rucio.common.config import config_get, config_get_list
|
|
32
|
+
from rucio.common.constants import SUPPORTED_PROTOCOLS, RseAttr
|
|
33
|
+
from rucio.common.exception import InvalidRSEExpression, RequestNotFound, RSEProtocolNotSupported, RucioException, UnsupportedOperation
|
|
34
|
+
from rucio.common.utils import construct_non_deterministic_pfn
|
|
35
|
+
from rucio.core import did
|
|
36
|
+
from rucio.core import message as message_core
|
|
37
|
+
from rucio.core import request as request_core
|
|
38
|
+
from rucio.core.account import list_accounts
|
|
39
|
+
from rucio.core.monitor import MetricManager
|
|
40
|
+
from rucio.core.request import DirectTransfer, RequestSource, RequestWithSources, TransferDestination, transition_request_state
|
|
41
|
+
from rucio.core.rse_expression_parser import parse_expression
|
|
42
|
+
from rucio.db.sqla import models
|
|
43
|
+
from rucio.db.sqla.constants import DIDType, RequestState, RequestType, TransferLimitDirection
|
|
44
|
+
from rucio.db.sqla.session import read_session, stream_session, transactional_session
|
|
45
|
+
from rucio.rse import rsemanager as rsemgr
|
|
46
|
+
from rucio.transfertool.bittorrent import BittorrentTransfertool
|
|
47
|
+
from rucio.transfertool.fts3 import FTS3Transfertool
|
|
48
|
+
from rucio.transfertool.globus import GlobusTransferTool
|
|
49
|
+
from rucio.transfertool.mock import MockTransfertool
|
|
50
|
+
|
|
51
|
+
if TYPE_CHECKING:
|
|
52
|
+
from collections.abc import Callable, Iterable, Iterator, Mapping, Sequence
|
|
53
|
+
from typing import Any, Optional
|
|
54
|
+
|
|
55
|
+
from sqlalchemy.orm import Session
|
|
56
|
+
|
|
57
|
+
from rucio.common.types import InternalAccount, LFNDict
|
|
58
|
+
from rucio.core.rse import RseData
|
|
59
|
+
from rucio.core.topology import Topology
|
|
60
|
+
from rucio.rse.protocols.protocol import RSEProtocol
|
|
61
|
+
from rucio.transfertool.transfertool import TransferStatusReport, Transfertool
|
|
62
|
+
|
|
63
|
+
LoggerFunction = Callable[..., Any]
|
|
64
|
+
|
|
65
|
+
"""
|
|
66
|
+
The core transfer.py is specifically for handling transfer-requests, thus requests
|
|
67
|
+
where the external_id is already known.
|
|
68
|
+
Requests accessed by request_id are covered in the core request.py
|
|
69
|
+
"""
|
|
70
|
+
|
|
71
|
+
REGION_ACCOUNTS = make_region().configure('dogpile.cache.memory', expiration_time=600)
|
|
72
|
+
METRICS = MetricManager(module=__name__)
|
|
73
|
+
|
|
74
|
+
WEBDAV_TRANSFER_MODE = config_get('conveyor', 'webdav_transfer_mode', False, None)
|
|
75
|
+
|
|
76
|
+
DEFAULT_MULTIHOP_TOMBSTONE_DELAY = int(datetime.timedelta(hours=2).total_seconds())
|
|
77
|
+
|
|
78
|
+
TRANSFERTOOL_CLASSES_BY_NAME: "dict[str, type[Transfertool]]" = {
|
|
79
|
+
FTS3Transfertool.external_name: FTS3Transfertool,
|
|
80
|
+
GlobusTransferTool.external_name: GlobusTransferTool,
|
|
81
|
+
MockTransfertool.external_name: MockTransfertool,
|
|
82
|
+
BittorrentTransfertool.external_name: BittorrentTransfertool,
|
|
83
|
+
}
|
|
84
|
+
|
|
85
|
+
|
|
86
|
+
class ProtocolFactory:
    """
    Factory which lazily creates RSE protocol objects and caches them for reuse.
    """

    def __init__(self):
        # Cache of protocol objects keyed by "operation_rseid_scheme".
        self.protocols = {}

    def protocol(self, rse: 'RseData', scheme: "Optional[str]", operation: str):
        """
        Return the protocol object for the given (rse, scheme, operation) triple,
        creating and caching it on first use.
        """
        cache_key = '%s_%s_%s' % (operation, rse.id, scheme)
        cached = self.protocols.get(cache_key)
        if cached:
            return cached
        created = rsemgr.create_protocol(rse.info, operation, scheme)
        self.protocols[cache_key] = created
        return created
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class DirectTransferImplementation(DirectTransfer):
    """
    The configuration for a direct (non-multi-hop) transfer. It can be a multi-source transfer.

    The class wraps the legacy dict-based transfer definition to maintain compatibility with existing code
    during the migration.
    """
    def __init__(self, source: RequestSource, destination: TransferDestination, rws: RequestWithSources,
                 protocol_factory: ProtocolFactory, operation_src: str, operation_dest: str):
        super().__init__(sources=[source], rws=rws)
        self.destination = destination

        self.protocol_factory = protocol_factory
        self.operation_src = operation_src
        self.operation_dest = operation_dest

        # Lazily computed and cached by the dest_url property / source_url method.
        self._dest_url = None
        self._source_urls = {}

    def __str__(self):
        # e.g. "RSE_A,RSE_B--<request_id>->RSE_C"
        return '{sources}--{request_id}->{destination}'.format(
            sources=','.join([str(s.rse) for s in self.sources]),
            request_id=self.rws.request_id or '',
            destination=self.dst.rse
        )

    @property
    def src(self) -> RequestSource:
        """The main (first) source of the transfer."""
        return self.sources[0]

    @property
    def dst(self) -> TransferDestination:
        """The destination of the transfer."""
        return self.destination

    @property
    def dest_url(self) -> str:
        """Destination PFN; computed on first access and cached."""
        if not self._dest_url:
            self._dest_url = self._generate_dest_url(self.dst, self.rws, self.protocol_factory, self.operation_dest)
        return self._dest_url

    def source_url(self, source: RequestSource) -> str:
        """Source PFN for the given source; computed on first access and cached per source RSE."""
        url = self._source_urls.get(source.rse)
        if not url:
            self._source_urls[source.rse] = url = self._generate_source_url(
                source,
                self.dst,
                rws=self.rws,
                protocol_factory=self.protocol_factory,
                operation=self.operation_src
            )
        return url

    def dest_protocol(self) -> "RSEProtocol":
        """Protocol object used for the destination-side operation."""
        return self.protocol_factory.protocol(self.dst.rse, self.dst.scheme, self.operation_dest)

    def source_protocol(self, source: RequestSource) -> "RSEProtocol":
        """Protocol object used for the source-side operation on the given source."""
        return self.protocol_factory.protocol(source.rse, source.scheme, self.operation_src)

    @staticmethod
    def __rewrite_source_url(source_url, source_sign_url, dest_sign_url, source_scheme):
        """
        Parametrize source url for some special cases of source and destination schemes
        """
        # For signed cloud destinations (and optionally for plain webdav transfers when
        # WEBDAV_TRANSFER_MODE is configured), request a specific third-party-copy mode.
        if dest_sign_url == 'gcs':
            if source_scheme in ['davs', 'https']:
                source_url += '?copy_mode=push'
        elif dest_sign_url == 's3':
            if source_scheme in ['davs', 'https']:
                source_url += '?copy_mode=push'
        elif WEBDAV_TRANSFER_MODE:
            if source_scheme in ['davs', 'https']:
                source_url += '?copy_mode=%s' % WEBDAV_TRANSFER_MODE

        # Rewrite the scheme of signed sources: davs/https -> gclouds or s3s.
        source_sign_url_map = {'gcs': 'gclouds', 's3': 's3s'}
        if source_sign_url in source_sign_url_map:
            if source_url[:7] == 'davs://':
                source_url = source_sign_url_map[source_sign_url] + source_url[4:]
            if source_url[:8] == 'https://':
                source_url = source_sign_url_map[source_sign_url] + source_url[5:]

        # Collapse "srm+https://" to plain "srm://".
        if source_url[:12] == 'srm+https://':
            source_url = 'srm' + source_url[9:]
        return source_url

    @staticmethod
    def __rewrite_dest_url(dest_url, dest_sign_url):
        """
        Parametrize destination url for some special cases of destination schemes
        """
        # NOTE(review): re.sub replaces the first occurrence anywhere in the URL,
        # not only the scheme prefix — presumably safe for the URLs produced here; confirm.
        if dest_sign_url == 'gcs':
            dest_url = re.sub('davs', 'gclouds', dest_url)
            dest_url = re.sub('https', 'gclouds', dest_url)
        elif dest_sign_url == 's3':
            dest_url = re.sub('davs', 's3s', dest_url)
            dest_url = re.sub('https', 's3s', dest_url)

        # Collapse "srm+https://" to plain "srm://".
        if dest_url[:12] == 'srm+https://':
            dest_url = 'srm' + dest_url[9:]
        return dest_url

    @classmethod
    def _generate_source_url(cls, src: RequestSource, dst: TransferDestination, rws: RequestWithSources, protocol_factory: ProtocolFactory, operation: str):
        """
        Generate the source url which will be used as origin to copy the file from request rws towards the given dst endpoint
        """
        # Get source protocol
        protocol = protocol_factory.protocol(src.rse, src.scheme, operation)

        # Compute the source URL
        source_sign_url = src.rse.attributes.get(RseAttr.SIGN_URL, None)
        dest_sign_url = dst.rse.attributes.get(RseAttr.SIGN_URL, None)
        lfn: "LFNDict" = {
            'scope': rws.scope.external,  # type: ignore (scope.external might be None)
            'name': rws.name,
            'path': src.file_path
        }
        source_url = list(protocol.lfns2pfns(lfns=lfn).values())[0]
        source_url = cls.__rewrite_source_url(source_url, source_sign_url=source_sign_url, dest_sign_url=dest_sign_url, source_scheme=src.scheme)
        return source_url

    @classmethod
    def _generate_dest_url(cls, dst: TransferDestination, rws: RequestWithSources, protocol_factory: ProtocolFactory, operation: str):
        """
        Generate the destination url for copying the file of request rws
        """
        # Get destination protocol
        protocol = protocol_factory.protocol(dst.rse, dst.scheme, operation)

        if dst.rse.info['deterministic']:
            lfn: "LFNDict" = {
                'scope': rws.scope.external,  # type: ignore (scope.external might be None)
                'name': rws.name
            }
            dest_url = list(protocol.lfns2pfns(lfns=lfn).values())[0]
        else:
            # compute dest url in case of non deterministic
            # naming convention, etc.
            dsn = get_dsn(rws.scope, rws.name, rws.attributes.get('dsn', None))
            # DQ2 path always starts with /, but prefix might not end with /
            naming_convention = dst.rse.attributes.get(RseAttr.NAMING_CONVENTION, None)
            # NOTE(review): dest_path is only assigned when scope.external is not None;
            # a None external scope on a non-deterministic RSE would raise NameError below — confirm intended.
            if rws.scope.external is not None:
                dest_path = construct_non_deterministic_pfn(dsn, rws.scope.external, rws.name, naming_convention)
            if dst.rse.is_tape():
                # On tape, retried/recovery uploads get a unique, timestamped path.
                if rws.retry_count or rws.activity == 'Recovery':
                    dest_path = '%s_%i' % (dest_path, int(time.time()))

            lfn: "LFNDict" = {
                'scope': rws.scope.external,  # type: ignore (scope.external might be None)
                'name': rws.name,
                'path': dest_path
            }
            dest_url = list(protocol.lfns2pfns(lfns=lfn).values())[0]

        dest_sign_url = dst.rse.attributes.get(RseAttr.SIGN_URL, None)
        dest_url = cls.__rewrite_dest_url(dest_url, dest_sign_url=dest_sign_url)
        return dest_url
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
class StageinTransferImplementation(DirectTransferImplementation):
    """
    A definition of a transfer which triggers a stagein operation.
    - The source and destination url are identical
    - must be from TAPE to non-TAPE RSE
    - can only have one source
    """
    def __init__(
            self,
            source: RequestSource,
            destination: TransferDestination,
            rws: RequestWithSources,
            protocol_factory: ProtocolFactory,
            operation_src: str,
            operation_dest: str
    ):
        # Reject anything that is not TAPE -> DISK ...
        if not source.rse.is_tape() or destination.rse.is_tape():
            # allow staging_required QoS RSE to be TAPE to TAPE for pin
            if not destination.rse.attributes.get(RseAttr.STAGING_REQUIRED, None):
                raise RucioException("Stageing request {} must be from TAPE to DISK rse. Got {} and {}.".format(rws, source, destination))
        super().__init__(source, destination, rws, protocol_factory, operation_src, operation_dest)

    @property
    def dest_url(self) -> str:
        """
        The staging URL: the source's known URL if available, otherwise a PFN
        generated on the source RSE (note: generated via _generate_source_url with
        the destination-side operation).
        """
        if not self._dest_url:
            self._dest_url = self.src.url if self.src.url else self._generate_source_url(self.src,
                                                                                         self.dst,
                                                                                         rws=self.rws,
                                                                                         protocol_factory=self.protocol_factory,
                                                                                         operation=self.operation_dest)
        return self._dest_url

    def source_url(self, source: RequestSource) -> str:
        # Source and dest url is the same for stagein requests
        return self.dest_url
|
|
295
|
+
|
|
296
|
+
|
|
297
|
+
def transfer_path_str(transfer_path: "list[DirectTransfer]") -> str:
|
|
298
|
+
"""
|
|
299
|
+
an implementation of __str__ for a transfer path, which is a list of direct transfers, so not really an object
|
|
300
|
+
"""
|
|
301
|
+
if not transfer_path:
|
|
302
|
+
return 'empty transfer path'
|
|
303
|
+
|
|
304
|
+
multi_tt = False
|
|
305
|
+
if len({hop.rws.transfertool for hop in transfer_path if hop.rws.transfertool}) > 1:
|
|
306
|
+
# The path relies on more than one transfertool
|
|
307
|
+
multi_tt = True
|
|
308
|
+
|
|
309
|
+
if len(transfer_path) == 1:
|
|
310
|
+
return str(transfer_path[0])
|
|
311
|
+
|
|
312
|
+
path_str = str(transfer_path[0].src.rse)
|
|
313
|
+
for hop in transfer_path:
|
|
314
|
+
path_str += '--{request_id}{transfertool}->{destination}'.format(
|
|
315
|
+
request_id=hop.rws.request_id or '',
|
|
316
|
+
transfertool=':{}'.format(hop.rws.transfertool) if multi_tt else '',
|
|
317
|
+
destination=hop.dst.rse,
|
|
318
|
+
)
|
|
319
|
+
return path_str
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
@transactional_session
def mark_submitting(
        transfer: "DirectTransfer",
        external_host: str,
        *,
        logger: "Callable",
        session: "Session",
):
    """
    Mark a transfer as submitting

    :param transfer: A transfer object
    :param external_host: Name of the external transfertool host the transfer is submitted to.
    :param logger: Decorated logger used for progress messages.
    :param session: Database session to use.
    :raises RequestNotFound: if the request does not exist or is not in QUEUED state.
    """

    log_str = 'PREPARING REQUEST %s DID %s:%s TO SUBMITTING STATE PREVIOUS %s FROM %s TO %s USING %s ' % (transfer.rws.request_id,
                                                                                                          transfer.rws.scope,
                                                                                                          transfer.rws.name,
                                                                                                          transfer.rws.previous_attempt_id,
                                                                                                          [transfer.source_url(s) for s in transfer.sources],
                                                                                                          transfer.dest_url,
                                                                                                          external_host)
    logger(logging.DEBUG, "%s", log_str)

    # Guarded state transition QUEUED -> SUBMITTING: the WHERE clause makes the
    # update a no-op when another worker already moved the request on.
    stmt = update(
        models.Request
    ).where(
        models.Request.id == transfer.rws.request_id,
        models.Request.state == RequestState.QUEUED
    ).execution_options(
        synchronize_session=False
    ).values(
        {
            'state': RequestState.SUBMITTING,
            'external_id': None,
            'external_host': external_host,
            'dest_url': transfer.dest_url,
            'submitted_at': datetime.datetime.utcnow(),
        }
    )
    rowcount = session.execute(stmt).rowcount

    # rowcount == 0 means the request vanished or lost the QUEUED-state race.
    if rowcount == 0:
        raise RequestNotFound("Failed to prepare transfer: request %s does not exist or is not in queued state" % transfer.rws)
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
@transactional_session
def ensure_db_sources(
        transfer_path: "list[DirectTransfer]",
        *,
        logger: "Callable",
        session: "Session",
):
    """
    Ensure the needed DB source objects exist

    For every source of every hop in the path, either mark an existing
    models.Source row as in use or create a new one.

    :param transfer_path: The (possibly multihop) path of direct transfers.
    :param logger: Decorated logger (currently unused in the body).
    :param session: Database session to use.
    """

    # First collect the full set of Source rows that should exist.
    desired_sources = []
    for transfer in transfer_path:

        for source in transfer.sources:
            common_source_attrs = {
                "scope": transfer.rws.scope,
                "name": transfer.rws.name,
                "rse_id": source.rse.id,
                "dest_rse_id": transfer.dst.rse.id,
                "ranking": source.ranking,
                "bytes": transfer.rws.byte_count,
                "url": transfer.source_url(source),
                "is_using": True,
            }

            desired_sources.append({'request_id': transfer.rws.request_id, **common_source_attrs})
            if len(transfer_path) > 1 and transfer is not transfer_path[-1]:
                # For multihop transfers, each hop's source is also an initial transfer's source.
                desired_sources.append({'request_id': transfer_path[-1].rws.request_id, **common_source_attrs})

    # Upsert: try to flip is_using on an existing row; insert a new row if none matched.
    for source in desired_sources:
        stmt = update(
            models.Source
        ).where(
            models.Source.request_id == source['request_id'],
            models.Source.rse_id == source['rse_id']
        ).execution_options(
            synchronize_session=False
        ).values(
            is_using=True
        )
        src_rowcount = session.execute(stmt).rowcount
        if src_rowcount == 0:
            models.Source(**source).save(session=session, flush=False)
|
|
413
|
+
|
|
414
|
+
|
|
415
|
+
@transactional_session
def set_transfers_state(
        transfers,
        state: "RequestState",
        submitted_at: datetime.datetime,
        external_host: str,
        external_id: str,
        transfertool: str,
        *,
        session: "Session",
        logger
):
    """
    Update the transfer info of a request.

    Moves each request from SUBMITTING to the given state, records the chosen
    transfertool/external host/id, and emits one monitoring message per transfer.

    :param transfers: Iterable of transfer objects sharing the same submission.
    :param state: The new request state to set.
    :param submitted_at: Submission timestamp recorded on the requests.
    :param external_host: The transfertool host the transfers were submitted to.
    :param external_id: The transfertool job id.
    :param transfertool: Name of the transfertool used.
    :param session: Database session to use.
    :param logger: Decorated logger.
    :raises RucioException: if a request is missing / not in SUBMITTING state, or on DB integrity errors.
    """

    logger(logging.INFO, 'Setting state(%s), transfertool(%s), external_host(%s) and eid(%s) for transfers: %s',
           state.name, transfertool, external_host, external_id, ', '.join(t.rws.request_id for t in transfers))
    try:
        for transfer in transfers:
            rws = transfer.rws
            logger(logging.DEBUG, 'COPYING REQUEST %s DID %s:%s USING %s with state(%s) with eid(%s)' % (rws.request_id, rws.scope, rws.name, external_host, state, external_id))
            # Guarded transition SUBMITTING -> state; no-op if the request moved on.
            stmt = update(
                models.Request
            ).where(
                models.Request.id == transfer.rws.request_id,
                models.Request.state == RequestState.SUBMITTING
            ).execution_options(
                synchronize_session=False
            ).values(
                {
                    models.Request.state: state,
                    models.Request.external_id: external_id,
                    models.Request.external_host: external_host,
                    models.Request.source_rse_id: transfer.src.rse.id,
                    models.Request.submitted_at: submitted_at,
                    models.Request.transfertool: transfertool,
                }
            )
            rowcount = session.execute(stmt).rowcount

            if rowcount == 0:
                raise RucioException("%s: failed to set transfer state: request doesn't exist or is not in SUBMITTING state" % rws)

            # Fetch the DID datatype for the monitoring message.
            stmt = select(
                models.DataIdentifier.datatype
            ).where(
                models.DataIdentifier.scope == rws.scope,
                models.DataIdentifier.name == rws.name,
            )
            datatype = session.execute(stmt).scalar_one_or_none()

            # Build the monitoring payload for this transfer.
            msg = {'request-id': rws.request_id,
                   'request-type': rws.request_type,
                   'scope': rws.scope.external,
                   'name': rws.name,
                   'dataset': None,
                   'datasetScope': None,
                   'src-rse-id': transfer.src.rse.id,
                   'src-rse': transfer.src.rse.name,
                   'dst-rse-id': transfer.dst.rse.id,
                   'dst-rse': transfer.dst.rse.name,
                   'state': state,
                   'activity': rws.activity,
                   'file-size': rws.byte_count,
                   'bytes': rws.byte_count,
                   'checksum-md5': rws.md5,
                   'checksum-adler': rws.adler32,
                   'external-id': external_id,
                   'external-host': external_host,
                   'queued_at': str(submitted_at),
                   'datatype': datatype}
            # Only non-default VOs are tagged explicitly.
            if rws.scope.vo != 'def':
                msg['vo'] = rws.scope.vo

            # Attach the containing dataset, if known from the request attributes.
            ds_scope = transfer.rws.attributes.get('ds_scope')
            if ds_scope:
                msg['datasetScope'] = ds_scope
            ds_name = transfer.rws.attributes.get('ds_name')
            if ds_name:
                msg['dataset'] = ds_name

            # Message event type, e.g. "transfer-submitted" or "stagein-submitted".
            if msg['request-type']:
                transfer_status = '%s-%s' % (msg['request-type'].name, msg['state'].name)
            else:
                transfer_status = 'transfer-%s' % msg['state']
            transfer_status = transfer_status.lower()

            message_core.add_message(transfer_status, msg, session=session)

    except IntegrityError as error:
        raise RucioException(error.args)

    logger(logging.DEBUG, 'Finished to register transfer state for %s' % external_id)
|
|
511
|
+
|
|
512
|
+
|
|
513
|
+
@transactional_session
def update_transfer_state(
        tt_status_report: 'TransferStatusReport',
        stats_manager: request_core.TransferStatsManager,
        *,
        session: "Session",
        logger=logging.log
):
    """
    Used by poller and consumer to update the internal state of requests,
    after the response by the external transfertool.

    :param tt_status_report: The transfertool status update, retrieved via request.query_request().
    :param stats_manager: Collector for per-link transfer statistics.
    :param session: The database session to use.
    :param logger: Optional decorated logger that can be passed from the calling daemons or servers.
    :returns: The number of updated requests
    """

    request_id = tt_status_report.request_id
    nb_updated = 0
    try:
        fields_to_update = tt_status_report.get_db_fields_to_update(session=session, logger=logger)
        if not fields_to_update:
            # Nothing to change; just touch the request so it is not considered stale.
            request_core.update_request(request_id, raise_on_missing=True, session=session)
            # NOTE(review): returns False here (== 0 numerically) while other paths return ints — confirm callers tolerate it.
            return False
        else:
            logger(logging.INFO, 'UPDATING REQUEST %s FOR %s with changes: %s' % (str(request_id), tt_status_report, fields_to_update))

            request = request_core.get_request(request_id, session=session)
            updated = transition_request_state(request_id, request=request, session=session, **fields_to_update)

            if not updated:
                return nb_updated
            nb_updated += 1

            # A failed intermediate hop of a multihop transfer fails the follow-up hops too.
            if tt_status_report.state == RequestState.FAILED:
                if request_core.is_intermediate_hop(request):
                    nb_updated += request_core.handle_failed_intermediate_hop(request, session=session)

            if tt_status_report.state:
                stats_manager.observe(
                    src_rse_id=request['source_rse_id'],
                    dst_rse_id=request['dest_rse_id'],
                    activity=request['activity'],
                    state=tt_status_report.state,
                    file_size=request['bytes'],
                    submitted_at=request.get('submitted_at', None),
                    started_at=fields_to_update.get('started_at', None),
                    transferred_at=fields_to_update.get('transferred_at', None),
                    session=session,
                )
            request_core.add_monitor_message(
                new_state=tt_status_report.state,
                request=request,
                additional_fields=tt_status_report.get_monitor_msg_fields(session=session, logger=logger),
                session=session
            )
            return nb_updated
    except UnsupportedOperation as error:
        logger(logging.WARNING, "Request %s doesn't exist - Error: %s" % (request_id, str(error).replace('\n', '')))
        return 0
    except Exception:
        # NOTE(review): swallows the exception and implicitly returns None — confirm this best-effort behavior is intended.
        logger(logging.CRITICAL, "Exception", exc_info=True)
|
|
576
|
+
|
|
577
|
+
|
|
578
|
+
@transactional_session
def mark_transfer_lost(request, *, session: "Session", logger=logging.log):
    """
    Transition the given request to the LOST state and emit the matching
    monitoring message.

    :param request: The request (dict-like) to mark as lost.
    :param session: Database session to use.
    :param logger: Optional decorated logger.
    """
    lost_state = RequestState.LOST
    lost_reason = "The FTS job lost"

    transition_request_state(
        request['id'],
        state=lost_state,
        external_id=request['external_id'],
        err_msg=request_core.get_transfer_error(lost_state, lost_reason),
        session=session,
        logger=logger,
    )

    request_core.add_monitor_message(new_state=lost_state, request=request, additional_fields={'reason': lost_reason}, session=session)
|
|
587
|
+
|
|
588
|
+
|
|
589
|
+
@METRICS.count_it
@transactional_session
def touch_transfer(external_host, transfer_id, *, session: "Session"):
    """
    Update the timestamp of requests in a transfer. Fails silently if the transfer_id does not exist.
    :param external_host: Name of the external host (currently unused in the body).
    :param transfer_id: External transfer job id as a string.
    :param session: Database session to use.
    :raises RucioException: on DB integrity errors.
    """
    try:
        # don't touch it if it's already touched in 30 seconds
        stmt = update(
            models.Request
        ).prefix_with(
            # Oracle-only optimizer hint: force use of the external_id unique index.
            "/*+ INDEX(REQUESTS REQUESTS_EXTERNALID_UQ) */", dialect='oracle'
        ).where(
            models.Request.external_id == transfer_id,
            models.Request.state == RequestState.SUBMITTED,
            models.Request.updated_at < datetime.datetime.utcnow() - datetime.timedelta(seconds=30)
        ).execution_options(
            synchronize_session=False
        ).values(
            updated_at=datetime.datetime.utcnow()
        )
        session.execute(stmt)
    except IntegrityError as error:
        raise RucioException(error.args)
|
|
616
|
+
|
|
617
|
+
|
|
618
|
+
def _create_transfer_definitions(
        topology: "Topology",
        protocol_factory: ProtocolFactory,
        rws: RequestWithSources,
        sources: "Iterable[RequestSource]",
        max_sources: int,
        multi_source_sources: "Iterable[RequestSource]",
        limit_dest_schemes: list[str],
        operation_src: str,
        operation_dest: str,
        domain: str,
        *,
        session: "Session",
) -> "dict[RseData, list[DirectTransfer]]":
    """
    Find the all paths from sources towards the destination of the given transfer request.
    Create the transfer definitions for each point-to-point transfer (multi-source, when possible)

    :param topology: The RSE topology used for path finding.
    :param protocol_factory: Shared protocol cache for URL generation.
    :param rws: The request with its sources.
    :param sources: Candidate sources for which paths are searched.
    :param max_sources: Maximum number of extra sources to attach to a single-hop transfer.
    :param multi_source_sources: Candidate additional sources for multi-source transfers.
    :param limit_dest_schemes: Restrict destination schemes during path search.
    :param operation_src: Protocol operation on the source side (e.g. read).
    :param operation_dest: Protocol operation on the destination side (e.g. write).
    :param domain: Network domain ('wan'/'lan' — presumably; confirm with Topology docs).
    :param session: Database session to use.
    :returns: Mapping of source RSE -> transfer path (list of hops).
    """
    shortest_paths = topology.search_shortest_paths(src_nodes=[s.rse for s in sources], dst_node=rws.dest_rse,
                                                    operation_src=operation_src, operation_dest=operation_dest,
                                                    domain=domain, limit_dest_schemes=limit_dest_schemes, session=session)

    transfers_by_source = {}
    sources_by_rse = {s.rse: s for s in sources}
    paths_by_source = {sources_by_rse[rse]: path for rse, path in shortest_paths.items()}
    for source, list_hops in paths_by_source.items():
        transfer_path = []
        for hop in list_hops:
            hop_src_rse = hop['source_rse']
            hop_dst_rse = hop['dest_rse']
            # First hop keeps the original source's metadata; intermediate hops
            # start from freshly created replicas (no file path, neutral ranking).
            src = RequestSource(
                rse=hop_src_rse,
                file_path=source.file_path if hop_src_rse == source.rse else None,
                ranking=source.ranking if hop_src_rse == source.rse else 0,
                distance=hop['cumulated_distance'] if hop_src_rse == source.rse else hop['hop_distance'],
                scheme=hop['source_scheme'],
            )
            dst = TransferDestination(
                rse=hop_dst_rse,
                scheme=hop['dest_scheme'],
            )
            hop_definition = DirectTransferImplementation(
                source=src,
                destination=dst,
                operation_src=operation_src,
                operation_dest=operation_dest,
                # keep the current rws for last hop; create a new one for other hops
                rws=rws if hop_dst_rse == rws.dest_rse else RequestWithSources(
                    id_=None,
                    request_type=rws.request_type,
                    rule_id=None,
                    scope=rws.scope,
                    name=rws.name,
                    md5=rws.md5,
                    adler32=rws.adler32,
                    byte_count=rws.byte_count,
                    activity=rws.activity,
                    attributes={
                        'activity': rws.activity,
                        'source_replica_expression': None,
                        'lifetime': None,
                        'ds_scope': None,
                        'ds_name': None,
                        'bytes': rws.byte_count,
                        'md5': rws.md5,
                        'adler32': rws.adler32,
                        'priority': None,
                        'allow_tape_source': True
                    },
                    previous_attempt_id=None,
                    dest_rse=hop_dst_rse,
                    account=rws.account,
                    retry_count=0,
                    priority=rws.priority,
                    transfertool=rws.transfertool,
                ),
                protocol_factory=protocol_factory,
            )

            transfer_path.append(hop_definition)
        transfers_by_source[source.rse] = transfer_path

    # create multi-source transfers: add additional sources if possible
    for transfer_path in transfers_by_source.values():
        if len(transfer_path) == 1 and not transfer_path[0].src.rse.is_tape():
            # Multiple single-hop DISK rses can be used together in "multi-source" transfers
            #
            # Try adding additional single-hop DISK rses sources to the transfer
            main_source_schemes = __add_compatible_schemes(schemes=[transfer_path[0].dst.scheme], allowed_schemes=SUPPORTED_PROTOCOLS)
            added_sources = 0
            # Best sources first: highest ranking, then smallest distance.
            for source in sorted(multi_source_sources, key=lambda s: (-s.ranking, s.distance)):
                if added_sources >= max_sources:
                    break

                edge = topology.edge(source.rse, transfer_path[0].dst.rse)
                if not edge:
                    # There is no direct connection between this source and the destination
                    continue

                if source.rse == transfer_path[0].src.rse:
                    # This is the main source. Don't add a duplicate.
                    continue

                if source.rse.is_tape():
                    # Tape sources are never mixed into multi-source transfers.
                    continue

                try:
                    matching_scheme = rsemgr.find_matching_scheme(
                        rse_settings_src=source.rse.info,
                        rse_settings_dest=transfer_path[0].dst.rse.info,
                        operation_src=operation_src,
                        operation_dest=operation_dest,
                        domain=domain,
                        scheme=main_source_schemes)
                except RSEProtocolNotSupported:
                    continue

                transfer_path[0].sources.append(
                    RequestSource(
                        rse=source.rse,
                        file_path=source.file_path,
                        ranking=source.ranking,
                        distance=edge.cost,
                        scheme=matching_scheme[1],
                    )
                )
                added_sources += 1
    return transfers_by_source
|
|
746
|
+
|
|
747
|
+
|
|
748
|
+
def _create_stagein_definitions(
        rws: RequestWithSources,
        sources: "Iterable[RequestSource]",
        limit_dest_schemes: list[str],
        operation_src: str,
        operation_dest: str,
        protocol_factory: ProtocolFactory,
) -> "dict[RseData, list[DirectTransfer]]":
    """
    for each source, create a single-hop transfer path with a one stageing definition inside

    :param rws: The request with its sources.
    :param sources: Candidate tape sources for the stagein.
    :param limit_dest_schemes: Scheme restriction (a list — deliberately passed where a single scheme is expected, see the type-ignore comments).
    :param operation_src: Protocol operation on the source side.
    :param operation_dest: Protocol operation on the destination side.
    :param protocol_factory: Shared protocol cache for URL generation.
    :returns: Mapping of source RSE -> single-element transfer path.
    """
    transfers_by_source = {
        source.rse: [
            cast('DirectTransfer', StageinTransferImplementation(
                source=RequestSource(
                    rse=source.rse,
                    file_path=source.file_path,
                    url=source.url,
                    scheme=limit_dest_schemes,  # type: ignore (list passed instead of single scheme)
                ),
                destination=TransferDestination(
                    rse=rws.dest_rse,
                    scheme=limit_dest_schemes,  # type: ignore (list passed instead of single scheme)
                ),
                operation_src=operation_src,
                operation_dest=operation_dest,
                rws=rws,
                protocol_factory=protocol_factory,
            ))

        ]
        for source in sources
    }
    return transfers_by_source
|
|
782
|
+
|
|
783
|
+
|
|
784
|
+
def get_dsn(scope, name, dsn):
    """
    Return the dataset name for the DID scope:name.

    A provided, non-empty *dsn* is returned unchanged; otherwise the name of the
    first parent dataset is used, with 'other' as the fallback.
    """
    if dsn:
        return dsn
    # Fall back to the first containing dataset, if any.
    return next(
        (parent['name'] for parent in did.list_parent_dids(scope, name) if parent['type'] == DIDType.DATASET),
        'other',
    )
|
|
792
|
+
|
|
793
|
+
|
|
794
|
+
def __compress_multihops(
        paths_by_source: "Iterable[tuple[RequestSource, Sequence[DirectTransfer]]]",
        sources: "Iterable[RequestSource]",
) -> "Iterator[tuple[RequestSource, Sequence[DirectTransfer]]]":
    """
    Compress multihop transfers which contain other sources as part of itself.
    For example: multihop A->B->C and B is a source, compress A->B->C into B->C

    Also deduplicates: only the first path seen for each resulting first-hop
    source RSE is yielded.

    :param paths_by_source: (source, transfer path) pairs to compress.
    :param sources: All sources of the request; hops starting at one of these RSEs can become the new path head.
    :returns: Iterator of (source, possibly shortened path) pairs.
    """
    source_rses = {s.rse.id for s in sources}
    seen_source_rses = set()
    for source, path in paths_by_source:
        if len(path) > 1:
            # find the index of the first hop starting from the end which is also a source. Path[0] will always be a source.
            # Bugfix: the fallback used to be the tuple (0, None), which would have
            # raised TypeError in the `> 0` comparison below had it ever been hit;
            # the correct fallback is the integer 0.
            last_source_idx = next((idx for idx, hop in reversed(list(enumerate(path))) if hop.src.rse.id in source_rses), 0)
            if last_source_idx > 0:
                path = path[last_source_idx:]

        # Deduplicate paths from same source
        src_rse_id = path[0].src.rse.id
        if src_rse_id not in seen_source_rses:
            seen_source_rses.add(src_rse_id)
            yield source, path
|
|
814
|
+
|
|
815
|
+
|
|
816
|
+
class TransferPathBuilder:
    """
    Builds (and caches, per request id) the candidate transfer definitions for
    requests: one transfer path per source RSE towards the request destination.
    """

    def __init__(
            self,
            topology: "Topology",
            protocol_factory: ProtocolFactory,
            max_sources: int,
            preparer_mode: bool = False,
            schemes: "Optional[list[str]]" = None,
            failover_schemes: "Optional[list[str]]" = None,
            requested_source_only: bool = False,
    ):
        self.failover_schemes = failover_schemes if failover_schemes is not None else []
        self.schemes = schemes if schemes is not None else []
        self.topology = topology
        self.preparer_mode = preparer_mode
        self.protocol_factory = protocol_factory
        self.max_sources = max_sources
        self.requested_source_only = requested_source_only

        # Cache: request_id -> mapping of source RseData to its transfer path.
        # NOTE(review): unbounded for the lifetime of this builder instance --
        # presumably builders are short-lived (one per daemon iteration); confirm.
        self.definition_by_request_id = {}

    def build_or_return_cached(
            self,
            rws: RequestWithSources,
            sources: "Iterable[RequestSource]",
            *,
            logger: "LoggerFunction" = logging.log,
            session: "Session"
    ) -> "Mapping[RseData, Sequence[DirectTransfer]]":
        """
        Warning: The function currently caches the result for the given request and returns it for later calls
        with the same request id. As a result: it can return more (or less) sources than what is provided in the
        `sources` argument. This is done for performance reasons. As of time of writing, this behavior is not problematic
        for the callers of this method.
        """
        definition = self.definition_by_request_id.get(rws.request_id)
        if definition:
            return definition

        # Retried requests (previous_attempt_id set) may switch to failover schemes
        transfer_schemes = self.schemes
        if rws.previous_attempt_id and self.failover_schemes:
            transfer_schemes = self.failover_schemes

        # When configured, restrict to the explicitly requested source
        # (but only if it is still among the provided candidates)
        candidate_sources = sources
        if self.requested_source_only and rws.requested_source:
            candidate_sources = [rws.requested_source] if rws.requested_source in sources else []

        if rws.request_type == RequestType.STAGEIN:
            # Stage-in uses local read/write operations instead of third-party copy
            definition = _create_stagein_definitions(
                rws=rws,
                sources=sources,
                limit_dest_schemes=transfer_schemes,
                operation_src='read',
                operation_dest='write',
                protocol_factory=self.protocol_factory
            )
        else:
            definition = _create_transfer_definitions(
                topology=self.topology,
                rws=rws,
                sources=candidate_sources,
                max_sources=self.max_sources,
                # In preparer mode no multi-source transfers are built
                multi_source_sources=[] if self.preparer_mode else sources,
                limit_dest_schemes=transfer_schemes,
                operation_src='third_party_copy_read',
                operation_dest='third_party_copy_write',
                domain='wan',
                protocol_factory=self.protocol_factory,
                session=session
            )
        self.definition_by_request_id[rws.request_id] = definition
        return definition
|
|
888
|
+
|
|
889
|
+
|
|
890
|
+
class _SkipSource:
    """Marker type backing the SKIP_SOURCE sentinel defined below."""


# Sentinel returned by ranking strategies to reject a source outright.
SKIP_SOURCE = _SkipSource()
|
|
895
|
+
|
|
896
|
+
|
|
897
|
+
class RequestRankingContext:
    """
    Per-request companion object of a SourceRankingStrategy.

    Strategies can subclass this to carry request-specific state which is then
    available while evaluating each individual source of that request.
    """

    def __init__(self, strategy: "SourceRankingStrategy", rws: "RequestWithSources"):
        self.strategy = strategy
        self.rws = rws

    def apply(self, source: RequestSource) -> "int | _SkipSource":
        # Delegate to the strategy; a None verdict is normalized to the lowest
        # possible priority (sys.maxsize), see SourceRankingStrategy.apply().
        verdict = self.strategy.apply(self, source)
        return sys.maxsize if verdict is None else verdict
|
|
912
|
+
|
|
913
|
+
|
|
914
|
+
class SourceRankingStrategy:
    """
    Base class for source ranking strategies: used to order the sources of a
    request and decide which one will actually serve the transfer.

    If filter_only is True, any value other than SKIP_SOURCE returned by apply() will be ignored.
    """
    filter_only: bool = False

    def for_request(
            self,
            rws: RequestWithSources,
            sources: "Iterable[RequestSource]",
            *,
            logger: "LoggerFunction" = logging.log,
            session: "Session"
    ) -> "RequestRankingContext":
        # Default implementation: no request-specific state needed
        return RequestRankingContext(self, rws)

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        """
        Evaluate one source of the request.

        Normally this is called indirectly, via self.for_request(...).apply(source).
        It is expected to return either SKIP_SOURCE (the source must be ignored)
        or an integer cost for the source under this strategy (smaller cost:
        higher priority). A None return is interpreted by the caller as
        sys.maxsize (very low priority), which spares filter-only strategies
        from returning an explicit integer.
        """
        pass

    class _ClassNameDescriptor:
        """Expose the concrete strategy's class name as `external_name`."""
        def __get__(self, obj, objtype=None):
            return objtype.__name__ if objtype is not None else type(obj).__name__

    external_name = _ClassNameDescriptor()
|
|
955
|
+
|
|
956
|
+
|
|
957
|
+
class SourceFilterStrategy(SourceRankingStrategy):
    """
    Base class for strategies which only filter sources out; any numeric
    verdict they return is ignored by the ranking loop.
    """
    filter_only = True
|
|
959
|
+
|
|
960
|
+
|
|
961
|
+
class EnforceSourceRSEExpression(SourceFilterStrategy):
    """
    Only keep sources which satisfy the request's `source_replica_expression`
    attribute (when set).
    """

    class _RankingContext(RequestRankingContext):
        def __init__(self, strategy: "SourceRankingStrategy", rws: "RequestWithSources", allowed_source_rses: "Optional[set[str]]"):
            super().__init__(strategy, rws)
            # None means "no expression set": every source is allowed (see apply())
            self.allowed_source_rses = allowed_source_rses

    def for_request(
            self,
            rws: RequestWithSources,
            sources: "Iterable[RequestSource]",
            *,
            logger: "LoggerFunction" = logging.log,
            session: "Session"
    ) -> "RequestRankingContext":
        # parse source expression
        allowed_source_rses = None
        source_replica_expression = rws.attributes.get('source_replica_expression', None)
        if source_replica_expression:
            try:
                parsed_rses = parse_expression(source_replica_expression, session=session)
            except InvalidRSEExpression as error:
                logger(logging.ERROR, "%s: Invalid RSE exception %s: %s", rws.request_id, source_replica_expression, str(error))
                # An unparsable expression rejects all sources (empty allowed set)
                allowed_source_rses = set()
            else:
                allowed_source_rses = {x['id'] for x in parsed_rses}
        return self._RankingContext(self, rws, allowed_source_rses)

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        ctx = cast('EnforceSourceRSEExpression._RankingContext', ctx)
        # Skip any source whose RSE id is not in the parsed allow-set
        if ctx.allowed_source_rses is not None and source.rse.id not in ctx.allowed_source_rses:
            return SKIP_SOURCE
|
|
993
|
+
|
|
994
|
+
|
|
995
|
+
class SkipRestrictedRSEs(SourceFilterStrategy):
    """
    Filter out sources on RSEs carrying the `restricted_read` attribute,
    unless the request was issued by an admin account.
    """

    def __init__(self, admin_accounts: "Optional[set[InternalAccount]]" = None):
        super().__init__()
        # Normalize the fallback to a set: the annotation promises a set and
        # the previous implementation inconsistently fell back to a list.
        # Membership tests behave the same, but this keeps the type honest.
        self.admin_accounts = admin_accounts if admin_accounts is not None else set()

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        # Non-admin accounts must not read from restricted RSEs
        if source.rse.attributes.get(RseAttr.RESTRICTED_READ) and ctx.rws.account not in self.admin_accounts:
            return SKIP_SOURCE
|
|
1004
|
+
|
|
1005
|
+
|
|
1006
|
+
class SkipBlocklistedRSEs(SourceFilterStrategy):
    """
    Filter out sources whose RSE is blocklisted for reading, unless the
    topology is configured to ignore availability.
    """

    def __init__(self, topology: "Topology"):
        super().__init__()
        self.topology = topology

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        # A source is usable when it is readable, or availability is ignored globally
        usable = source.rse.columns['availability_read'] or self.topology.ignore_availability
        if not usable:
            return SKIP_SOURCE
|
|
1016
|
+
|
|
1017
|
+
|
|
1018
|
+
class EnforceStagingBuffer(SourceFilterStrategy):
    """
    For STAGEIN requests, only keep sources whose RSE declares the request's
    destination as its staging buffer (via the `staging_buffer` attribute).
    """

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        rws = ctx.rws
        if rws.request_type != RequestType.STAGEIN:
            return None
        # The staging_buffer attribute must name the destination RSE exactly
        if source.rse.attributes.get(RseAttr.STAGING_BUFFER) != rws.dest_rse.name:
            return SKIP_SOURCE
|
|
1023
|
+
|
|
1024
|
+
|
|
1025
|
+
class RestrictTapeSources(SourceFilterStrategy):
    """
    Drop tape (or staging-required) sources when the request disallows them
    via its `allow_tape_source` attribute (allowed by default).
    """

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        # Ignore tape sources if they are not desired
        is_tape = source.rse.is_tape_or_staging_required()
        if is_tape and not ctx.rws.attributes.get("allow_tape_source", True):
            return SKIP_SOURCE
|
|
1030
|
+
|
|
1031
|
+
|
|
1032
|
+
class HighestAdjustedRankingFirst(SourceRankingStrategy):
    """
    Prefer sources with the highest ranking; tape (or staging-required)
    sources are handicapped by one ranking point.
    """

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        # Smaller cost is better: negate the ranking and add the tape penalty
        penalty = 1 if source.rse.is_tape_or_staging_required() else 0
        return penalty - source.ranking
|
|
1036
|
+
|
|
1037
|
+
|
|
1038
|
+
class PreferDiskOverTape(SourceRankingStrategy):
    """Rank disk sources (cost 0) ahead of tape/staging sources (cost 1)."""

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        return 1 if source.rse.is_tape_or_staging_required() else 0
|
|
1041
|
+
|
|
1042
|
+
|
|
1043
|
+
class PathDistance(SourceRankingStrategy):
    """
    Rank each source by the distance of the first hop of its transfer path;
    sources without any usable path are skipped.
    """

    class _RankingContext(RequestRankingContext):
        def __init__(self, strategy: "SourceRankingStrategy", rws: "RequestWithSources", paths_for_rws: "Mapping[RseData, Sequence[DirectTransfer]]"):
            super().__init__(strategy, rws)
            self.paths_for_rws = paths_for_rws

    def __init__(self, transfer_path_builder: TransferPathBuilder):
        super().__init__()
        self.transfer_path_builder = transfer_path_builder

    def for_request(
            self,
            rws: RequestWithSources,
            sources: "Iterable[RequestSource]",
            *,
            logger: "LoggerFunction" = logging.log,
            session: "Session"
    ) -> "RequestRankingContext":
        # Compute (or fetch from cache) the candidate paths once per request,
        # so that apply() only needs a dictionary lookup per source.
        per_source_paths = self.transfer_path_builder.build_or_return_cached(rws, sources, logger=logger, session=session)
        return PathDistance._RankingContext(self, rws, per_source_paths)

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        candidate_path = cast('PathDistance._RankingContext', ctx).paths_for_rws.get(source.rse)
        if not candidate_path:
            return SKIP_SOURCE
        # Cost of the source == distance of the first hop of its path
        return candidate_path[0].src.distance
|
|
1070
|
+
|
|
1071
|
+
|
|
1072
|
+
class PreferSingleHop(PathDistance):
    """Rank direct (single-hop) paths (cost 0) ahead of multihop ones (cost 1)."""

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        candidate_path = cast('PathDistance._RankingContext', ctx).paths_for_rws.get(source.rse)
        if not candidate_path:
            return SKIP_SOURCE
        return 1 if len(candidate_path) > 1 else 0
|
|
1078
|
+
|
|
1079
|
+
|
|
1080
|
+
class FailureRate(SourceRankingStrategy):
    """
    A source ranking strategy that ranks source nodes based on their failure rates for the past hour. Failure rate is
    calculated by dividing files failed by files attempted.
    """
    class _FailureRateStat:
        # Accumulator of done/failed file counts for one source RSE
        def __init__(self) -> None:
            self.files_done = 0
            self.files_failed = 0

        def incorporate_stat(self, stat: "Mapping[str, int]") -> None:
            # stat must provide 'files_done' and 'files_failed' counters
            self.files_done += stat['files_done']
            self.files_failed += stat['files_failed']

        def get_failure_rate(self) -> int:
            files_attempted = self.files_done + self.files_failed

            # If no files have been sent yet, return failure rate as 0
            if files_attempted == 0:
                return 0

            # Scale the ratio to an integer in [0, 10000] so it can be used
            # directly as an integer cost (smaller == better)
            return int((self.files_failed / files_attempted) * 10000)

    def __init__(self, stats_manager: "request_core.TransferStatsManager") -> None:
        super().__init__()
        # Aggregated stats of the last hour, keyed by source RSE id
        self.source_stats = {}

        for stat in stats_manager.load_totals(
                datetime.datetime.utcnow() - datetime.timedelta(hours=1),
                by_activity=False
        ):
            self.source_stats.setdefault(stat['src_rse_id'], self._FailureRateStat()).incorporate_stat(stat)

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        # Sources with no recorded stats get a fresh (zero-rate) accumulator
        failure_rate = cast('FailureRate', ctx.strategy).source_stats.get(source.rse.id, self._FailureRateStat()).get_failure_rate()
        return failure_rate
|
|
1116
|
+
|
|
1117
|
+
|
|
1118
|
+
class SkipSchemeMissmatch(PathDistance):
    """
    Drop sources whose path exists distance-wise but is unusable because of a
    protocol scheme mismatch.
    """
    filter_only = True

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        path = cast('PathDistance._RankingContext', ctx).paths_for_rws.get(source.rse)
        # None: no path is known at all -- let other strategies handle that case
        if path is None:
            return None
        # Empty: a path exists (according to distances) but cannot be used (scheme mismatch)
        if not path:
            return SKIP_SOURCE
|
|
1127
|
+
|
|
1128
|
+
|
|
1129
|
+
class SkipIntermediateTape(PathDistance):
    """Discard multihop paths traversing a tape RSE as an intermediate hop."""
    filter_only = True

    def apply(self, ctx: RequestRankingContext, source: RequestSource) -> "Optional[int | _SkipSource]":
        path = cast('PathDistance._RankingContext', ctx).paths_for_rws.get(source.rse)
        if not path:
            return None
        # hops after the first one start from intermediate RSEs
        intermediate_hops = path[1:]
        if any(hop.src.rse.is_tape_or_staging_required() for hop in intermediate_hops):
            return SKIP_SOURCE
|
|
1137
|
+
|
|
1138
|
+
|
|
1139
|
+
@transactional_session
def build_transfer_paths(
        topology: "Topology",
        protocol_factory: "ProtocolFactory",
        requests_with_sources: "Iterable[RequestWithSources]",
        admin_accounts: "Optional[set[InternalAccount]]" = None,
        schemes: "Optional[list[str]]" = None,
        failover_schemes: "Optional[list[str]]" = None,
        max_sources: int = 4,
        transfertools: "Optional[list[str]]" = None,
        requested_source_only: bool = False,
        preparer_mode: bool = False,
        *,
        session: "Session",
        logger: "Callable" = logging.log,
):
    """
    For each request, find all possible transfer paths from its sources, which respect the
    constraints enforced by the request (attributes, type, etc) and the arguments of this function

    build a multi-source transfer if possible: The scheme compatibility is important for multi-source transfers.
    We iterate again over the single-hop sources and build a new transfer definition while enforcing the scheme compatibility
    with the initial source.

    Each path is a list of hops. Each hop is a transfer definition.

    :returns: a 5-tuple (candidate_paths_by_request_id, reqs_no_source, reqs_scheme_mismatch,
              reqs_only_tape_source, reqs_unsupported_transfertool)
    """
    transfer_path_builder = TransferPathBuilder(
        topology=topology,
        schemes=schemes,
        failover_schemes=failover_schemes,
        protocol_factory=protocol_factory,
        max_sources=max_sources,
        preparer_mode=preparer_mode,
        requested_source_only=requested_source_only,
    )

    stats_manager = request_core.TransferStatsManager()

    # Registry of instantiable strategies, keyed by class name
    # (external_name is provided by SourceRankingStrategy._ClassNameDescriptor)
    available_strategies = {
        EnforceSourceRSEExpression.external_name: lambda: EnforceSourceRSEExpression(),
        SkipBlocklistedRSEs.external_name: lambda: SkipBlocklistedRSEs(topology=topology),
        SkipRestrictedRSEs.external_name: lambda: SkipRestrictedRSEs(admin_accounts=admin_accounts),
        EnforceStagingBuffer.external_name: lambda: EnforceStagingBuffer(),
        RestrictTapeSources.external_name: lambda: RestrictTapeSources(),
        SkipSchemeMissmatch.external_name: lambda: SkipSchemeMissmatch(transfer_path_builder=transfer_path_builder),
        SkipIntermediateTape.external_name: lambda: SkipIntermediateTape(transfer_path_builder=transfer_path_builder),
        HighestAdjustedRankingFirst.external_name: lambda: HighestAdjustedRankingFirst(),
        PreferDiskOverTape.external_name: lambda: PreferDiskOverTape(),
        PathDistance.external_name: lambda: PathDistance(transfer_path_builder=transfer_path_builder),
        PreferSingleHop.external_name: lambda: PreferSingleHop(transfer_path_builder=transfer_path_builder),
        FailureRate.external_name: lambda: FailureRate(stats_manager=stats_manager),
    }

    default_strategies = [
        EnforceSourceRSEExpression.external_name,
        SkipBlocklistedRSEs.external_name,
        SkipRestrictedRSEs.external_name,
        EnforceStagingBuffer.external_name,
        RestrictTapeSources.external_name,
        # Without the SkipSchemeMissmatch strategy, requests will never be transitioned to the
        # RequestState.MISMATCH_SCHEME state. It _MUST_ be placed before the other Path-based strategies.
        SkipSchemeMissmatch.external_name,
        SkipIntermediateTape.external_name,
        HighestAdjustedRankingFirst.external_name,
        PreferDiskOverTape.external_name,
        PathDistance.external_name,
        PreferSingleHop.external_name,
    ]
    # Operators may override the strategy chain via the 'transfers' config section
    strategy_names = config_get_list('transfers', 'source_ranking_strategies', default=default_strategies)

    try:
        strategies = list(available_strategies[name]() for name in strategy_names)
    except KeyError:
        logger(logging.ERROR, "One of the configured source_ranking_strategies doesn't exist %s", strategy_names, exc_info=True)
        raise

    if admin_accounts is None:
        admin_accounts = set()

    # Do not print full source RSE list for DIDs which have many sources. Otherwise we fill the monitoring
    # storage with data which has little to no benefit. This log message is unlikely to help debugging
    # transfers issues when there are many sources, but can be very useful for small number of sources.
    num_sources_in_logs = 4

    candidate_paths_by_request_id, reqs_no_source, reqs_only_tape_source, reqs_scheme_mismatch = {}, set(), set(), set()
    reqs_unsupported_transfertool = set()
    for rws in requests_with_sources:

        # Make sure the RSE objects carry everything the strategies will inspect
        rws.dest_rse.ensure_loaded(load_name=True, load_info=True, load_attributes=True, load_columns=True, session=session)
        all_sources = rws.sources
        for source in all_sources:
            source.rse.ensure_loaded(load_name=True, load_info=True, load_attributes=True, load_columns=True, session=session)

        # Assume request doesn't have any sources. Will be removed later if sources are found.
        reqs_no_source.add(rws.request_id)
        if not all_sources:
            logger(logging.INFO, '%s: has no sources. Skipping.', rws)
            continue

        logger(logging.DEBUG, '%s: Working on %d sources%s: %s%s',
               rws,
               len(all_sources),
               f' (priority {rws.requested_source.rse})' if requested_source_only and rws.requested_source else '',
               ','.join('{}:{}:{}'.format(src.rse, src.ranking, src.distance) for src in all_sources[:num_sources_in_logs]),
               '... and %d others' % (len(all_sources) - num_sources_in_logs) if len(all_sources) > num_sources_in_logs else '')

        # Check if destination is blocked
        if not (topology.ignore_availability or rws.dest_rse.columns['availability_write']):
            logger(logging.WARNING, '%s: dst RSE is blocked for write. Will skip the submission of new jobs', rws.request_id)
            continue
        if rws.account not in admin_accounts and rws.dest_rse.attributes.get(RseAttr.RESTRICTED_WRITE):
            logger(logging.WARNING, '%s: dst RSE is restricted for write. Will skip the submission', rws.request_id)
            continue

        if rws.transfertool and transfertools and rws.transfertool not in transfertools:
            # The request explicitly asks for a transfertool which this submitter doesn't support
            logger(logging.INFO, '%s: unsupported transfertool. Skipping.', rws.request_id)
            reqs_unsupported_transfertool.add(rws.request_id)
            reqs_no_source.remove(rws.request_id)
            continue

        # For each strategy name, gives the sources which were rejected by it
        rejected_sources = defaultdict(list)
        # Cost of each accepted source (lists of ordered costs: one for each ranking strategy)
        cost_vectors = {s: [] for s in rws.sources}
        for strategy in strategies:
            # NOTE: rebinds `sources`: only the sources which survived the previous strategies
            sources = list(cost_vectors)
            if not sources:
                # All sources where filtered by previous strategies. It's worthless to continue.
                break
            rws_strategy = strategy.for_request(rws, sources, logger=logger, session=session)
            for source in sources:
                verdict = rws_strategy.apply(source)
                if verdict is SKIP_SOURCE:
                    rejected_sources[strategy.external_name].append(source)
                    cost_vectors.pop(source)
                elif not strategy.filter_only:
                    cost_vectors[source].append(verdict)

        transfers_by_rse = transfer_path_builder.build_or_return_cached(rws, cost_vectors, logger=logger, session=session)
        # Order sources by their cost vectors, compared lexicographically
        # (strategy order in the chain == priority order of the comparison)
        candidate_paths = ((s, transfers_by_rse[s.rse]) for s, _ in sorted(cost_vectors.items(), key=operator.itemgetter(1)))
        if not preparer_mode:
            candidate_paths = __compress_multihops(candidate_paths, all_sources)
        candidate_paths = list(candidate_paths)

        ordered_sources_log = ', '.join(
            f"{s.rse}:{':'.join(str(e) for e in cost_vectors[s])}"
            f"{'(actual source ' + str(path[0].src.rse) + ')' if s.rse != path[0].src.rse else ''}"
            f"{'(multihop)' if len(path) > 1 else ''}"
            for s, path in candidate_paths[:num_sources_in_logs]
        )
        if len(candidate_paths) > num_sources_in_logs:
            ordered_sources_log += '... and %d others' % (len(candidate_paths) - num_sources_in_logs)
        filtered_rses_log = ''
        for strategy_name, sources in rejected_sources.items():
            filtered_rses_log += f'; {len(sources)} dropped by strategy "{strategy_name}": '
            filtered_rses_log += ','.join(str(s.rse) for s in sources[:num_sources_in_logs])
            if len(sources) > num_sources_in_logs:
                filtered_rses_log += '... and %d others' % (len(sources) - num_sources_in_logs)
        logger(logging.INFO, '%s: %d ordered sources: %s%s', rws, len(candidate_paths), ordered_sources_log, filtered_rses_log)

        if not candidate_paths:
            # It can happen that some sources are skipped because they are TAPE, and others because
            # of scheme mismatch. However, we can only have one state in the database. I picked to
            # prioritize setting only_tape_source without any particular reason.
            if RestrictTapeSources.external_name in rejected_sources:
                logger(logging.DEBUG, '%s: Only tape sources found' % rws.request_id)
                reqs_only_tape_source.add(rws.request_id)
                reqs_no_source.remove(rws.request_id)
            elif SkipSchemeMissmatch.external_name in rejected_sources:
                logger(logging.DEBUG, '%s: Scheme mismatch detected' % rws.request_id)
                reqs_scheme_mismatch.add(rws.request_id)
                reqs_no_source.remove(rws.request_id)
            else:
                logger(logging.DEBUG, '%s: No candidate path found' % rws.request_id)
            continue

        candidate_paths_by_request_id[rws.request_id] = [path for _, path in candidate_paths]
        reqs_no_source.remove(rws.request_id)

    return candidate_paths_by_request_id, reqs_no_source, reqs_scheme_mismatch, reqs_only_tape_source, reqs_unsupported_transfertool
|
|
1320
|
+
|
|
1321
|
+
|
|
1322
|
+
def __add_compatible_schemes(schemes, allowed_schemes):
    """
    Add the compatible schemes to a list of schemes

    :param schemes: Schemes as input.
    :param allowed_schemes: Allowed schemes, only these can be in the output.
    :returns: List of schemes
    """
    compatible = set()
    for scheme in schemes:
        if scheme in allowed_schemes:
            compatible.add(scheme)
        # also include the mapped equivalents of this scheme, when allowed
        for mapped_scheme in constants.SCHEME_MAP.get(scheme, []):
            if mapped_scheme in allowed_schemes:
                compatible.add(mapped_scheme)
    return list(compatible)
|
|
1340
|
+
|
|
1341
|
+
|
|
1342
|
+
@read_session
def list_transfer_admin_accounts(*, session: "Session") -> "set[InternalAccount]":
    """
    List admin accounts, memoizing the result in the in-memory cache region.
    """
    cached = REGION_ACCOUNTS.get('transfer_admin_accounts')
    if isinstance(cached, NoValue):
        # Cache miss: query the accounts carrying the admin flag and store them
        cached = [acc['account'] for acc in list_accounts(filter_={'admin': True}, session=session)]
        REGION_ACCOUNTS.set('transfer_admin_accounts', cached)
    return set(cached)
|
|
1353
|
+
|
|
1354
|
+
|
|
1355
|
+
def update_transfer_priority(transfers_to_update, logger=logging.log):
    """
    Push new priorities to FTS for already-submitted transfers.

    :param transfers_to_update: dict {external_host1: {transfer_id1: priority, transfer_id2: priority, ...}, ...}
    :param logger: decorated logger instance
    """
    for host, priority_by_id in transfers_to_update.items():
        # one transfertool connection per external host
        fts = FTS3Transfertool(external_host=host)
        for transfer_id, priority in priority_by_id.items():
            res = fts.update_priority(transfer_id=transfer_id, priority=priority)
            logger(logging.DEBUG, "Updated transfer %s priority in transfertool to %s: %s" % (transfer_id, priority, res['http_message']))
|
|
1368
|
+
|
|
1369
|
+
|
|
1370
|
+
def cancel_transfers(transfers_to_cancel, logger=logging.log):
    """
    Cancel a batch of transfers in fts, best-effort: per-transfer failures are
    logged and do not abort the loop.

    :param transfers_to_cancel: dict {external_host1: {transfer_id1, transfer_id2}, external_host2: [...], ...}
    :param logger: decorated logger instance
    """
    for host, transfer_ids in transfers_to_cancel.items():
        fts = FTS3Transfertool(external_host=host)
        for transfer_id in transfer_ids:
            try:
                fts.cancel(transfer_ids=[transfer_id])
                logger(logging.DEBUG, "Cancelled FTS3 transfer %s on %s" % (transfer_id, fts))
            except Exception as error:
                logger(logging.WARNING, 'Could not cancel FTS3 transfer %s on %s: %s' % (transfer_id, fts, str(error)))
|
|
1386
|
+
|
|
1387
|
+
|
|
1388
|
+
@METRICS.count_it
def cancel_transfer(transfertool_obj, transfer_id):
    """
    Cancel a transfer based on external transfer id.

    :param transfertool_obj: Transfertool object to be used for cancellation.
    :param transfer_id: External-ID as a 32 character hex string.
    :raises RucioException: if the transfertool reports any error during cancellation.
    """

    try:
        transfertool_obj.cancel(transfer_ids=[transfer_id])
    except Exception:
        # Re-raise as a RucioException carrying the formatted original traceback
        raise RucioException('Could not cancel FTS3 transfer %s on %s: %s' % (transfer_id, transfertool_obj, traceback.format_exc()))
|
|
1401
|
+
|
|
1402
|
+
|
|
1403
|
+
@transactional_session
def prepare_transfers(
        candidate_paths_by_request_id: "dict[str, list[list[DirectTransfer]]]",
        logger: "LoggerFunction" = logging.log,
        transfertools: "Optional[list[str]]" = None,
        *,
        session: "Session",
) -> tuple[list[str], list[str]]:
    """
    Update transfer requests according to preparer settings.

    For each request, pick the first candidate path all of whose hops are
    supported by a common transfertool, then update the request's state,
    source RSE and transfertool in the database.

    :returns: tuple (updated request ids, request ids for which no supported
              transfertool was found)
    """

    reqs_no_transfertool = []
    updated_reqs = []
    for request_id, candidate_paths in candidate_paths_by_request_id.items():
        selected_source = None
        transfertool = None
        # The request object is attached to every hop; take it from the last
        # hop of the first candidate path
        rws = candidate_paths[0][-1].rws

        for candidate_path in candidate_paths:
            source = candidate_path[0].src
            all_hops_ok = True
            # reset for each candidate path; keeps the last hop's transfertool
            transfertool = None
            for hop in candidate_path:
                common_transfertools = get_supported_transfertools(hop.src.rse, hop.dst.rse, transfertools=transfertools, session=session)
                if not common_transfertools:
                    all_hops_ok = False
                    break
                # We need the last hop transfertool. Always prioritize fts3 if it exists.
                transfertool = 'fts3' if 'fts3' in common_transfertools else common_transfertools.pop()

            if all_hops_ok and transfertool:
                selected_source = source
                break

        if not selected_source:
            reqs_no_transfertool.append(request_id)
            logger(logging.WARNING, '%s: all available sources were filtered', rws)
            continue

        # The throttler decides whether the request is QUEUED or must WAIT
        update_dict: "dict[Any, Any]" = {
            models.Request.state.name: _throttler_request_state(
                activity=rws.activity,
                source_rse=selected_source.rse,
                dest_rse=rws.dest_rse,
                session=session,
            ),
            models.Request.source_rse_id.name: selected_source.rse.id,
        }
        if transfertool:
            update_dict[models.Request.transfertool.name] = transfertool

        request_core.update_request(rws.request_id, session=session, **update_dict)
        updated_reqs.append(request_id)

    return updated_reqs, reqs_no_transfertool
|
|
1459
|
+
|
|
1460
|
+
|
|
1461
|
+
@stream_session
def applicable_rse_transfer_limits(
    source_rse: "Optional[RseData]" = None,
    dest_rse: "Optional[RseData]" = None,
    activity: "Optional[str]" = None,
    *,
    session: "Session",
):
    """
    Yield every RseTransferLimit which must be enforced for transfers between
    the given source and destination RSEs for the given activity.

    Limits are looked up per direction (SOURCE limits on the source RSE,
    DESTINATION limits on the destination RSE). Activity-specific limits are
    yielded first, followed by the "all activities" limits, which are stored
    under the ``None`` key.
    """
    source_limits: dict = {}
    if source_rse:
        loaded_src = source_rse.ensure_loaded(load_transfer_limits=True, session=session)
        source_limits = loaded_src.transfer_limits.get(TransferLimitDirection.SOURCE, {})
    dest_limits: dict = {}
    if dest_rse:
        loaded_dst = dest_rse.ensure_loaded(load_transfer_limits=True, session=session)
        dest_limits = loaded_dst.transfer_limits.get(TransferLimitDirection.DESTINATION, {})

    # Yield order matches the original contract: activity-specific limits
    # (source then destination) before the catch-all ``None`` limits.
    lookup_keys = [activity, None] if activity is not None else [None]
    for key in lookup_keys:
        for limits in (source_limits, dest_limits):
            limit = limits.get(key)
            if limit:
                yield limit
|
|
1496
|
+
|
|
1497
|
+
|
|
1498
|
+
def _throttler_request_state(activity, source_rse, dest_rse, *, session: "Session") -> RequestState:
    """
    Takes request attributes to return a new state for the request
    based on throttler settings. Always returns QUEUED,
    if the throttler mode is not set.
    """
    # Any applicable transfer limit means the throttler must gate this
    # request, so it waits; otherwise it is immediately queued.
    has_applicable_limit = any(
        applicable_rse_transfer_limits(
            activity=activity,
            source_rse=source_rse,
            dest_rse=dest_rse,
            session=session,
        )
    )
    if has_applicable_limit:
        return RequestState.WAITING
    return RequestState.QUEUED
|
|
1509
|
+
|
|
1510
|
+
|
|
1511
|
+
@read_session
def get_supported_transfertools(
    source_rse: "RseData",
    dest_rse: "RseData",
    transfertools: "Optional[list[str]]" = None,
    *,
    session: "Session",
) -> set[str]:
    """
    Return the names of the transfertools able to perform a transfer
    from ``source_rse`` to ``dest_rse``.

    If ``transfertools`` is not given (or empty), every registered
    transfertool is considered as a candidate.
    """
    candidate_names = transfertools if transfertools else list(TRANSFERTOOL_CLASSES_BY_NAME)

    # RSE attributes are needed by can_perform_transfer(); make sure both
    # endpoints have them loaded before evaluating the candidates.
    for rse in (source_rse, dest_rse):
        rse.ensure_loaded(load_attributes=True, session=session)

    supported = set()
    for name in candidate_names:
        candidate_cls = TRANSFERTOOL_CLASSES_BY_NAME.get(name)
        if candidate_cls is not None and candidate_cls.can_perform_transfer(source_rse, dest_rse):
            supported.add(name)
    return supported
|