rucio 32.8.6__py3-none-any.whl → 35.8.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- rucio/__init__.py +0 -1
- rucio/alembicrevision.py +1 -2
- rucio/client/__init__.py +0 -1
- rucio/client/accountclient.py +45 -25
- rucio/client/accountlimitclient.py +37 -9
- rucio/client/baseclient.py +199 -154
- rucio/client/client.py +2 -3
- rucio/client/configclient.py +19 -6
- rucio/client/credentialclient.py +9 -4
- rucio/client/didclient.py +238 -63
- rucio/client/diracclient.py +13 -5
- rucio/client/downloadclient.py +162 -51
- rucio/client/exportclient.py +4 -4
- rucio/client/fileclient.py +3 -4
- rucio/client/importclient.py +4 -4
- rucio/client/lifetimeclient.py +21 -5
- rucio/client/lockclient.py +18 -8
- rucio/client/{metaclient.py → metaconventionsclient.py} +18 -15
- rucio/client/pingclient.py +0 -1
- rucio/client/replicaclient.py +15 -5
- rucio/client/requestclient.py +35 -19
- rucio/client/rseclient.py +133 -51
- rucio/client/ruleclient.py +29 -22
- rucio/client/scopeclient.py +8 -6
- rucio/client/subscriptionclient.py +47 -35
- rucio/client/touchclient.py +8 -4
- rucio/client/uploadclient.py +166 -82
- rucio/common/__init__.py +0 -1
- rucio/common/cache.py +4 -4
- rucio/common/config.py +52 -47
- rucio/common/constants.py +69 -2
- rucio/common/constraints.py +0 -1
- rucio/common/didtype.py +24 -22
- rucio/common/dumper/__init__.py +70 -41
- rucio/common/dumper/consistency.py +26 -22
- rucio/common/dumper/data_models.py +16 -23
- rucio/common/dumper/path_parsing.py +0 -1
- rucio/common/exception.py +281 -222
- rucio/common/extra.py +0 -1
- rucio/common/logging.py +54 -38
- rucio/common/pcache.py +122 -101
- rucio/common/plugins.py +153 -0
- rucio/common/policy.py +4 -4
- rucio/common/schema/__init__.py +17 -10
- rucio/common/schema/atlas.py +7 -5
- rucio/common/schema/belleii.py +7 -5
- rucio/common/schema/domatpc.py +7 -5
- rucio/common/schema/escape.py +7 -5
- rucio/common/schema/generic.py +8 -6
- rucio/common/schema/generic_multi_vo.py +7 -5
- rucio/common/schema/icecube.py +7 -5
- rucio/common/stomp_utils.py +0 -1
- rucio/common/stopwatch.py +0 -1
- rucio/common/test_rucio_server.py +2 -2
- rucio/common/types.py +262 -17
- rucio/common/utils.py +743 -451
- rucio/core/__init__.py +0 -1
- rucio/core/account.py +99 -29
- rucio/core/account_counter.py +89 -24
- rucio/core/account_limit.py +90 -24
- rucio/core/authentication.py +86 -29
- rucio/core/config.py +108 -38
- rucio/core/credential.py +14 -7
- rucio/core/did.py +680 -782
- rucio/core/did_meta_plugins/__init__.py +8 -6
- rucio/core/did_meta_plugins/did_column_meta.py +17 -12
- rucio/core/did_meta_plugins/did_meta_plugin_interface.py +60 -11
- rucio/core/did_meta_plugins/filter_engine.py +90 -50
- rucio/core/did_meta_plugins/json_meta.py +41 -16
- rucio/core/did_meta_plugins/mongo_meta.py +25 -8
- rucio/core/did_meta_plugins/postgres_meta.py +3 -4
- rucio/core/dirac.py +46 -17
- rucio/core/distance.py +66 -43
- rucio/core/exporter.py +5 -5
- rucio/core/heartbeat.py +181 -81
- rucio/core/identity.py +22 -12
- rucio/core/importer.py +23 -12
- rucio/core/lifetime_exception.py +32 -32
- rucio/core/lock.py +244 -142
- rucio/core/message.py +79 -38
- rucio/core/{meta.py → meta_conventions.py} +57 -44
- rucio/core/monitor.py +19 -13
- rucio/core/naming_convention.py +68 -27
- rucio/core/nongrid_trace.py +17 -5
- rucio/core/oidc.py +151 -29
- rucio/core/permission/__init__.py +18 -6
- rucio/core/permission/atlas.py +50 -35
- rucio/core/permission/belleii.py +6 -5
- rucio/core/permission/escape.py +8 -6
- rucio/core/permission/generic.py +82 -80
- rucio/core/permission/generic_multi_vo.py +9 -7
- rucio/core/quarantined_replica.py +91 -58
- rucio/core/replica.py +1303 -772
- rucio/core/replica_sorter.py +10 -12
- rucio/core/request.py +1133 -285
- rucio/core/rse.py +142 -102
- rucio/core/rse_counter.py +49 -18
- rucio/core/rse_expression_parser.py +6 -7
- rucio/core/rse_selector.py +41 -16
- rucio/core/rule.py +1538 -474
- rucio/core/rule_grouping.py +213 -68
- rucio/core/scope.py +50 -22
- rucio/core/subscription.py +92 -44
- rucio/core/topology.py +66 -24
- rucio/core/trace.py +42 -28
- rucio/core/transfer.py +543 -259
- rucio/core/vo.py +36 -18
- rucio/core/volatile_replica.py +59 -32
- rucio/daemons/__init__.py +0 -1
- rucio/daemons/abacus/__init__.py +0 -1
- rucio/daemons/abacus/account.py +29 -19
- rucio/daemons/abacus/collection_replica.py +21 -10
- rucio/daemons/abacus/rse.py +22 -12
- rucio/daemons/atropos/__init__.py +0 -1
- rucio/daemons/atropos/atropos.py +1 -2
- rucio/daemons/auditor/__init__.py +56 -28
- rucio/daemons/auditor/hdfs.py +17 -6
- rucio/daemons/auditor/srmdumps.py +116 -45
- rucio/daemons/automatix/__init__.py +0 -1
- rucio/daemons/automatix/automatix.py +30 -18
- rucio/daemons/badreplicas/__init__.py +0 -1
- rucio/daemons/badreplicas/minos.py +29 -18
- rucio/daemons/badreplicas/minos_temporary_expiration.py +5 -7
- rucio/daemons/badreplicas/necromancer.py +9 -13
- rucio/daemons/bb8/__init__.py +0 -1
- rucio/daemons/bb8/bb8.py +10 -13
- rucio/daemons/bb8/common.py +151 -154
- rucio/daemons/bb8/nuclei_background_rebalance.py +15 -9
- rucio/daemons/bb8/t2_background_rebalance.py +15 -8
- rucio/daemons/c3po/__init__.py +0 -1
- rucio/daemons/c3po/algorithms/__init__.py +0 -1
- rucio/daemons/c3po/algorithms/simple.py +8 -5
- rucio/daemons/c3po/algorithms/t2_free_space.py +10 -7
- rucio/daemons/c3po/algorithms/t2_free_space_only_pop.py +10 -7
- rucio/daemons/c3po/algorithms/t2_free_space_only_pop_with_network.py +30 -15
- rucio/daemons/c3po/c3po.py +81 -52
- rucio/daemons/c3po/collectors/__init__.py +0 -1
- rucio/daemons/c3po/collectors/agis.py +17 -17
- rucio/daemons/c3po/collectors/free_space.py +32 -13
- rucio/daemons/c3po/collectors/jedi_did.py +14 -5
- rucio/daemons/c3po/collectors/mock_did.py +11 -6
- rucio/daemons/c3po/collectors/network_metrics.py +12 -4
- rucio/daemons/c3po/collectors/workload.py +21 -19
- rucio/daemons/c3po/utils/__init__.py +0 -1
- rucio/daemons/c3po/utils/dataset_cache.py +15 -5
- rucio/daemons/c3po/utils/expiring_dataset_cache.py +16 -5
- rucio/daemons/c3po/utils/expiring_list.py +6 -7
- rucio/daemons/c3po/utils/popularity.py +5 -2
- rucio/daemons/c3po/utils/timeseries.py +25 -12
- rucio/daemons/cache/__init__.py +0 -1
- rucio/daemons/cache/consumer.py +21 -15
- rucio/daemons/common.py +42 -18
- rucio/daemons/conveyor/__init__.py +0 -1
- rucio/daemons/conveyor/common.py +69 -37
- rucio/daemons/conveyor/finisher.py +83 -46
- rucio/daemons/conveyor/poller.py +101 -69
- rucio/daemons/conveyor/preparer.py +35 -28
- rucio/daemons/conveyor/receiver.py +64 -21
- rucio/daemons/conveyor/stager.py +33 -28
- rucio/daemons/conveyor/submitter.py +71 -47
- rucio/daemons/conveyor/throttler.py +99 -35
- rucio/daemons/follower/__init__.py +0 -1
- rucio/daemons/follower/follower.py +12 -8
- rucio/daemons/hermes/__init__.py +0 -1
- rucio/daemons/hermes/hermes.py +57 -21
- rucio/daemons/judge/__init__.py +0 -1
- rucio/daemons/judge/cleaner.py +27 -17
- rucio/daemons/judge/evaluator.py +31 -18
- rucio/daemons/judge/injector.py +31 -23
- rucio/daemons/judge/repairer.py +28 -18
- rucio/daemons/oauthmanager/__init__.py +0 -1
- rucio/daemons/oauthmanager/oauthmanager.py +7 -8
- rucio/daemons/reaper/__init__.py +0 -1
- rucio/daemons/reaper/dark_reaper.py +15 -9
- rucio/daemons/reaper/reaper.py +109 -67
- rucio/daemons/replicarecoverer/__init__.py +0 -1
- rucio/daemons/replicarecoverer/suspicious_replica_recoverer.py +255 -116
- rucio/{api → daemons/rsedecommissioner}/__init__.py +0 -1
- rucio/daemons/rsedecommissioner/config.py +81 -0
- rucio/daemons/rsedecommissioner/profiles/__init__.py +24 -0
- rucio/daemons/rsedecommissioner/profiles/atlas.py +60 -0
- rucio/daemons/rsedecommissioner/profiles/generic.py +451 -0
- rucio/daemons/rsedecommissioner/profiles/types.py +92 -0
- rucio/daemons/rsedecommissioner/rse_decommissioner.py +280 -0
- rucio/daemons/storage/__init__.py +0 -1
- rucio/daemons/storage/consistency/__init__.py +0 -1
- rucio/daemons/storage/consistency/actions.py +152 -59
- rucio/daemons/tracer/__init__.py +0 -1
- rucio/daemons/tracer/kronos.py +47 -24
- rucio/daemons/transmogrifier/__init__.py +0 -1
- rucio/daemons/transmogrifier/transmogrifier.py +35 -26
- rucio/daemons/undertaker/__init__.py +0 -1
- rucio/daemons/undertaker/undertaker.py +10 -10
- rucio/db/__init__.py +0 -1
- rucio/db/sqla/__init__.py +16 -2
- rucio/db/sqla/constants.py +10 -1
- rucio/db/sqla/migrate_repo/__init__.py +0 -1
- rucio/db/sqla/migrate_repo/env.py +0 -1
- rucio/db/sqla/migrate_repo/versions/01eaf73ab656_add_new_rule_notification_state_progress.py +0 -1
- rucio/db/sqla/migrate_repo/versions/0437a40dbfd1_add_eol_at_in_rules.py +0 -3
- rucio/db/sqla/migrate_repo/versions/0f1adb7a599a_create_transfer_hops_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/102efcf145f4_added_stuck_at_column_to_rules.py +0 -3
- rucio/db/sqla/migrate_repo/versions/13d4f70c66a9_introduce_transfer_limits.py +1 -3
- rucio/db/sqla/migrate_repo/versions/140fef722e91_cleanup_distances_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/14ec5aeb64cf_add_request_external_host.py +0 -3
- rucio/db/sqla/migrate_repo/versions/156fb5b5a14_add_request_type_to_requests_idx.py +1 -4
- rucio/db/sqla/migrate_repo/versions/1677d4d803c8_split_rse_availability_into_multiple.py +0 -1
- rucio/db/sqla/migrate_repo/versions/16a0aca82e12_create_index_on_table_replicas_path.py +0 -2
- rucio/db/sqla/migrate_repo/versions/1803333ac20f_adding_provenance_and_phys_group.py +0 -1
- rucio/db/sqla/migrate_repo/versions/1a29d6a9504c_add_didtype_chck_to_requests.py +0 -1
- rucio/db/sqla/migrate_repo/versions/1a80adff031a_create_index_on_rules_hist_recent.py +0 -2
- rucio/db/sqla/migrate_repo/versions/1c45d9730ca6_increase_identity_length.py +0 -1
- rucio/db/sqla/migrate_repo/versions/1d1215494e95_add_quarantined_replicas_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/1d96f484df21_asynchronous_rules_and_rule_approval.py +0 -1
- rucio/db/sqla/migrate_repo/versions/1f46c5f240ac_add_bytes_column_to_bad_replicas.py +0 -3
- rucio/db/sqla/migrate_repo/versions/1fc15ab60d43_add_message_history_table.py +0 -1
- rucio/db/sqla/migrate_repo/versions/2190e703eb6e_move_rse_settings_to_rse_attributes.py +1 -2
- rucio/db/sqla/migrate_repo/versions/21d6b9dc9961_add_mismatch_scheme_state_to_requests.py +0 -1
- rucio/db/sqla/migrate_repo/versions/22cf51430c78_add_availability_column_to_table_rses.py +0 -3
- rucio/db/sqla/migrate_repo/versions/22d887e4ec0a_create_sources_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/25821a8a45a3_remove_unique_constraint_on_requests.py +1 -4
- rucio/db/sqla/migrate_repo/versions/25fc855625cf_added_unique_constraint_to_rules.py +0 -2
- rucio/db/sqla/migrate_repo/versions/269fee20dee9_add_repair_cnt_to_locks.py +0 -3
- rucio/db/sqla/migrate_repo/versions/271a46ea6244_add_ignore_availability_column_to_rules.py +0 -3
- rucio/db/sqla/migrate_repo/versions/277b5fbb41d3_switch_heartbeats_executable.py +1 -2
- rucio/db/sqla/migrate_repo/versions/27e3a68927fb_remove_replicas_tombstone_and_replicas_.py +0 -1
- rucio/db/sqla/migrate_repo/versions/2854cd9e168_added_rule_id_column.py +0 -1
- rucio/db/sqla/migrate_repo/versions/295289b5a800_processed_by_and__at_in_requests.py +0 -2
- rucio/db/sqla/migrate_repo/versions/2962ece31cf4_add_nbaccesses_column_in_the_did_table.py +0 -3
- rucio/db/sqla/migrate_repo/versions/2af3291ec4c_added_replicas_history_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/2b69addda658_add_columns_for_third_party_copy_read_.py +0 -2
- rucio/db/sqla/migrate_repo/versions/2b8e7bcb4783_add_config_table.py +1 -4
- rucio/db/sqla/migrate_repo/versions/2ba5229cb54c_add_submitted_at_to_requests_table.py +0 -3
- rucio/db/sqla/migrate_repo/versions/2cbee484dcf9_added_column_volume_to_rse_transfer_.py +1 -4
- rucio/db/sqla/migrate_repo/versions/2edee4a83846_add_source_to_requests_and_requests_.py +0 -1
- rucio/db/sqla/migrate_repo/versions/2eef46be23d4_change_tokens_pk.py +1 -3
- rucio/db/sqla/migrate_repo/versions/2f648fc909f3_index_in_rule_history_on_scope_name.py +0 -2
- rucio/db/sqla/migrate_repo/versions/3082b8cef557_add_naming_convention_table_and_closed_.py +1 -3
- rucio/db/sqla/migrate_repo/versions/30fa38b6434e_add_index_on_service_column_in_the_message_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/3152492b110b_added_staging_area_column.py +1 -2
- rucio/db/sqla/migrate_repo/versions/32c7d2783f7e_create_bad_replicas_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/3345511706b8_replicas_table_pk_definition_is_in_.py +1 -3
- rucio/db/sqla/migrate_repo/versions/35ef10d1e11b_change_index_on_table_requests.py +0 -2
- rucio/db/sqla/migrate_repo/versions/379a19b5332d_create_rse_limits_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/384b96aa0f60_created_rule_history_tables.py +2 -3
- rucio/db/sqla/migrate_repo/versions/3ac1660a1a72_extend_distance_table.py +0 -3
- rucio/db/sqla/migrate_repo/versions/3ad36e2268b0_create_collection_replicas_updates_table.py +1 -4
- rucio/db/sqla/migrate_repo/versions/3c9df354071b_extend_waiting_request_state.py +0 -1
- rucio/db/sqla/migrate_repo/versions/3d9813fab443_add_a_new_state_lost_in_badfilesstatus.py +0 -1
- rucio/db/sqla/migrate_repo/versions/40ad39ce3160_add_transferred_at_to_requests_table.py +0 -3
- rucio/db/sqla/migrate_repo/versions/4207be2fd914_add_notification_column_to_rules.py +0 -1
- rucio/db/sqla/migrate_repo/versions/42db2617c364_create_index_on_requests_external_id.py +0 -2
- rucio/db/sqla/migrate_repo/versions/436827b13f82_added_column_activity_to_table_requests.py +0 -3
- rucio/db/sqla/migrate_repo/versions/44278720f774_update_requests_typ_sta_upd_idx_index.py +0 -2
- rucio/db/sqla/migrate_repo/versions/45378a1e76a8_create_collection_replica_table.py +2 -4
- rucio/db/sqla/migrate_repo/versions/469d262be19_removing_created_at_index.py +0 -2
- rucio/db/sqla/migrate_repo/versions/4783c1f49cb4_create_distance_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/49a21b4d4357_create_index_on_table_tokens.py +1 -4
- rucio/db/sqla/migrate_repo/versions/4a2cbedda8b9_add_source_replica_expression_column_to_.py +0 -3
- rucio/db/sqla/migrate_repo/versions/4a7182d9578b_added_bytes_length_accessed_at_columns.py +0 -3
- rucio/db/sqla/migrate_repo/versions/4bab9edd01fc_create_index_on_requests_rule_id.py +0 -2
- rucio/db/sqla/migrate_repo/versions/4c3a4acfe006_new_attr_account_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/4cf0a2e127d4_adding_transient_metadata.py +0 -3
- rucio/db/sqla/migrate_repo/versions/4df2c5ddabc0_remove_temporary_dids.py +55 -0
- rucio/db/sqla/migrate_repo/versions/50280c53117c_add_qos_class_to_rse.py +0 -2
- rucio/db/sqla/migrate_repo/versions/52153819589c_add_rse_id_to_replicas_table.py +0 -2
- rucio/db/sqla/migrate_repo/versions/52fd9f4916fa_added_activity_to_rules.py +0 -3
- rucio/db/sqla/migrate_repo/versions/53b479c3cb0f_fix_did_meta_table_missing_updated_at_.py +0 -3
- rucio/db/sqla/migrate_repo/versions/5673b4b6e843_add_wfms_metadata_to_rule_tables.py +0 -3
- rucio/db/sqla/migrate_repo/versions/575767d9f89_added_source_history_table.py +1 -2
- rucio/db/sqla/migrate_repo/versions/58bff7008037_add_started_at_to_requests.py +0 -3
- rucio/db/sqla/migrate_repo/versions/58c8b78301ab_rename_callback_to_message.py +1 -3
- rucio/db/sqla/migrate_repo/versions/5f139f77382a_added_child_rule_id_column.py +1 -3
- rucio/db/sqla/migrate_repo/versions/688ef1840840_adding_did_meta_table.py +1 -2
- rucio/db/sqla/migrate_repo/versions/6e572a9bfbf3_add_new_split_container_column_to_rules.py +0 -3
- rucio/db/sqla/migrate_repo/versions/70587619328_add_comment_column_for_subscriptions.py +0 -3
- rucio/db/sqla/migrate_repo/versions/739064d31565_remove_history_table_pks.py +1 -2
- rucio/db/sqla/migrate_repo/versions/7541902bf173_add_didsfollowed_and_followevents_table.py +2 -4
- rucio/db/sqla/migrate_repo/versions/7ec22226cdbf_new_replica_state_for_temporary_.py +0 -1
- rucio/db/sqla/migrate_repo/versions/810a41685bc1_added_columns_rse_transfer_limits.py +1 -4
- rucio/db/sqla/migrate_repo/versions/83f991c63a93_correct_rse_expression_length.py +0 -2
- rucio/db/sqla/migrate_repo/versions/8523998e2e76_increase_size_of_extended_attributes_.py +0 -3
- rucio/db/sqla/migrate_repo/versions/8ea9122275b1_adding_missing_function_based_indices.py +1 -2
- rucio/db/sqla/migrate_repo/versions/90f47792bb76_add_clob_payload_to_messages.py +0 -3
- rucio/db/sqla/migrate_repo/versions/914b8f02df38_new_table_for_lifetime_model_exceptions.py +1 -3
- rucio/db/sqla/migrate_repo/versions/94a5961ddbf2_add_estimator_columns.py +0 -3
- rucio/db/sqla/migrate_repo/versions/9a1b149a2044_add_saml_identity_type.py +0 -1
- rucio/db/sqla/migrate_repo/versions/9a45bc4ea66d_add_vp_table.py +1 -2
- rucio/db/sqla/migrate_repo/versions/9eb936a81eb1_true_is_true.py +0 -2
- rucio/db/sqla/migrate_repo/versions/a08fa8de1545_transfer_stats_table.py +55 -0
- rucio/db/sqla/migrate_repo/versions/a118956323f8_added_vo_table_and_vo_col_to_rse.py +1 -3
- rucio/db/sqla/migrate_repo/versions/a193a275255c_add_status_column_in_messages.py +0 -2
- rucio/db/sqla/migrate_repo/versions/a5f6f6e928a7_1_7_0.py +1 -4
- rucio/db/sqla/migrate_repo/versions/a616581ee47_added_columns_to_table_requests.py +0 -1
- rucio/db/sqla/migrate_repo/versions/a6eb23955c28_state_idx_non_functional.py +0 -1
- rucio/db/sqla/migrate_repo/versions/a74275a1ad30_added_global_quota_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/a93e4e47bda_heartbeats.py +1 -4
- rucio/db/sqla/migrate_repo/versions/ae2a56fcc89_added_comment_column_to_rules.py +0 -1
- rucio/db/sqla/migrate_repo/versions/b0070f3695c8_add_deletedidmeta_table.py +57 -0
- rucio/db/sqla/migrate_repo/versions/b4293a99f344_added_column_identity_to_table_tokens.py +0 -3
- rucio/db/sqla/migrate_repo/versions/b5493606bbf5_fix_primary_key_for_subscription_history.py +41 -0
- rucio/db/sqla/migrate_repo/versions/b7d287de34fd_removal_of_replicastate_source.py +1 -2
- rucio/db/sqla/migrate_repo/versions/b818052fa670_add_index_to_quarantined_replicas.py +1 -3
- rucio/db/sqla/migrate_repo/versions/b8caac94d7f0_add_comments_column_for_subscriptions_.py +0 -3
- rucio/db/sqla/migrate_repo/versions/b96a1c7e1cc4_new_bad_pfns_table_and_bad_replicas_.py +1 -5
- rucio/db/sqla/migrate_repo/versions/bb695f45c04_extend_request_state.py +1 -3
- rucio/db/sqla/migrate_repo/versions/bc68e9946deb_add_staging_timestamps_to_request.py +0 -3
- rucio/db/sqla/migrate_repo/versions/bf3baa1c1474_correct_pk_and_idx_for_history_tables.py +1 -3
- rucio/db/sqla/migrate_repo/versions/c0937668555f_add_qos_policy_map_table.py +1 -2
- rucio/db/sqla/migrate_repo/versions/c129ccdb2d5_add_lumiblocknr_to_dids.py +0 -3
- rucio/db/sqla/migrate_repo/versions/ccdbcd48206e_add_did_type_column_index_on_did_meta_.py +1 -4
- rucio/db/sqla/migrate_repo/versions/cebad904c4dd_new_payload_column_for_heartbeats.py +1 -2
- rucio/db/sqla/migrate_repo/versions/d1189a09c6e0_oauth2_0_and_jwt_feature_support_adding_.py +1 -4
- rucio/db/sqla/migrate_repo/versions/d23453595260_extend_request_state_for_preparer.py +1 -3
- rucio/db/sqla/migrate_repo/versions/d6dceb1de2d_added_purge_column_to_rules.py +1 -4
- rucio/db/sqla/migrate_repo/versions/d6e2c3b2cf26_remove_third_party_copy_column_from_rse.py +0 -2
- rucio/db/sqla/migrate_repo/versions/d91002c5841_new_account_limits_table.py +1 -3
- rucio/db/sqla/migrate_repo/versions/e138c364ebd0_extending_columns_for_filter_and_.py +0 -3
- rucio/db/sqla/migrate_repo/versions/e59300c8b179_support_for_archive.py +1 -3
- rucio/db/sqla/migrate_repo/versions/f1b14a8c2ac1_postgres_use_check_constraints.py +0 -1
- rucio/db/sqla/migrate_repo/versions/f41ffe206f37_oracle_global_temporary_tables.py +1 -2
- rucio/db/sqla/migrate_repo/versions/f85a2962b021_adding_transfertool_column_to_requests_.py +1 -3
- rucio/db/sqla/migrate_repo/versions/fa7a7d78b602_increase_refresh_token_size.py +0 -2
- rucio/db/sqla/migrate_repo/versions/fb28a95fe288_add_replicas_rse_id_tombstone_idx.py +0 -1
- rucio/db/sqla/migrate_repo/versions/fe1a65b176c9_set_third_party_copy_read_and_write_.py +1 -2
- rucio/db/sqla/migrate_repo/versions/fe8ea2fa9788_added_third_party_copy_column_to_rse_.py +0 -3
- rucio/db/sqla/models.py +122 -216
- rucio/db/sqla/sautils.py +12 -5
- rucio/db/sqla/session.py +71 -43
- rucio/db/sqla/types.py +3 -4
- rucio/db/sqla/util.py +91 -69
- rucio/gateway/__init__.py +13 -0
- rucio/{api → gateway}/account.py +119 -46
- rucio/{api → gateway}/account_limit.py +12 -13
- rucio/{api → gateway}/authentication.py +106 -33
- rucio/{api → gateway}/config.py +12 -13
- rucio/{api → gateway}/credential.py +15 -4
- rucio/{api → gateway}/did.py +384 -140
- rucio/{api → gateway}/dirac.py +16 -6
- rucio/{api → gateway}/exporter.py +3 -4
- rucio/{api → gateway}/heartbeat.py +17 -5
- rucio/{api → gateway}/identity.py +63 -19
- rucio/{api → gateway}/importer.py +3 -4
- rucio/{api → gateway}/lifetime_exception.py +35 -10
- rucio/{api → gateway}/lock.py +34 -12
- rucio/{api/meta.py → gateway/meta_conventions.py} +18 -16
- rucio/{api → gateway}/permission.py +4 -5
- rucio/{api → gateway}/quarantined_replica.py +13 -4
- rucio/{api → gateway}/replica.py +12 -11
- rucio/{api → gateway}/request.py +129 -28
- rucio/{api → gateway}/rse.py +11 -12
- rucio/{api → gateway}/rule.py +117 -35
- rucio/{api → gateway}/scope.py +24 -14
- rucio/{api → gateway}/subscription.py +65 -43
- rucio/{api → gateway}/vo.py +17 -7
- rucio/rse/__init__.py +3 -4
- rucio/rse/protocols/__init__.py +0 -1
- rucio/rse/protocols/bittorrent.py +184 -0
- rucio/rse/protocols/cache.py +1 -2
- rucio/rse/protocols/dummy.py +1 -2
- rucio/rse/protocols/gfal.py +12 -10
- rucio/rse/protocols/globus.py +7 -7
- rucio/rse/protocols/gsiftp.py +2 -3
- rucio/rse/protocols/http_cache.py +1 -2
- rucio/rse/protocols/mock.py +1 -2
- rucio/rse/protocols/ngarc.py +1 -2
- rucio/rse/protocols/posix.py +12 -13
- rucio/rse/protocols/protocol.py +116 -52
- rucio/rse/protocols/rclone.py +6 -7
- rucio/rse/protocols/rfio.py +4 -5
- rucio/rse/protocols/srm.py +9 -10
- rucio/rse/protocols/ssh.py +8 -9
- rucio/rse/protocols/storm.py +2 -3
- rucio/rse/protocols/webdav.py +17 -14
- rucio/rse/protocols/xrootd.py +23 -17
- rucio/rse/rsemanager.py +19 -7
- rucio/tests/__init__.py +0 -1
- rucio/tests/common.py +43 -17
- rucio/tests/common_server.py +3 -3
- rucio/transfertool/__init__.py +0 -1
- rucio/transfertool/bittorrent.py +199 -0
- rucio/transfertool/bittorrent_driver.py +52 -0
- rucio/transfertool/bittorrent_driver_qbittorrent.py +133 -0
- rucio/transfertool/fts3.py +250 -138
- rucio/transfertool/fts3_plugins.py +152 -0
- rucio/transfertool/globus.py +9 -8
- rucio/transfertool/globus_library.py +1 -2
- rucio/transfertool/mock.py +21 -12
- rucio/transfertool/transfertool.py +33 -24
- rucio/vcsversion.py +4 -4
- rucio/version.py +5 -13
- rucio/web/__init__.py +0 -1
- rucio/web/rest/__init__.py +0 -1
- rucio/web/rest/flaskapi/__init__.py +0 -1
- rucio/web/rest/flaskapi/authenticated_bp.py +0 -1
- rucio/web/rest/flaskapi/v1/__init__.py +0 -1
- rucio/web/rest/flaskapi/v1/accountlimits.py +15 -13
- rucio/web/rest/flaskapi/v1/accounts.py +49 -48
- rucio/web/rest/flaskapi/v1/archives.py +12 -10
- rucio/web/rest/flaskapi/v1/auth.py +146 -144
- rucio/web/rest/flaskapi/v1/common.py +82 -41
- rucio/web/rest/flaskapi/v1/config.py +5 -6
- rucio/web/rest/flaskapi/v1/credentials.py +7 -8
- rucio/web/rest/flaskapi/v1/dids.py +158 -28
- rucio/web/rest/flaskapi/v1/dirac.py +8 -8
- rucio/web/rest/flaskapi/v1/export.py +3 -5
- rucio/web/rest/flaskapi/v1/heartbeats.py +3 -5
- rucio/web/rest/flaskapi/v1/identities.py +3 -5
- rucio/web/rest/flaskapi/v1/import.py +3 -4
- rucio/web/rest/flaskapi/v1/lifetime_exceptions.py +6 -9
- rucio/web/rest/flaskapi/v1/locks.py +2 -4
- rucio/web/rest/flaskapi/v1/main.py +10 -2
- rucio/web/rest/flaskapi/v1/{meta.py → meta_conventions.py} +26 -11
- rucio/web/rest/flaskapi/v1/metrics.py +1 -2
- rucio/web/rest/flaskapi/v1/nongrid_traces.py +4 -4
- rucio/web/rest/flaskapi/v1/ping.py +6 -7
- rucio/web/rest/flaskapi/v1/redirect.py +8 -9
- rucio/web/rest/flaskapi/v1/replicas.py +43 -19
- rucio/web/rest/flaskapi/v1/requests.py +178 -21
- rucio/web/rest/flaskapi/v1/rses.py +61 -26
- rucio/web/rest/flaskapi/v1/rules.py +48 -18
- rucio/web/rest/flaskapi/v1/scopes.py +3 -5
- rucio/web/rest/flaskapi/v1/subscriptions.py +22 -18
- rucio/web/rest/flaskapi/v1/traces.py +4 -4
- rucio/web/rest/flaskapi/v1/types.py +20 -0
- rucio/web/rest/flaskapi/v1/vos.py +3 -5
- rucio/web/rest/main.py +0 -1
- rucio/web/rest/metrics.py +0 -1
- rucio/web/rest/ping.py +27 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/ldap.cfg.template +1 -1
- rucio-35.8.0.data/data/rucio/requirements.server.txt +268 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/tools/bootstrap.py +3 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/tools/merge_rucio_configs.py +2 -5
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/tools/reset_database.py +3 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio +87 -85
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-abacus-account +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-abacus-collection-replica +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-abacus-rse +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-admin +45 -32
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-atropos +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-auditor +13 -7
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-automatix +1 -2
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-bb8 +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-c3po +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-cache-client +2 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-cache-consumer +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-finisher +1 -2
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-poller +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-preparer +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-receiver +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-stager +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-submitter +2 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-conveyor-throttler +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-dark-reaper +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-dumper +11 -10
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-follower +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-hermes +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-judge-cleaner +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-judge-evaluator +2 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-judge-injector +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-judge-repairer +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-kronos +1 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-minos +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-minos-temporary-expiration +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-necromancer +1 -2
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-oauth-manager +2 -3
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-reaper +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-replica-recoverer +6 -7
- rucio-35.8.0.data/scripts/rucio-rse-decommissioner +66 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-storage-consistency-actions +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-transmogrifier +0 -1
- {rucio-32.8.6.data → rucio-35.8.0.data}/scripts/rucio-undertaker +1 -2
- rucio-35.8.0.dist-info/METADATA +72 -0
- rucio-35.8.0.dist-info/RECORD +493 -0
- {rucio-32.8.6.dist-info → rucio-35.8.0.dist-info}/WHEEL +1 -1
- {rucio-32.8.6.dist-info → rucio-35.8.0.dist-info}/licenses/AUTHORS.rst +3 -0
- rucio/api/temporary_did.py +0 -49
- rucio/common/schema/cms.py +0 -478
- rucio/common/schema/lsst.py +0 -423
- rucio/core/permission/cms.py +0 -1166
- rucio/core/temporary_did.py +0 -188
- rucio/daemons/reaper/light_reaper.py +0 -255
- rucio/web/rest/flaskapi/v1/tmp_dids.py +0 -115
- rucio-32.8.6.data/data/rucio/requirements.txt +0 -55
- rucio-32.8.6.data/scripts/rucio-light-reaper +0 -53
- rucio-32.8.6.dist-info/METADATA +0 -83
- rucio-32.8.6.dist-info/RECORD +0 -481
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/alembic.ini.template +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/alembic_offline.ini.template +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/globus-config.yml.template +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/mail_templates/rule_approval_request.tmpl +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/mail_templates/rule_approved_admin.tmpl +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/mail_templates/rule_approved_user.tmpl +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/mail_templates/rule_denied_admin.tmpl +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/mail_templates/rule_denied_user.tmpl +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/mail_templates/rule_ok_notification.tmpl +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/rse-accounts.cfg.template +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/rucio.cfg.atlas.client.template +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/rucio.cfg.template +0 -0
- {rucio-32.8.6.data → rucio-35.8.0.data}/data/rucio/etc/rucio_multi_vo.cfg.template +0 -0
- {rucio-32.8.6.dist-info → rucio-35.8.0.dist-info}/licenses/LICENSE +0 -0
- {rucio-32.8.6.dist-info → rucio-35.8.0.dist-info}/top_level.txt +0 -0
rucio/core/did.py
CHANGED
|
@@ -1,4 +1,3 @@
|
|
|
1
|
-
# -*- coding: utf-8 -*-
|
|
2
1
|
# Copyright European Organization for Nuclear Research (CERN) since 2012
|
|
3
2
|
#
|
|
4
3
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
@@ -19,48 +18,48 @@ from datetime import datetime, timedelta
|
|
|
19
18
|
from enum import Enum
|
|
20
19
|
from hashlib import md5
|
|
21
20
|
from re import match
|
|
22
|
-
from typing import TYPE_CHECKING
|
|
21
|
+
from typing import TYPE_CHECKING, Any, Literal, Optional, Union
|
|
23
22
|
|
|
24
|
-
from sqlalchemy import and_,
|
|
25
|
-
from sqlalchemy.exc import DatabaseError, IntegrityError
|
|
26
|
-
from sqlalchemy.
|
|
27
|
-
from sqlalchemy.sql import
|
|
28
|
-
from sqlalchemy.sql.expression import bindparam, case, select, true, false, null
|
|
23
|
+
from sqlalchemy import and_, delete, exists, insert, or_, update
|
|
24
|
+
from sqlalchemy.exc import DatabaseError, IntegrityError, NoResultFound
|
|
25
|
+
from sqlalchemy.sql import func, not_
|
|
26
|
+
from sqlalchemy.sql.expression import bindparam, case, false, null, select, true
|
|
29
27
|
|
|
30
28
|
import rucio.core.replica # import add_replicas
|
|
31
29
|
import rucio.core.rule
|
|
32
30
|
from rucio.common import exception
|
|
33
31
|
from rucio.common.config import config_get_bool, config_get_int
|
|
34
|
-
from rucio.common.utils import
|
|
32
|
+
from rucio.common.utils import chunks, is_archive
|
|
35
33
|
from rucio.core import did_meta_plugins
|
|
36
34
|
from rucio.core.message import add_message
|
|
37
35
|
from rucio.core.monitor import MetricManager
|
|
38
36
|
from rucio.core.naming_convention import validate_name
|
|
39
|
-
from rucio.db.sqla import
|
|
40
|
-
from rucio.db.sqla.constants import
|
|
41
|
-
from rucio.db.sqla.session import read_session,
|
|
37
|
+
from rucio.db.sqla import filter_thread_work, models
|
|
38
|
+
from rucio.db.sqla.constants import BadFilesStatus, DIDAvailability, DIDReEvaluation, DIDType, RuleState
|
|
39
|
+
from rucio.db.sqla.session import read_session, stream_session, transactional_session
|
|
42
40
|
from rucio.db.sqla.util import temp_table_mngr
|
|
43
41
|
|
|
44
42
|
if TYPE_CHECKING:
|
|
45
|
-
from collections.abc import
|
|
46
|
-
|
|
43
|
+
from collections.abc import Iterable, Iterator, Mapping, Sequence
|
|
44
|
+
|
|
47
45
|
from sqlalchemy.orm import Session
|
|
48
|
-
from sqlalchemy.
|
|
49
|
-
from
|
|
46
|
+
from sqlalchemy.sql._typing import ColumnExpressionArgument
|
|
47
|
+
from sqlalchemy.sql.selectable import Select
|
|
48
|
+
|
|
49
|
+
from rucio.common.types import InternalAccount, InternalScope, LoggerFunction
|
|
50
50
|
|
|
51
|
-
LoggerFunction = Callable[..., Any]
|
|
52
51
|
|
|
53
52
|
METRICS = MetricManager(module=__name__)
|
|
54
53
|
|
|
55
54
|
|
|
56
55
|
@read_session
|
|
57
56
|
def list_expired_dids(
|
|
58
|
-
worker_number: int = None,
|
|
59
|
-
total_workers: int = None,
|
|
60
|
-
limit: int = None,
|
|
57
|
+
worker_number: Optional[int] = None,
|
|
58
|
+
total_workers: Optional[int] = None,
|
|
59
|
+
limit: Optional[int] = None,
|
|
61
60
|
*,
|
|
62
61
|
session: "Session"
|
|
63
|
-
):
|
|
62
|
+
) -> list[dict[str, Any]]:
|
|
64
63
|
"""
|
|
65
64
|
List expired data identifiers.
|
|
66
65
|
|
|
@@ -80,13 +79,15 @@ def list_expired_dids(
|
|
|
80
79
|
models.DataIdentifier.did_type,
|
|
81
80
|
models.DataIdentifier.created_at,
|
|
82
81
|
models.DataIdentifier.purge_replicas
|
|
82
|
+
).with_hint(
|
|
83
|
+
models.DataIdentifier,
|
|
84
|
+
'INDEX(DIDS DIDS_EXPIRED_AT_IDX)',
|
|
85
|
+
'oracle'
|
|
83
86
|
).where(
|
|
84
87
|
models.DataIdentifier.expired_at < datetime.utcnow(),
|
|
85
88
|
not_(sub_query),
|
|
86
89
|
).order_by(
|
|
87
90
|
models.DataIdentifier.expired_at
|
|
88
|
-
).with_hint(
|
|
89
|
-
models.DataIdentifier, "index(DIDS DIDS_EXPIRED_AT_IDX)", 'oracle'
|
|
90
91
|
)
|
|
91
92
|
|
|
92
93
|
if session.bind.dialect.name in ['oracle', 'mysql', 'postgresql']:
|
|
@@ -120,17 +121,17 @@ def list_expired_dids(
|
|
|
120
121
|
def add_did(
|
|
121
122
|
scope: "InternalScope",
|
|
122
123
|
name: str,
|
|
123
|
-
did_type:
|
|
124
|
+
did_type: Union[str, DIDType],
|
|
124
125
|
account: "InternalAccount",
|
|
125
|
-
statuses:
|
|
126
|
-
meta:
|
|
127
|
-
rules:
|
|
128
|
-
lifetime:
|
|
129
|
-
dids:
|
|
130
|
-
rse_id:
|
|
126
|
+
statuses: Optional["Mapping[str, Any]"] = None,
|
|
127
|
+
meta: Optional["Mapping[str, Any]"] = None,
|
|
128
|
+
rules: Optional["Sequence[str]"] = None,
|
|
129
|
+
lifetime: Optional[int] = None,
|
|
130
|
+
dids: Optional["Sequence[Mapping[str, Any]]"] = None,
|
|
131
|
+
rse_id: Optional[str] = None,
|
|
131
132
|
*,
|
|
132
133
|
session: "Session",
|
|
133
|
-
):
|
|
134
|
+
) -> None:
|
|
134
135
|
"""
|
|
135
136
|
Add data identifier.
|
|
136
137
|
|
|
@@ -159,7 +160,7 @@ def add_dids(
|
|
|
159
160
|
account: "InternalAccount",
|
|
160
161
|
*,
|
|
161
162
|
session: "Session",
|
|
162
|
-
):
|
|
163
|
+
) -> None:
|
|
163
164
|
"""
|
|
164
165
|
Bulk add data identifiers.
|
|
165
166
|
|
|
@@ -251,12 +252,12 @@ def add_dids(
|
|
|
251
252
|
def attach_dids(
|
|
252
253
|
scope: "InternalScope",
|
|
253
254
|
name: str,
|
|
254
|
-
dids: "Sequence[
|
|
255
|
+
dids: "Sequence[Mapping[str, Any]]",
|
|
255
256
|
account: "InternalAccount",
|
|
256
|
-
rse_id:
|
|
257
|
+
rse_id: Optional[str] = None,
|
|
257
258
|
*,
|
|
258
259
|
session: "Session",
|
|
259
|
-
):
|
|
260
|
+
) -> None:
|
|
260
261
|
"""
|
|
261
262
|
Append data identifier.
|
|
262
263
|
|
|
@@ -272,12 +273,12 @@ def attach_dids(
|
|
|
272
273
|
|
|
273
274
|
@transactional_session
|
|
274
275
|
def attach_dids_to_dids(
|
|
275
|
-
attachments: "
|
|
276
|
+
attachments: "Sequence[Mapping[str, Any]]",
|
|
276
277
|
account: "InternalAccount",
|
|
277
278
|
ignore_duplicate: bool = False,
|
|
278
279
|
*,
|
|
279
280
|
session: "Session",
|
|
280
|
-
):
|
|
281
|
+
) -> None:
|
|
281
282
|
children_temp_table = temp_table_mngr(session).create_scope_name_table()
|
|
282
283
|
parent_dids = list()
|
|
283
284
|
first_iteration = True
|
|
@@ -287,18 +288,27 @@ def attach_dids_to_dids(
|
|
|
287
288
|
cont = []
|
|
288
289
|
stmt = select(
|
|
289
290
|
models.DataIdentifier
|
|
291
|
+
).with_hint(
|
|
292
|
+
models.DataIdentifier,
|
|
293
|
+
'INDEX(DIDS DIDS_PK)',
|
|
294
|
+
'oracle'
|
|
290
295
|
).where(
|
|
291
296
|
models.DataIdentifier.scope == attachment['scope'],
|
|
292
297
|
models.DataIdentifier.name == attachment['name']
|
|
293
|
-
).with_hint(
|
|
294
|
-
models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle'
|
|
295
298
|
)
|
|
296
299
|
parent_did = session.execute(stmt).scalar_one()
|
|
297
300
|
update_parent = False
|
|
298
301
|
|
|
299
302
|
if not first_iteration:
|
|
300
|
-
|
|
301
|
-
|
|
303
|
+
stmt = delete(
|
|
304
|
+
children_temp_table
|
|
305
|
+
)
|
|
306
|
+
session.execute(stmt)
|
|
307
|
+
values = [{'scope': s, 'name': n} for s, n in children]
|
|
308
|
+
stmt = insert(
|
|
309
|
+
children_temp_table
|
|
310
|
+
)
|
|
311
|
+
session.execute(stmt, values)
|
|
302
312
|
|
|
303
313
|
if parent_did.did_type == DIDType.FILE:
|
|
304
314
|
# check if parent file has the archive extension
|
|
@@ -342,15 +352,26 @@ def attach_dids_to_dids(
|
|
|
342
352
|
raise exception.DataIdentifierNotFound("Data identifier '%s:%s' not found" % (attachment['scope'], attachment['name']))
|
|
343
353
|
first_iteration = False
|
|
344
354
|
|
|
345
|
-
# Remove all duplicated
|
|
355
|
+
# Remove all duplicated dictionaries from the list
|
|
346
356
|
# (convert the list of dictionaries into a list of tuple, then to a set of tuple
|
|
347
357
|
# to remove duplicates, then back to a list of unique dictionaries)
|
|
348
358
|
parent_dids = [dict(tup) for tup in set(tuple(dictionary.items()) for dictionary in parent_dids)]
|
|
349
359
|
if parent_dids:
|
|
350
|
-
|
|
360
|
+
stmt = insert(
|
|
361
|
+
models.UpdatedDID
|
|
362
|
+
)
|
|
363
|
+
session.execute(stmt, parent_dids)
|
|
351
364
|
|
|
352
365
|
|
|
353
|
-
def __add_files_to_archive(
|
|
366
|
+
def __add_files_to_archive(
|
|
367
|
+
parent_did: models.DataIdentifier,
|
|
368
|
+
files_temp_table: Any,
|
|
369
|
+
files: "Mapping[tuple[InternalScope, str], Mapping[str, Any]]",
|
|
370
|
+
account: "InternalAccount",
|
|
371
|
+
ignore_duplicate: bool = False,
|
|
372
|
+
*,
|
|
373
|
+
session: "Session"
|
|
374
|
+
) -> None:
|
|
354
375
|
"""
|
|
355
376
|
Add files to archive.
|
|
356
377
|
|
|
@@ -376,10 +397,8 @@ def __add_files_to_archive(parent_did, files_temp_table, files, account, ignore_
|
|
|
376
397
|
).outerjoin_from(
|
|
377
398
|
files_temp_table,
|
|
378
399
|
models.DataIdentifier,
|
|
379
|
-
and_(
|
|
380
|
-
|
|
381
|
-
models.DataIdentifier.name == files_temp_table.name,
|
|
382
|
-
),
|
|
400
|
+
and_(models.DataIdentifier.scope == files_temp_table.scope,
|
|
401
|
+
models.DataIdentifier.name == files_temp_table.name)
|
|
383
402
|
)
|
|
384
403
|
if ignore_duplicate:
|
|
385
404
|
stmt = stmt.add_columns(
|
|
@@ -387,12 +406,10 @@ def __add_files_to_archive(parent_did, files_temp_table, files, account, ignore_
|
|
|
387
406
|
).outerjoin_from(
|
|
388
407
|
files_temp_table,
|
|
389
408
|
models.ConstituentAssociation,
|
|
390
|
-
and_(
|
|
391
|
-
|
|
392
|
-
|
|
393
|
-
|
|
394
|
-
models.ConstituentAssociation.child_name == files_temp_table.name,
|
|
395
|
-
),
|
|
409
|
+
and_(models.ConstituentAssociation.scope == parent_did.scope,
|
|
410
|
+
models.ConstituentAssociation.name == parent_did.name,
|
|
411
|
+
models.ConstituentAssociation.child_scope == files_temp_table.scope,
|
|
412
|
+
models.ConstituentAssociation.child_name == files_temp_table.name)
|
|
396
413
|
)
|
|
397
414
|
|
|
398
415
|
dids_to_add = {}
|
|
@@ -452,8 +469,16 @@ def __add_files_to_archive(parent_did, files_temp_table, files, account, ignore_
|
|
|
452
469
|
|
|
453
470
|
# insert into archive_contents
|
|
454
471
|
try:
|
|
455
|
-
|
|
456
|
-
|
|
472
|
+
values = list(dids_to_add.values())
|
|
473
|
+
stmt = insert(
|
|
474
|
+
models.DataIdentifier
|
|
475
|
+
)
|
|
476
|
+
dids_to_add and session.execute(stmt, values)
|
|
477
|
+
values = list(archive_contents_to_add.values())
|
|
478
|
+
stmt = insert(
|
|
479
|
+
models.ConstituentAssociation
|
|
480
|
+
)
|
|
481
|
+
archive_contents_to_add and session.execute(stmt, values)
|
|
457
482
|
if must_set_constituent:
|
|
458
483
|
stmt = update(
|
|
459
484
|
models.DataIdentifier
|
|
@@ -461,16 +486,16 @@ def __add_files_to_archive(parent_did, files_temp_table, files, account, ignore_
|
|
|
461
486
|
exists(
|
|
462
487
|
select(1)
|
|
463
488
|
).where(
|
|
464
|
-
models.DataIdentifier.scope == files_temp_table.scope,
|
|
465
|
-
|
|
489
|
+
and_(models.DataIdentifier.scope == files_temp_table.scope,
|
|
490
|
+
models.DataIdentifier.name == files_temp_table.name)
|
|
466
491
|
)
|
|
467
492
|
).where(
|
|
468
493
|
or_(models.DataIdentifier.constituent.is_(None),
|
|
469
494
|
models.DataIdentifier.constituent == false())
|
|
470
|
-
).
|
|
495
|
+
).values({
|
|
496
|
+
models.DataIdentifier.constituent: True
|
|
497
|
+
}).execution_options(
|
|
471
498
|
synchronize_session=False
|
|
472
|
-
).values(
|
|
473
|
-
constituent=True
|
|
474
499
|
)
|
|
475
500
|
session.execute(stmt)
|
|
476
501
|
session.flush()
|
|
@@ -478,7 +503,7 @@ def __add_files_to_archive(parent_did, files_temp_table, files, account, ignore_
|
|
|
478
503
|
raise exception.RucioException(error.args)
|
|
479
504
|
|
|
480
505
|
if not parent_did.is_archive:
|
|
481
|
-
# mark
|
|
506
|
+
# mark the archive file as is_archive
|
|
482
507
|
parent_did.is_archive = True
|
|
483
508
|
|
|
484
509
|
# mark parent datasets as is_archive = True
|
|
@@ -488,24 +513,33 @@ def __add_files_to_archive(parent_did, files_temp_table, files, account, ignore_
|
|
|
488
513
|
exists(
|
|
489
514
|
select(1).prefix_with("/*+ INDEX(CONTENTS CONTENTS_CHILD_SCOPE_NAME_IDX) */", dialect="oracle")
|
|
490
515
|
).where(
|
|
491
|
-
models.DataIdentifierAssociation.child_scope == parent_did.scope,
|
|
492
|
-
|
|
493
|
-
|
|
494
|
-
|
|
516
|
+
and_(models.DataIdentifierAssociation.child_scope == parent_did.scope,
|
|
517
|
+
models.DataIdentifierAssociation.child_name == parent_did.name,
|
|
518
|
+
models.DataIdentifierAssociation.scope == models.DataIdentifier.scope,
|
|
519
|
+
models.DataIdentifierAssociation.name == models.DataIdentifier.name)
|
|
495
520
|
)
|
|
496
521
|
).where(
|
|
497
522
|
or_(models.DataIdentifier.is_archive.is_(None),
|
|
498
523
|
models.DataIdentifier.is_archive == false())
|
|
499
|
-
).
|
|
524
|
+
).values({
|
|
525
|
+
models.DataIdentifier.is_archive: True
|
|
526
|
+
}).execution_options(
|
|
500
527
|
synchronize_session=False
|
|
501
|
-
).values(
|
|
502
|
-
is_archive=True
|
|
503
528
|
)
|
|
504
529
|
session.execute(stmt)
|
|
505
530
|
|
|
506
531
|
|
|
507
532
|
@transactional_session
|
|
508
|
-
def __add_files_to_dataset(
|
|
533
|
+
def __add_files_to_dataset(
|
|
534
|
+
parent_did: models.DataIdentifier,
|
|
535
|
+
files_temp_table: Any,
|
|
536
|
+
files: "Mapping[tuple[InternalScope, str], Mapping[str, Any]]",
|
|
537
|
+
account: "InternalAccount",
|
|
538
|
+
rse_id: str,
|
|
539
|
+
ignore_duplicate: bool = False,
|
|
540
|
+
*,
|
|
541
|
+
session: "Session"
|
|
542
|
+
) -> dict[tuple["InternalScope", str], dict[str, Any]]:
|
|
509
543
|
"""
|
|
510
544
|
Add files to dataset.
|
|
511
545
|
|
|
@@ -544,10 +578,8 @@ def __add_files_to_dataset(parent_did, files_temp_table, files, account, rse_id,
|
|
|
544
578
|
).outerjoin_from(
|
|
545
579
|
files_temp_table,
|
|
546
580
|
models.DataIdentifier,
|
|
547
|
-
and_(
|
|
548
|
-
|
|
549
|
-
models.DataIdentifier.name == files_temp_table.name,
|
|
550
|
-
),
|
|
581
|
+
and_(models.DataIdentifier.scope == files_temp_table.scope,
|
|
582
|
+
models.DataIdentifier.name == files_temp_table.name),
|
|
551
583
|
)
|
|
552
584
|
if ignore_duplicate:
|
|
553
585
|
stmt = stmt.add_columns(
|
|
@@ -555,12 +587,10 @@ def __add_files_to_dataset(parent_did, files_temp_table, files, account, rse_id,
|
|
|
555
587
|
).outerjoin_from(
|
|
556
588
|
files_temp_table,
|
|
557
589
|
models.DataIdentifierAssociation,
|
|
558
|
-
and_(
|
|
559
|
-
|
|
560
|
-
|
|
561
|
-
|
|
562
|
-
models.DataIdentifierAssociation.child_name == files_temp_table.name,
|
|
563
|
-
),
|
|
590
|
+
and_(models.DataIdentifierAssociation.scope == parent_did.scope,
|
|
591
|
+
models.DataIdentifierAssociation.name == parent_did.name,
|
|
592
|
+
models.DataIdentifierAssociation.child_scope == files_temp_table.scope,
|
|
593
|
+
models.DataIdentifierAssociation.child_name == files_temp_table.name),
|
|
564
594
|
)
|
|
565
595
|
|
|
566
596
|
files_to_add = {}
|
|
@@ -608,7 +638,11 @@ def __add_files_to_dataset(parent_did, files_temp_table, files, account, rse_id,
|
|
|
608
638
|
}
|
|
609
639
|
|
|
610
640
|
try:
|
|
611
|
-
|
|
641
|
+
values = list(files_to_add.values())
|
|
642
|
+
stmt = insert(
|
|
643
|
+
models.DataIdentifierAssociation
|
|
644
|
+
)
|
|
645
|
+
files_to_add and session.execute(stmt, values)
|
|
612
646
|
session.flush()
|
|
613
647
|
return files_to_add
|
|
614
648
|
except IntegrityError as error:
|
|
@@ -630,7 +664,14 @@ def __add_files_to_dataset(parent_did, files_temp_table, files, account, rse_id,
|
|
|
630
664
|
|
|
631
665
|
|
|
632
666
|
@transactional_session
|
|
633
|
-
def __add_collections_to_container(
|
|
667
|
+
def __add_collections_to_container(
|
|
668
|
+
parent_did: models.DataIdentifier,
|
|
669
|
+
collections_temp_table: Any,
|
|
670
|
+
collections: "Mapping[tuple[InternalScope, str], Mapping[str, Any]]",
|
|
671
|
+
account: "InternalAccount",
|
|
672
|
+
*,
|
|
673
|
+
session: "Session"
|
|
674
|
+
) -> None:
|
|
634
675
|
"""
|
|
635
676
|
Add collections (datasets or containers) to container.
|
|
636
677
|
|
|
@@ -651,10 +692,8 @@ def __add_collections_to_container(parent_did, collections_temp_table, collectio
|
|
|
651
692
|
).outerjoin_from(
|
|
652
693
|
collections_temp_table,
|
|
653
694
|
models.DataIdentifier,
|
|
654
|
-
and_(
|
|
655
|
-
|
|
656
|
-
models.DataIdentifier.name == collections_temp_table.name,
|
|
657
|
-
),
|
|
695
|
+
and_(models.DataIdentifier.scope == collections_temp_table.scope,
|
|
696
|
+
models.DataIdentifier.name == collections_temp_table.name),
|
|
658
697
|
)
|
|
659
698
|
|
|
660
699
|
container_parents = None
|
|
@@ -730,379 +769,15 @@ def __add_collections_to_container(parent_did, collections_temp_table, collectio
|
|
|
730
769
|
raise exception.RucioException(error.args)
|
|
731
770
|
|
|
732
771
|
|
|
733
|
-
def __add_files_to_archive_without_temp_tables(scope, name, files, account, ignore_duplicate=False, *, session: "Session"):
|
|
734
|
-
"""
|
|
735
|
-
Add files to archive.
|
|
736
|
-
|
|
737
|
-
:param scope: The scope name.
|
|
738
|
-
:param name: The data identifier name.
|
|
739
|
-
:param files: archive content.
|
|
740
|
-
:param account: The account owner.
|
|
741
|
-
:param ignore_duplicate: If True, ignore duplicate entries.
|
|
742
|
-
:param session: The database session in use.
|
|
743
|
-
"""
|
|
744
|
-
# lookup for existing files
|
|
745
|
-
files_query = select(
|
|
746
|
-
models.DataIdentifier.scope,
|
|
747
|
-
models.DataIdentifier.name,
|
|
748
|
-
models.DataIdentifier.bytes,
|
|
749
|
-
models.DataIdentifier.guid,
|
|
750
|
-
models.DataIdentifier.events,
|
|
751
|
-
models.DataIdentifier.availability,
|
|
752
|
-
models.DataIdentifier.adler32,
|
|
753
|
-
models.DataIdentifier.md5,
|
|
754
|
-
).where(
|
|
755
|
-
models.DataIdentifier.did_type == DIDType.FILE
|
|
756
|
-
).with_hint(
|
|
757
|
-
models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle'
|
|
758
|
-
)
|
|
759
|
-
|
|
760
|
-
file_condition = []
|
|
761
|
-
for file in files:
|
|
762
|
-
file_condition.append(and_(models.DataIdentifier.scope == file['scope'],
|
|
763
|
-
models.DataIdentifier.name == file['name']))
|
|
764
|
-
|
|
765
|
-
existing_content, existing_files = [], {}
|
|
766
|
-
if ignore_duplicate:
|
|
767
|
-
# lookup for existing content
|
|
768
|
-
content_query = select(
|
|
769
|
-
models.ConstituentAssociation.scope,
|
|
770
|
-
models.ConstituentAssociation.name,
|
|
771
|
-
models.ConstituentAssociation.child_scope,
|
|
772
|
-
models.ConstituentAssociation.child_name
|
|
773
|
-
).with_hint(
|
|
774
|
-
models.ConstituentAssociation, "INDEX(ARCHIVE_CONTENTS ARCH_CONTENTS_PK)", 'oracle'
|
|
775
|
-
)
|
|
776
|
-
content_condition = []
|
|
777
|
-
for file in files:
|
|
778
|
-
content_condition.append(and_(models.ConstituentAssociation.scope == scope,
|
|
779
|
-
models.ConstituentAssociation.name == name,
|
|
780
|
-
models.ConstituentAssociation.child_scope == file['scope'],
|
|
781
|
-
models.ConstituentAssociation.child_name == file['name']))
|
|
782
|
-
for row in session.execute(content_query.where(or_(*content_condition))):
|
|
783
|
-
existing_content.append(row)
|
|
784
|
-
|
|
785
|
-
for row in session.execute(files_query.where(or_(*file_condition))):
|
|
786
|
-
existing_files['%s:%s' % (row.scope.internal, row.name)] = {'child_scope': row.scope,
|
|
787
|
-
'child_name': row.name,
|
|
788
|
-
'scope': scope,
|
|
789
|
-
'name': name,
|
|
790
|
-
'bytes': row.bytes,
|
|
791
|
-
'adler32': row.adler32,
|
|
792
|
-
'md5': row.md5,
|
|
793
|
-
'guid': row.guid,
|
|
794
|
-
'length': row.events}
|
|
795
|
-
|
|
796
|
-
contents = []
|
|
797
|
-
new_files, existing_files_condition = [], []
|
|
798
|
-
for file in files:
|
|
799
|
-
did_tag = '%s:%s' % (file['scope'].internal, file['name'])
|
|
800
|
-
if did_tag not in existing_files:
|
|
801
|
-
# For non existing files
|
|
802
|
-
# Add them to the content
|
|
803
|
-
contents.append({'child_scope': file['scope'],
|
|
804
|
-
'child_name': file['name'],
|
|
805
|
-
'scope': scope,
|
|
806
|
-
'name': name,
|
|
807
|
-
'bytes': file['bytes'],
|
|
808
|
-
'adler32': file.get('adler32'),
|
|
809
|
-
'md5': file.get('md5'),
|
|
810
|
-
'guid': file.get('guid'),
|
|
811
|
-
'length': file.get('events')})
|
|
812
|
-
|
|
813
|
-
file['constituent'] = True
|
|
814
|
-
file['did_type'] = DIDType.FILE
|
|
815
|
-
file['account'] = account
|
|
816
|
-
for key in file.get('meta', {}):
|
|
817
|
-
file[key] = file['meta'][key]
|
|
818
|
-
# Prepare new file registrations
|
|
819
|
-
new_files.append(file)
|
|
820
|
-
else:
|
|
821
|
-
# For existing files
|
|
822
|
-
# Prepare the dids updates
|
|
823
|
-
existing_files_condition.append(and_(models.DataIdentifier.scope == file['scope'],
|
|
824
|
-
models.DataIdentifier.name == file['name']))
|
|
825
|
-
# Check if they are not already in the content
|
|
826
|
-
if not existing_content or (scope, name, file['scope'], file['name']) not in existing_content:
|
|
827
|
-
contents.append(existing_files[did_tag])
|
|
828
|
-
|
|
829
|
-
# insert into archive_contents
|
|
830
|
-
try:
|
|
831
|
-
new_files and session.execute(insert(models.DataIdentifier), new_files)
|
|
832
|
-
if existing_files_condition:
|
|
833
|
-
for chunk in chunks(existing_files_condition, 20):
|
|
834
|
-
stmt = update(
|
|
835
|
-
models.DataIdentifier
|
|
836
|
-
).prefix_with(
|
|
837
|
-
"/*+ INDEX(DIDS DIDS_PK) */", dialect='oracle'
|
|
838
|
-
).where(
|
|
839
|
-
models.DataIdentifier.did_type == DIDType.FILE
|
|
840
|
-
).where(
|
|
841
|
-
or_(models.DataIdentifier.constituent.is_(None),
|
|
842
|
-
models.DataIdentifier.constituent == false())
|
|
843
|
-
).where(
|
|
844
|
-
or_(*chunk)
|
|
845
|
-
).values(
|
|
846
|
-
constituent=True
|
|
847
|
-
)
|
|
848
|
-
session.execute(stmt)
|
|
849
|
-
contents and session.execute(insert(models.ConstituentAssociation), contents)
|
|
850
|
-
session.flush()
|
|
851
|
-
except IntegrityError as error:
|
|
852
|
-
raise exception.RucioException(error.args)
|
|
853
|
-
|
|
854
|
-
stmt = select(
|
|
855
|
-
models.DataIdentifier
|
|
856
|
-
).where(
|
|
857
|
-
models.DataIdentifier.did_type == DIDType.FILE,
|
|
858
|
-
models.DataIdentifier.scope == scope,
|
|
859
|
-
models.DataIdentifier.name == name,
|
|
860
|
-
)
|
|
861
|
-
archive_did = session.execute(stmt).scalar()
|
|
862
|
-
if not archive_did.is_archive:
|
|
863
|
-
# mark tha archive file as is_archive
|
|
864
|
-
archive_did.is_archive = True
|
|
865
|
-
|
|
866
|
-
# mark parent datasets as is_archive = True
|
|
867
|
-
stmt = update(
|
|
868
|
-
models.DataIdentifier
|
|
869
|
-
).where(
|
|
870
|
-
exists(
|
|
871
|
-
select(1).prefix_with("/*+ INDEX(CONTENTS CONTENTS_CHILD_SCOPE_NAME_IDX) */", dialect="oracle")
|
|
872
|
-
).where(
|
|
873
|
-
models.DataIdentifierAssociation.child_scope == scope,
|
|
874
|
-
models.DataIdentifierAssociation.child_name == name,
|
|
875
|
-
models.DataIdentifierAssociation.scope == models.DataIdentifier.scope,
|
|
876
|
-
models.DataIdentifierAssociation.name == models.DataIdentifier.name
|
|
877
|
-
)
|
|
878
|
-
).where(
|
|
879
|
-
or_(models.DataIdentifier.is_archive.is_(None),
|
|
880
|
-
models.DataIdentifier.is_archive == false())
|
|
881
|
-
).execution_options(
|
|
882
|
-
synchronize_session=False
|
|
883
|
-
).values(
|
|
884
|
-
is_archive=True
|
|
885
|
-
)
|
|
886
|
-
session.execute(stmt)
-
-
- @transactional_session
- def __add_files_to_dataset_without_temp_tables(scope, name, files, account, rse_id, ignore_duplicate=False, *, session: "Session"):
- """
- Add files to dataset.
-
- :param scope: The scope name.
- :param name: The data identifier name.
- :param files: The list of files.
- :param account: The account owner.
- :param rse_id: The RSE id for the replicas.
- :param ignore_duplicate: If True, ignore duplicate entries.
- :param session: The database session in use.
- :returns: List of files attached (excluding the ones that were already attached to the dataset).
- """
- # Get metadata from dataset
- try:
- dataset_meta = validate_name(scope=scope, name=name, did_type='D')
- except Exception:
- dataset_meta = None
-
- if rse_id:
- rucio.core.replica.add_replicas(rse_id=rse_id, files=files, dataset_meta=dataset_meta,
- account=account, session=session)
-
- files = get_files(files=files, session=session)
-
- existing_content = []
- if ignore_duplicate:
- content_query = select(
- models.DataIdentifierAssociation.scope,
- models.DataIdentifierAssociation.name,
- models.DataIdentifierAssociation.child_scope,
- models.DataIdentifierAssociation.child_name
- ).with_hint(
- models.DataIdentifierAssociation, "INDEX(CONTENTS CONTENTS_PK)", 'oracle'
- )
- content_condition = []
- for file in files:
- content_condition.append(and_(models.DataIdentifierAssociation.scope == scope,
- models.DataIdentifierAssociation.name == name,
- models.DataIdentifierAssociation.child_scope == file['scope'],
- models.DataIdentifierAssociation.child_name == file['name']))
- for row in session.execute(content_query.where(or_(*content_condition))):
- existing_content.append(row)
-
- contents = []
- added_archives_condition = []
- for file in files:
- if not existing_content or (scope, name, file['scope'], file['name']) not in existing_content:
- contents.append({'scope': scope, 'name': name, 'child_scope': file['scope'],
- 'child_name': file['name'], 'bytes': file['bytes'],
- 'adler32': file.get('adler32'),
- 'guid': file['guid'], 'events': file['events'],
- 'md5': file.get('md5'), 'did_type': DIDType.DATASET,
- 'child_type': DIDType.FILE, 'rule_evaluation': True})
- added_archives_condition.append(
- and_(models.DataIdentifier.scope == file['scope'],
- models.DataIdentifier.name == file['name'],
- models.DataIdentifier.is_archive == true()))
-
- # if any of the attached files is an archive, set is_archive = True on the dataset
- stmt = select(
- models.DataIdentifier
- ).with_hint(
- models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle'
- ).where(
- or_(*added_archives_condition)
- ).limit(
- 1
- )
- if session.execute(stmt).scalar() is not None:
- stmt = update(
- models.DataIdentifier
- ).where(
- models.DataIdentifier.scope == scope,
- models.DataIdentifier.name == name,
- ).where(
- or_(models.DataIdentifier.is_archive.is_(None),
- models.DataIdentifier.is_archive == false())
- ).values(
- is_archive=True
- )
- session.execute(stmt)
-
- try:
- contents and session.execute(insert(models.DataIdentifierAssociation), contents)
- session.flush()
- return contents
- except IntegrityError as error:
- if match('.*IntegrityError.*ORA-02291: integrity constraint .*CONTENTS_CHILD_ID_FK.*violated - parent key not found.*', error.args[0]) \
- or match('.*IntegrityError.*1452.*Cannot add or update a child row: a foreign key constraint fails.*', error.args[0]) \
- or match('.*IntegrityError.*foreign key constraints? failed.*', error.args[0]) \
- or match('.*IntegrityError.*insert or update on table.*violates foreign key constraint.*', error.args[0]):
- raise exception.DataIdentifierNotFound("Data identifier not found")
- elif match('.*IntegrityError.*ORA-00001: unique constraint .*CONTENTS_PK.*violated.*', error.args[0]) \
- or match('.*IntegrityError.*UNIQUE constraint failed: contents.scope, contents.name, contents.child_scope, contents.child_name.*', error.args[0])\
- or match('.*IntegrityError.*duplicate key value violates unique constraint.*', error.args[0]) \
- or match('.*UniqueViolation.*duplicate key value violates unique constraint.*', error.args[0]) \
- or match('.*IntegrityError.*1062.*Duplicate entry .*for key.*PRIMARY.*', error.args[0]) \
- or match('.*duplicate entry.*key.*PRIMARY.*', error.args[0]) \
- or match('.*IntegrityError.*columns? .*not unique.*', error.args[0]):
- raise exception.FileAlreadyExists(error.args)
- else:
- raise exception.RucioException(error.args)
-
-
- @transactional_session
- def __add_collections_to_container_without_temp_tables(scope, name, collections, account, *, session: "Session"):
- """
- Add collections (datasets or containers) to container.
-
- :param scope: The scope name.
- :param name: The container name.
- :param collections: .
- :param account: The account owner.
- :param session: The database session in use.
- """
- container_parents = None
- condition = []
- for cond in collections:
-
- if (scope == cond['scope']) and (name == cond['name']):
- raise exception.UnsupportedOperation('Self-append is not valid!')
-
- condition.append(and_(models.DataIdentifier.scope == cond['scope'],
- models.DataIdentifier.name == cond['name']))
-
- available_dids = {}
- child_type = None
- stmt = select(
- models.DataIdentifier.scope,
- models.DataIdentifier.name,
- models.DataIdentifier.did_type
- ).with_hint(
- models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle'
- ).where(
- or_(*condition)
- )
- for row in session.execute(stmt):
-
- if row.did_type == DIDType.FILE:
- raise exception.UnsupportedOperation("Adding a file (%s:%s) to a container (%s:%s) is forbidden" % (row.scope, row.name, scope, name))
-
- if not child_type:
- child_type = row.did_type
-
- available_dids['%s:%s' % (row.scope.internal, row.name)] = row.did_type
-
- if child_type != row.did_type:
- raise exception.UnsupportedOperation("Mixed collection is not allowed: '%s:%s' is a %s(expected type: %s)" % (row.scope, row.name, row.did_type, child_type))
-
- if child_type == DIDType.CONTAINER:
- if container_parents is None:
- container_parents = {(parent['scope'], parent['name']) for parent in list_all_parent_dids(scope=scope, name=name, session=session)}
-
- if (row.scope, row.name) in container_parents:
- raise exception.UnsupportedOperation('Circular attachment detected. %s:%s is already a parent of %s:%s', row.scope, row.name, scope, name)
-
- messages = []
- for c in collections:
- did_asso = models.DataIdentifierAssociation(
- scope=scope,
- name=name,
- child_scope=c['scope'],
- child_name=c['name'],
- did_type=DIDType.CONTAINER,
- child_type=available_dids.get('%s:%s' % (c['scope'].internal, c['name'])),
- rule_evaluation=True
- )
- did_asso.save(session=session, flush=False)
- # Send AMI messages
- if child_type == DIDType.CONTAINER:
- chld_type = 'CONTAINER'
- elif child_type == DIDType.DATASET:
- chld_type = 'DATASET'
- else:
- chld_type = 'UNKNOWN'
-
- message = {'account': account.external,
- 'scope': scope.external,
- 'name': name,
- 'childscope': c['scope'].external,
- 'childname': c['name'],
- 'childtype': chld_type}
- if account.vo != 'def':
- message['vo'] = account.vo
- messages.append(message)
-
- try:
- for message in messages:
- add_message('REGISTER_CNT', message, session=session)
- session.flush()
- except IntegrityError as error:
- if match('.*IntegrityError.*ORA-02291: integrity constraint .*CONTENTS_CHILD_ID_FK.*violated - parent key not found.*', error.args[0]) \
- or match('.*IntegrityError.*1452.*Cannot add or update a child row: a foreign key constraint fails.*', error.args[0]) \
- or match('.*IntegrityError.*foreign key constraints? failed.*', error.args[0]) \
- or match('.*IntegrityError.*insert or update on table.*violates foreign key constraint.*', error.args[0]):
- raise exception.DataIdentifierNotFound("Data identifier not found")
- elif match('.*IntegrityError.*ORA-00001: unique constraint .*CONTENTS_PK.*violated.*', error.args[0]) \
- or match('.*IntegrityError.*1062.*Duplicate entry .*for key.*PRIMARY.*', error.args[0]) \
- or match('.*IntegrityError.*columns? scope.*name.*child_scope.*child_name.*not unique.*', error.args[0]) \
- or match('.*IntegrityError.*duplicate key value violates unique constraint.*', error.args[0]) \
- or match('.*UniqueViolation.*duplicate key value violates unique constraint.*', error.args[0]) \
- or match('.*IntegrityError.* UNIQUE constraint failed: contents.scope, contents.name, contents.child_scope, contents.child_name.*', error.args[0]):
- raise exception.DuplicateContent(error.args)
- raise exception.RucioException(error.args)
-
-
  @transactional_session
  def delete_dids(
- dids: "Sequence[
+ dids: "Sequence[Mapping[str, Any]]",
  account: "InternalAccount",
  expire_rules: bool = False,
  *,
  session: "Session",
  logger: "LoggerFunction" = logging.log,
- ):
+ ) -> None:
  """
  Delete data identifiers

@@ -1170,7 +845,11 @@ def delete_dids(
  return

  temp_table = temp_table_mngr(session).create_scope_name_table()
-
+ values = list(data_in_temp_table.values())
+ stmt = insert(
+ temp_table
+ )
+ session.execute(stmt, values)

  # Delete rules on did
  skip_deletion = False # Skip deletion in case of expiration of a rule
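The added lines above fill the freshly created temporary scope/name table by executing a plain `insert(temp_table)` together with a list of parameter dictionaries, which SQLAlchemy runs as a bulk (executemany) INSERT. A minimal sketch of that pattern, using a throwaway SQLite table and made-up values rather than Rucio's temporary-table manager:

    # Illustrative stand-in for the temporary scope/name table; not Rucio code.
    from sqlalchemy import Column, MetaData, String, Table, create_engine, insert, select

    engine = create_engine("sqlite://")          # in-memory database for the sketch
    metadata = MetaData()
    scratch = Table("scratch_scope_name", metadata,
                    Column("scope", String), Column("name", String))
    metadata.create_all(engine)

    rows = [{"scope": "user.jdoe", "name": "file_%d" % i} for i in range(3)]
    with engine.begin() as conn:
        # A list of dicts as the second argument to execute() triggers an
        # executemany-style bulk INSERT, the same shape as the hunk above.
        conn.execute(insert(scratch), rows)
        print(conn.execute(select(scratch)).all())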
@@ -1237,8 +916,8 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.DidMeta.scope == temp_table.scope,
-
+ and_(models.DidMeta.scope == temp_table.scope,
+ models.DidMeta.name == temp_table.name)
  )
  ).execution_options(
  synchronize_session=False
@@ -1248,20 +927,28 @@ def delete_dids(

  # Prepare the common part of the query for updating bad replicas if they exist
  bad_replica_stmt = update(
- models.
+ models.BadReplica
  ).where(
- models.
- ).values(
- state
- updated_at
- ).execution_options(
+ models.BadReplica.state == BadFilesStatus.BAD
+ ).values({
+ models.BadReplica.state: BadFilesStatus.DELETED,
+ models.BadReplica.updated_at: datetime.utcnow(),
+ }).execution_options(
  synchronize_session=False
  )

  if file_dids:
  if data_in_temp_table is not file_dids:
-
-
+ stmt = delete(
+ temp_table
+ )
+ session.execute(stmt)
+
+ values = list(file_dids.values())
+ stmt = insert(
+ temp_table
+ )
+ session.execute(stmt, values)
  data_in_temp_table = file_dids

  # update bad files passed directly as input
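The rewritten `bad_replica_stmt` uses the 2.0-style statement chain, with `values()` taking a dictionary keyed by column objects instead of keyword arguments. A compact sketch of the same shape, with a stand-in table and plain string states in place of Rucio's `BadFilesStatus` enum:

    # Stand-in table and states; only the statement shape mirrors the hunk above.
    from datetime import datetime
    from sqlalchemy import Column, DateTime, MetaData, String, Table, update

    metadata = MetaData()
    bad_replicas = Table("bad_replicas", metadata,
                         Column("state", String), Column("updated_at", DateTime))

    stmt = update(
        bad_replicas
    ).where(
        bad_replicas.c.state == "BAD"
    ).values({
        bad_replicas.c.state: "DELETED",
        bad_replicas.c.updated_at: datetime.utcnow(),
    })
    print(stmt)  # UPDATE bad_replicas SET state=..., updated_at=... WHERE bad_replicas.state = ...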
@@ -1269,16 +956,24 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.
-
+ and_(models.BadReplica.scope == temp_table.scope,
+ models.BadReplica.name == temp_table.name)
  )
  )
  session.execute(stmt)

  if collection_dids:
  if data_in_temp_table is not collection_dids:
-
-
+ stmt = delete(
+ temp_table
+ )
+ session.execute(stmt)
+
+ values = list(collection_dids.values())
+ stmt = insert(
+ temp_table
+ )
+ session.execute(stmt, values)
  data_in_temp_table = collection_dids

  # Find files of datasets passed as input and put them in a separate temp table
@@ -1290,6 +985,7 @@ def delete_dids(
  select(
  models.DataIdentifierAssociation.child_scope,
  models.DataIdentifierAssociation.child_name,
+ ).distinct(
  ).join_from(
  temp_table,
  models.DataIdentifierAssociation,
@@ -1297,7 +993,6 @@ def delete_dids(
  models.DataIdentifierAssociation.name == temp_table.name)
  ).where(
  models.DataIdentifierAssociation.child_type == DIDType.FILE
- ).distinct(
  )
  )
  session.execute(stmt)
@@ -1307,8 +1002,8 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.
-
+ and_(models.BadReplica.scope == resolved_files_temp_table.scope,
+ models.BadReplica.name == resolved_files_temp_table.name)
  )
  )
  session.execute(stmt)
@@ -1322,16 +1017,16 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.RSEFileAssociation.scope == resolved_files_temp_table.scope,
-
+ and_(models.RSEFileAssociation.scope == resolved_files_temp_table.scope,
+ models.RSEFileAssociation.name == resolved_files_temp_table.name)
  )
  ).where(
- models.RSEFileAssociation.lock_cnt == 0,
-
- ).
+ and_(models.RSEFileAssociation.lock_cnt == 0,
+ models.RSEFileAssociation.tombstone != null())
+ ).values({
+ models.RSEFileAssociation.tombstone: datetime(1970, 1, 1)
+ }).execution_options(
  synchronize_session=False
- ).values(
- tombstone=datetime(1970, 1, 1)
  )
  session.execute(stmt)

@@ -1343,8 +1038,8 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.DataIdentifierAssociation.scope == temp_table.scope,
-
+ and_(models.DataIdentifierAssociation.scope == temp_table.scope,
+ models.DataIdentifierAssociation.name == temp_table.name)
  )
  ).execution_options(
  synchronize_session=False
@@ -1360,8 +1055,8 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.CollectionReplica.scope == temp_table.scope,
-
+ and_(models.CollectionReplica.scope == temp_table.scope,
+ models.CollectionReplica.name == temp_table.name)
  )
  ).execution_options(
  synchronize_session=False
@@ -1376,19 +1071,27 @@ def delete_dids(

  if collection_dids:
  if data_in_temp_table is not collection_dids:
-
-
+ stmt = delete(
+ temp_table
+ )
+ session.execute(stmt)
+
+ values = list(collection_dids.values())
+ stmt = insert(
+ temp_table
+ )
+ session.execute(stmt, values)
  data_in_temp_table = collection_dids

  with METRICS.timer('delete_dids.dids_followed'):
  stmt = delete(
- models.
+ models.DidFollowed
  ).where(
  exists(
  select(1)
  ).where(
- models.
-
+ and_(models.DidFollowed.scope == temp_table.scope,
+ models.DidFollowed.name == temp_table.name)
  )
  ).execution_options(
  synchronize_session=False
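Several of the hunks above drive bulk deletes through a correlated EXISTS against the temporary scope/name table rather than an explicit join. A self-contained sketch of that shape on two throwaway SQLite tables (the table names and data are illustrative, not Rucio's schema):

    # Correlated-EXISTS delete on stand-in tables; mirrors the delete(models.DidFollowed) hunk above.
    from sqlalchemy import (Column, MetaData, String, Table, and_, create_engine,
                            delete, exists, insert, select)

    engine = create_engine("sqlite://")
    metadata = MetaData()
    followed = Table("dids_followed", metadata, Column("scope", String), Column("name", String))
    scratch = Table("scratch_scope_name", metadata, Column("scope", String), Column("name", String))
    metadata.create_all(engine)

    with engine.begin() as conn:
        conn.execute(insert(followed), [{"scope": "s", "name": "a"}, {"scope": "s", "name": "b"}])
        conn.execute(insert(scratch), [{"scope": "s", "name": "a"}])
        # Delete every row of dids_followed with a matching (scope, name) in the scratch table.
        stmt = delete(
            followed
        ).where(
            exists(
                select(1)
            ).where(
                and_(followed.c.scope == scratch.c.scope,
                     followed.c.name == scratch.c.name)
            )
        )
        conn.execute(stmt)                           # removes ('s', 'a'), keeps ('s', 'b')
        print(conn.execute(select(followed)).all())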
@@ -1399,9 +1102,9 @@ def delete_dids(
  dids_to_delete_filter = exists(
  select(1)
  ).where(
- models.DataIdentifier.scope == temp_table.scope,
-
-
+ and_(models.DataIdentifier.scope == temp_table.scope,
+ models.DataIdentifier.name == temp_table.name,
+ models.DataIdentifier.did_type.in_([DIDType.CONTAINER, DIDType.DATASET]))
  )

  if archive_dids:
@@ -1418,8 +1121,16 @@ def delete_dids(

  if file_dids:
  if data_in_temp_table is not file_dids:
-
-
+ stmt = delete(
+ temp_table
+ )
+ session.execute(stmt)
+
+ values = list(file_dids.values())
+ stmt = insert(
+ temp_table
+ )
+ session.execute(stmt, values)
  data_in_temp_table = file_dids
  stmt = update(
  models.DataIdentifier
@@ -1427,21 +1138,27 @@ def delete_dids(
  exists(
  select(1)
  ).where(
- models.DataIdentifier.scope == temp_table.scope,
-
+ and_(models.DataIdentifier.scope == temp_table.scope,
+ models.DataIdentifier.name == temp_table.name)
  )
  ).where(
  models.DataIdentifier.did_type == DIDType.FILE
- ).
+ ).values({
+ models.DataIdentifier.expired_at: None
+ }).execution_options(
  synchronize_session=False
- ).values(
- expired_at=None
  )
  session.execute(stmt)


  @transactional_session
- def detach_dids(
+ def detach_dids(
+ scope: "InternalScope",
+ name: str,
+ dids: "Sequence[Mapping[str, Any]]",
+ *,
+ session: "Session"
+ ) -> None:
  """
  Detach data identifier

@@ -1454,11 +1171,10 @@ def detach_dids(scope, name, dids, *, session: "Session"):
  stmt = select(
  models.DataIdentifier
  ).where(
- models.DataIdentifier.scope == scope,
-
-
-
- models.DataIdentifier.did_type == DIDType.DATASET)
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name,
+ or_(models.DataIdentifier.did_type == DIDType.CONTAINER,
+ models.DataIdentifier.did_type == DIDType.DATASET))
  )
  try:
  did = session.execute(stmt).scalar_one()
@@ -1474,9 +1190,9 @@ def detach_dids(scope, name, dids, *, session: "Session"):
  # TODO: should judge target did's status: open, monotonic, close.
  stmt = select(
  models.DataIdentifierAssociation
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifierAssociation.scope == scope,
+ models.DataIdentifierAssociation.name == name)
  ).limit(
  1
  )
@@ -1487,14 +1203,13 @@ def detach_dids(scope, name, dids, *, session: "Session"):
  raise exception.UnsupportedOperation('Self-detach is not valid.')
  child_scope = source['scope']
  child_name = source['name']
-
-
-
-
-
-
-
- ).scalar()
+ curr_stmt = stmt.where(
+ and_(models.DataIdentifierAssociation.child_scope == child_scope,
+ models.DataIdentifierAssociation.child_name == child_name)
+ ).limit(
+ 1
+ )
+ associ_did = session.execute(curr_stmt).scalar()
  if associ_did is None:
  raise exception.DataIdentifierNotFound(f"Data identifier '{child_scope}:{child_name}' not found under '{scope}:{name}'")

@@ -1561,7 +1276,14 @@ def detach_dids(scope, name, dids, *, session: "Session"):


  @stream_session
- def list_new_dids(
+ def list_new_dids(
+ did_type: Union[str, "DIDType"],
+ thread: Optional[int] = None,
+ total_threads: Optional[int] = None,
+ chunk_size: int = 1000,
+ *,
+ session: "Session",
+ ) -> "Iterator[dict[str, Any]]":
  """
  List recent identifiers.

@@ -1577,26 +1299,31 @@ def list_new_dids(did_type, thread=None, total_threads=None, chunk_size=1000, *,
  ).prefix_with(
  "/*+ INDEX(RULES RULES_SCOPE_NAME_IDX) */", dialect='oracle'
  ).where(
- models.DataIdentifier.scope == models.ReplicationRule.scope,
-
-
+ and_(models.DataIdentifier.scope == models.ReplicationRule.scope,
+ models.DataIdentifier.name == models.ReplicationRule.name,
+ models.ReplicationRule.state == RuleState.INJECT)
  )

  select_stmt = select(
  models.DataIdentifier
  ).with_hint(
- models.DataIdentifier,
-
-
+ models.DataIdentifier,
+ 'INDEX(dids DIDS_IS_NEW_IDX)',
+ 'oracle'
  ).where(
-
+ and_(models.DataIdentifier.is_new == true(),
+ ~exists(sub_query))
  )

  if did_type:
  if isinstance(did_type, str):
- select_stmt = select_stmt.
+ select_stmt = select_stmt.where(
+ models.DataIdentifier.did_type == DIDType[did_type]
+ )
  elif isinstance(did_type, Enum):
- select_stmt = select_stmt.
+ select_stmt = select_stmt.where(
+ models.DataIdentifier.did_type == did_type
+ )

  select_stmt = filter_thread_work(session=session, query=select_stmt, total_threads=total_threads, thread_id=thread, hash_variable='name')

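When `did_type` arrives as a string, the new code resolves it through the enum's name lookup (`DIDType[did_type]`) before comparing it to the column. A tiny, runnable illustration of that lookup with a stand-in enum (the real members and values live in Rucio's database constants; these are not them):

    from enum import Enum

    class DIDType(Enum):          # stand-in only; member values here are illustrative
        FILE = 'F'
        DATASET = 'D'
        CONTAINER = 'C'

    did_type = 'DATASET'
    print(DIDType[did_type])                      # DIDType.DATASET, lookup by member name
    print(DIDType[did_type] == DIDType.DATASET)   # True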
@@ -1610,7 +1337,12 @@ def list_new_dids(did_type, thread=None, total_threads=None, chunk_size=1000, *,


  @transactional_session
- def set_new_dids(
+ def set_new_dids(
+ dids: "Sequence[Mapping[str, Any]]",
+ new_flag: Optional[bool],
+ *,
+ session: "Session"
+ ) -> bool:
  """
  Set/reset the flag new

@@ -1624,12 +1356,12 @@ def set_new_dids(dids, new_flag, *, session: "Session"):
  try:
  stmt = update(
  models.DataIdentifier
- ).
- scope
-
- ).values(
- is_new
- ).execution_options(
+ ).where(
+ and_(models.DataIdentifier.scope == did['scope'],
+ models.DataIdentifier.name == did['name'])
+ ).values({
+ models.DataIdentifier.is_new: new_flag
+ }).execution_options(
  synchronize_session=False
  )
  rowcount = session.execute(stmt).rowcount
@@ -1647,7 +1379,12 @@ def set_new_dids(dids, new_flag, *, session: "Session"):


  @stream_session
- def list_content(
+ def list_content(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  List data identifier contents.

@@ -1658,10 +1395,12 @@ def list_content(scope, name, *, session: "Session"):
  stmt = select(
  models.DataIdentifierAssociation
  ).with_hint(
- models.DataIdentifierAssociation,
-
-
-
+ models.DataIdentifierAssociation,
+ 'INDEX(CONTENTS CONTENTS_PK)',
+ 'oracle'
+ ).where(
+ and_(models.DataIdentifierAssociation.scope == scope,
+ models.DataIdentifierAssociation.name == name)
  )
  children_found = False
  for tmp_did in session.execute(stmt).yield_per(5).scalars():
@@ -1674,7 +1413,12 @@ def list_content(scope, name, *, session: "Session"):


  @stream_session
- def list_content_history(
+ def list_content_history(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  List data identifier contents history.

@@ -1685,9 +1429,9 @@ def list_content_history(scope, name, *, session: "Session"):
  try:
  stmt = select(
  models.DataIdentifierAssociationHistory
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifierAssociationHistory.scope == scope,
+ models.DataIdentifierAssociationHistory.name == name)
  )
  for tmp_did in session.execute(stmt).yield_per(5).scalars():
  yield {'scope': tmp_did.child_scope, 'name': tmp_did.child_name,
@@ -1700,31 +1444,52 @@ def list_content_history(scope, name, *, session: "Session"):


  @stream_session
- def list_parent_dids(
+ def list_parent_dids(
+ scope: "InternalScope",
+ name: str,
+ order_by: Optional[list[str]] = None,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  List parent datasets and containers of a did.

  :param scope: The scope.
  :param name: The name.
+ :param order_by: List of parameters to order the query by. Possible values: ['scope', 'name', 'did_type', 'created_at'].
  :param session: The database session.
  :returns: List of dids.
  :rtype: Generator.
  """

+ if order_by is None:
+ order_by = []
+
  stmt = select(
  models.DataIdentifierAssociation.scope,
  models.DataIdentifierAssociation.name,
- models.DataIdentifierAssociation.did_type
-
-
-
+ models.DataIdentifierAssociation.did_type,
+ models.DataIdentifier.created_at
+ ).where(
+ and_(models.DataIdentifierAssociation.child_scope == scope,
+ models.DataIdentifierAssociation.child_name == name,
+ models.DataIdentifier.scope == models.DataIdentifierAssociation.scope,
+ models.DataIdentifier.name == models.DataIdentifierAssociation.name)
+ ).order_by(
+ *order_by
  )
+
  for did in session.execute(stmt).yield_per(5):
  yield {'scope': did.scope, 'name': did.name, 'type': did.did_type}


  @stream_session
- def list_all_parent_dids(
+ def list_all_parent_dids(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  List all parent datasets and containers of a did, no matter on what level.

@@ -1739,9 +1504,9 @@ def list_all_parent_dids(scope, name, *, session: "Session"):
  models.DataIdentifierAssociation.scope,
  models.DataIdentifierAssociation.name,
  models.DataIdentifierAssociation.did_type
- ).
- child_scope
-
+ ).where(
+ and_(models.DataIdentifierAssociation.child_scope == scope,
+ models.DataIdentifierAssociation.child_name == name)
  )
  for did in session.execute(stmt).yield_per(5):
  yield {'scope': did.scope, 'name': did.name, 'type': did.did_type}
@@ -1751,9 +1516,9 @@ def list_all_parent_dids(scope, name, *, session: "Session"):


  def list_child_dids_stmt(
- input_dids_table:
+ input_dids_table: Any,
  did_type: DIDType,
- ):
+ ) -> "Select[tuple[InternalScope, str]]":
  """
  Build and returns a query which recursively lists children dids of type `did_type`
  for the dids given as input in a scope/name (temporary) table.
@@ -1775,11 +1540,9 @@ def list_child_dids_stmt(
  ).join_from(
  input_dids_table,
  models.DataIdentifierAssociation,
- and_(
-
-
- models.DataIdentifierAssociation.did_type.in_(dids_to_resolve),
- ),
+ and_(models.DataIdentifierAssociation.scope == input_dids_table.scope,
+ models.DataIdentifierAssociation.name == input_dids_table.name,
+ models.DataIdentifierAssociation.did_type.in_(dids_to_resolve)),
  ).cte(
  recursive=True,
  )
@@ -1792,9 +1555,9 @@ def list_child_dids_stmt(
  models.DataIdentifierAssociation.child_name,
  models.DataIdentifierAssociation.child_type,
  ).where(
- models.DataIdentifierAssociation.scope == initial_set.c.child_scope,
-
-
+ and_(models.DataIdentifierAssociation.scope == initial_set.c.child_scope,
+ models.DataIdentifierAssociation.name == initial_set.c.child_name,
+ models.DataIdentifierAssociation.did_type.in_(dids_to_resolve))
  )
  )

@@ -1812,7 +1575,7 @@ def list_one_did_childs_stmt(
  scope: "InternalScope",
  name: str,
  did_type: DIDType,
- ):
+ ) -> "Select[tuple[InternalScope, str]]":
  """
  Returns the sqlalchemy query for recursively fetching the child dids of type
  'did_type' for the input did.
@@ -1832,9 +1595,9 @@ def list_one_did_childs_stmt(
  models.DataIdentifierAssociation.child_name,
  models.DataIdentifierAssociation.child_type,
  ).where(
- models.DataIdentifierAssociation.scope == scope,
-
-
+ and_(models.DataIdentifierAssociation.scope == scope,
+ models.DataIdentifierAssociation.name == name,
+ models.DataIdentifierAssociation.did_type.in_(dids_to_resolve))
  ).cte(
  recursive=True,
  )
@@ -1847,9 +1610,9 @@ def list_one_did_childs_stmt(
  models.DataIdentifierAssociation.child_name,
  models.DataIdentifierAssociation.child_type,
  ).where(
- models.DataIdentifierAssociation.scope == initial_set.c.child_scope,
-
-
+ and_(models.DataIdentifierAssociation.scope == initial_set.c.child_scope,
+ models.DataIdentifierAssociation.name == initial_set.c.child_name,
+ models.DataIdentifierAssociation.did_type.in_(dids_to_resolve))
  )
  )

@@ -1869,7 +1632,7 @@ def list_child_datasets(
  name: str,
  *,
  session: "Session"
- ):
+ ) -> list[dict[str, Union["InternalScope", str]]]:
  """
  List all child datasets of a container.

@@ -1888,7 +1651,31 @@ def list_child_datasets(


  @stream_session
- def
+ def bulk_list_files(
+ dids: "Iterable[Mapping[str, Any]]",
+ long: bool = False,
+ *,
+ session: "Session"
+ ) -> "Optional[Iterator[dict[str, Any]]]":
+ """
+ List file contents of a list of data identifier.
+
+ :param dids: A list of DIDs.
+ :param long: A boolean to choose if more metadata are returned or not.
+ :param session: The database session in use.
+ """
+ for did in dids:
+ try:
+ for file_dict in list_files(scope=did['scope'], name=did['name'], long=long, session=session):
+ file_dict['parent_scope'] = did['scope']
+ file_dict['parent_name'] = did['name']
+ yield file_dict
+ except exception.DataIdentifierNotFound:
+ pass
+
+
+ @stream_session
+ def list_files(scope: "InternalScope", name: str, long: bool = False, *, session: "Session") -> "Iterator[dict[str, Any]]":
  """
  List data identifier file contents.

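The new `bulk_list_files` helper loops over the requested DIDs, re-yields every record produced by `list_files` annotated with its parent scope and name, and silently skips DIDs that do not exist. A self-contained sketch of that generator pattern with plain dictionaries standing in for the database-backed lookup:

    # Generic re-yield-with-parent pattern; the catalogue, KeyError and all names are stand-ins.
    def bulk_list(parents, lookup):
        for parent in parents:
            try:
                for child in lookup(parent):
                    yield {'parent_name': parent, **child}
            except KeyError:      # plays the role of exception.DataIdentifierNotFound
                continue

    catalogue = {'dsA': [{'name': 'f1'}, {'name': 'f2'}], 'dsB': [{'name': 'f3'}]}
    print(list(bulk_list(['dsA', 'missing', 'dsB'], lambda parent: catalogue[parent])))
    # -> records for dsA and dsB; the missing parent is skipped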
@@ -1907,11 +1694,13 @@ def list_files(scope, name, long=False, *, session: "Session"):
  models.DataIdentifier.events,
  models.DataIdentifier.lumiblocknr,
  models.DataIdentifier.did_type
- ).filter_by(
- scope=scope,
- name=name
  ).with_hint(
- models.DataIdentifier,
+ models.DataIdentifier,
+ 'INDEX(DIDS DIDS_PK)',
+ 'oracle'
+ ).where(
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name)
  )
  did = session.execute(stmt).one()

@@ -1930,7 +1719,9 @@ def list_files(scope, name, long=False, *, session: "Session"):
  models.DataIdentifierAssociation.child_name,
  models.DataIdentifierAssociation.child_type
  ).with_hint(
- models.DataIdentifierAssociation,
+ models.DataIdentifierAssociation,
+ 'INDEX(CONTENTS CONTENTS_PK)',
+ 'oracle'
  )

  if long:
@@ -1944,10 +1735,12 @@ def list_files(scope, name, long=False, *, session: "Session"):
  models.DataIdentifierAssociation.events,
  models.DataIdentifier.lumiblocknr
  ).with_hint(
- models.DataIdentifierAssociation,
+ models.DataIdentifierAssociation,
+ 'INDEX_RS_ASC(DIDS DIDS_PK) INDEX_RS_ASC(CONTENTS CONTENTS_PK) NO_INDEX_FFS(CONTENTS CONTENTS_PK)',
+ 'oracle'
  ).where(
- models.DataIdentifier.scope == models.DataIdentifierAssociation.child_scope,
-
+ and_(models.DataIdentifier.scope == models.DataIdentifierAssociation.child_scope,
+ models.DataIdentifier.name == models.DataIdentifierAssociation.child_name)
  )
  else:
  dst_cnt_query = select(
@@ -1960,7 +1753,9 @@ def list_files(scope, name, long=False, *, session: "Session"):
  models.DataIdentifierAssociation.events,
  bindparam("lumiblocknr", None)
  ).with_hint(
- models.DataIdentifierAssociation,
+ models.DataIdentifierAssociation,
+ 'INDEX(CONTENTS CONTENTS_PK)',
+ 'oracle'
  )

  dids = [(scope, name, did[7]), ]
@@ -1985,7 +1780,10 @@ def list_files(scope, name, long=False, *, session: "Session"):
  'guid': guid and guid.upper(),
  'events': events}
  else:
- stmt = cnt_query.
+ stmt = cnt_query.where(
+ and_(models.DataIdentifierAssociation.scope == s,
+ models.DataIdentifierAssociation.name == n)
+ )
  for child_scope, child_name, child_type in session.execute(stmt).yield_per(500):
  dids.append((child_scope, child_name, child_type))

@@ -1994,7 +1792,13 @@ def list_files(scope, name, long=False, *, session: "Session"):


  @stream_session
- def scope_list(
+ def scope_list(
+ scope: "InternalScope",
+ name: Optional[str] = None,
+ recursive: bool = False,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  List data identifiers in a scope.

@@ -2011,18 +1815,17 @@ def scope_list(scope, name=None, recursive=False, *, session: "Session"):
  def __topdids(scope):
  sub_stmt = select(
  models.DataIdentifierAssociation.child_name
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifierAssociation.scope == scope,
+ models.DataIdentifierAssociation.child_scope == scope)
  )
  stmt = select(
  models.DataIdentifier.name,
  models.DataIdentifier.did_type,
  models.DataIdentifier.bytes
- ).filter_by(
- scope=scope
  ).where(
-
+ and_(models.DataIdentifier.scope == scope,
+ not_(models.DataIdentifier.name.in_(sub_stmt)))
  ).order_by(
  models.DataIdentifier.name
  )
@@ -2032,12 +1835,12 @@ def scope_list(scope, name=None, recursive=False, *, session: "Session"):
  else:
  yield {'scope': scope, 'name': row.name, 'type': row.did_type, 'parent': None, 'level': 0, 'bytes': None}

- def __diddriller(pdid):
+ def __diddriller(pdid: "Mapping[str, Any]") -> "Iterator[dict[str, Any]]":
  stmt = select(
  models.DataIdentifierAssociation
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifierAssociation.scope == pdid['scope'],
+ models.DataIdentifierAssociation.name == pdid['name'])
  ).order_by(
  models.DataIdentifierAssociation.child_name
  )
@@ -2054,9 +1857,9 @@ def scope_list(scope, name=None, recursive=False, *, session: "Session"):
  else:
  stmt = select(
  models.DataIdentifier
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name)
  ).limit(
  1
  )
@@ -2078,15 +1881,22 @@ def scope_list(scope, name=None, recursive=False, *, session: "Session"):


  @read_session
- def __get_did(
+ def __get_did(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session"
+ ) -> "models.DataIdentifier":
  try:
  stmt = select(
  models.DataIdentifier
  ).with_hint(
- models.DataIdentifier,
+ models.DataIdentifier,
+ 'INDEX(DIDS DIDS_PK)',
+ 'oracle'
  ).where(
- models.DataIdentifier.scope == scope,
-
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name)
  )
  return session.execute(stmt).scalar_one()
  except NoResultFound:
@@ -2122,64 +1932,18 @@ def get_did(scope: "InternalScope", name: str, dynamic_depth: "Optional[DIDType]
  'length': length, 'bytes': bytes_}


- @read_session
- def get_files(files, *, session: "Session"):
- """
- Retrieve a list of files.
-
- :param files: A list of files (dictionaries).
- :param session: The database session in use.
- """
- file_condition = []
- for file in files:
- file_condition.append(and_(models.DataIdentifier.scope == file['scope'], models.DataIdentifier.name == file['name']))
-
- stmt = select(
- models.DataIdentifier.scope,
- models.DataIdentifier.name,
- models.DataIdentifier.bytes,
- models.DataIdentifier.guid,
- models.DataIdentifier.events,
- models.DataIdentifier.availability,
- models.DataIdentifier.adler32,
- models.DataIdentifier.md5
- ).where(
- models.DataIdentifier.did_type == DIDType.FILE
- ).with_hint(
- models.DataIdentifier, "INDEX(DIDS DIDS_PK)", 'oracle'
- ).where(
- or_(*file_condition)
- )
-
- rows = []
- for row in session.execute(stmt):
- file = row._asdict()
- rows.append(file)
- if file['availability'] == DIDAvailability.LOST:
- raise exception.UnsupportedOperation('File %s:%s is LOST and cannot be attached' % (file['scope'], file['name']))
- # Check meta-data, if provided
- for f in files:
- if f['name'] == file['name'] and f['scope'] == file['scope']:
- for key in ['bytes', 'adler32', 'md5']:
- if key in f and str(f.get(key)) != str(file[key]):
- raise exception.FileConsistencyMismatch(key + " mismatch for '%(scope)s:%(name)s': " % file + str(f.get(key)) + '!=' + str(file[key]))
- break
-
- if len(rows) != len(files):
- for file in files:
- found = False
- for row in rows:
- if row['scope'] == file['scope'] and row['name'] == file['name']:
- found = True
- break
- if not found:
- raise exception.DataIdentifierNotFound("Data identifier '%(scope)s:%(name)s' not found" % file)
- return rows
-
-
  @transactional_session
- def set_metadata(
-
+ def set_metadata(
+ scope: "InternalScope",
+ name: str,
+ key: str,
+ value: Any,
+ did_type: Optional[DIDType] = None,
+ did: Optional["Mapping[str, Any]"] = None,
+ recursive: bool = False,
+ *,
+ session: "Session"
+ ) -> None:
  """
  Add single metadata to a data identifier.

@@ -2195,7 +1959,14 @@ def set_metadata(scope, name, key, value, did_type=None, did=None,


  @transactional_session
- def set_metadata_bulk(
+ def set_metadata_bulk(
+ scope: "InternalScope",
+ name: str,
+ meta: "Mapping[str, Any]",
+ recursive: bool = False,
+ *,
+ session: "Session"
+ ) -> None:
  """
  Add metadata to a data identifier.

@@ -2209,7 +1980,12 @@ def set_metadata_bulk(scope, name, meta, recursive=False, *, session: "Session")


  @transactional_session
- def set_dids_metadata_bulk(
+ def set_dids_metadata_bulk(
+ dids: "Iterable[Mapping[str, Any]]",
+ recursive: bool = False,
+ *,
+ session: "Session"
+ ) -> None:
  """
  Add metadata to a list of data identifiers.

@@ -2223,7 +1999,13 @@ def set_dids_metadata_bulk(dids, recursive=False, *, session: "Session"):


  @read_session
- def get_metadata(
+ def get_metadata(
+ scope: "InternalScope",
+ name: str,
+ plugin: str = 'DID_COLUMN',
+ *,
+ session: "Session"
+ ) -> dict[str, Any]:
  """
  Get data identifier metadata

@@ -2237,7 +2019,11 @@ def get_metadata(scope, name, plugin='DID_COLUMN', *, session: "Session"):


  @stream_session
- def list_parent_dids_bulk(
+ def list_parent_dids_bulk(
+ dids: "Iterable[Mapping[str, Any]]",
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  List parent datasets and containers of a did.

@@ -2269,7 +2055,12 @@ def list_parent_dids_bulk(dids, *, session: "Session"):


  @stream_session
- def get_metadata_bulk(
+ def get_metadata_bulk(
+ dids: list["Mapping[Any, Any]"],
+ inherit: bool = False,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  Get metadata for a list of dids
  :param dids: A list of dids.
@@ -2326,7 +2117,9 @@ def get_metadata_bulk(dids, inherit=False, *, session: "Session"):
  stmt = select(
  models.DataIdentifier
  ).with_hint(
- models.DataIdentifier,
+ models.DataIdentifier,
+ 'INDEX(DIDS DIDS_PK)',
+ 'oracle'
  ).where(
  or_(*chunk)
  )
@@ -2337,7 +2130,13 @@ def get_metadata_bulk(dids, inherit=False, *, session: "Session"):


  @transactional_session
- def delete_metadata(
+ def delete_metadata(
+ scope: "InternalScope",
+ name: str,
+ key: str,
+ *,
+ session: "Session"
+ ) -> None:
  """
  Delete a key from the metadata column

@@ -2349,7 +2148,13 @@ def delete_metadata(scope, name, key, *, session: "Session"):


  @transactional_session
- def set_status(
+ def set_status(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session",
+ **kwargs
+ ) -> None:
  """
  Set data identifier status

@@ -2363,14 +2168,13 @@ def set_status(scope, name, *, session: "Session", **kwargs):

  update_stmt = update(
  models.DataIdentifier
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name,
+ or_(models.DataIdentifier.did_type == DIDType.CONTAINER,
+ models.DataIdentifier.did_type == DIDType.DATASET))
  ).prefix_with(
  "/*+ INDEX(DIDS DIDS_PK) */", dialect='oracle'
- ).where(
- or_(models.DataIdentifier.did_type == DIDType.CONTAINER,
- models.DataIdentifier.did_type == DIDType.DATASET)
  ).execution_options(
  synchronize_session=False
  )
@@ -2380,10 +2184,9 @@ def set_status(scope, name, *, session: "Session", **kwargs):
  raise exception.UnsupportedStatus(f'The status {k} is not a valid data identifier status.')
  if k == 'open':
  if not kwargs[k]:
- update_stmt = update_stmt.
- is_open
-
- models.DataIdentifier.did_type != DIDType.FILE
+ update_stmt = update_stmt.where(
+ and_(models.DataIdentifier.is_open == true(),
+ models.DataIdentifier.did_type != DIDType.FILE)
  )
  values['is_open'], values['closed_at'] = False, datetime.utcnow()
  values['bytes'], values['length'], values['events'] = __resolve_bytes_length_events_did(did=__get_did(scope=scope, name=name, session=session),
@@ -2391,13 +2194,13 @@ def set_status(scope, name, *, session: "Session", **kwargs):
  # Update datasetlocks as well
  stmt = update(
  models.DatasetLock
- ).
- scope
-
- ).values(
- length
- bytes
- )
+ ).where(
+ and_(models.DatasetLock.scope == scope,
+ models.DatasetLock.name == name)
+ ).values({
+ models.DatasetLock.length: values['length'],
+ models.DatasetLock.bytes: values['bytes']
+ })
  session.execute(stmt)

  # Generate a message
@@ -2417,10 +2220,9 @@ def set_status(scope, name, *, session: "Session", **kwargs):

  else:
  # Set status to open only for privileged accounts
- update_stmt = update_stmt.
- is_open
-
- models.DataIdentifier.did_type != DIDType.FILE
+ update_stmt = update_stmt.where(
+ and_(models.DataIdentifier.is_open == false(),
+ models.DataIdentifier.did_type != DIDType.FILE)
  )
  values['is_open'] = True

@@ -2429,15 +2231,17 @@ def set_status(scope, name, *, session: "Session", **kwargs):
  message['vo'] = scope.vo
  add_message('OPEN', message, session=session)

- update_stmt = update_stmt.values(
+ update_stmt = update_stmt.values(
+ values
+ )
  rowcount = session.execute(update_stmt).rowcount

  if not rowcount:
  stmt = select(
  models.DataIdentifier
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name)
  )
  try:
  session.execute(stmt).scalar_one()
@@ -2449,17 +2253,28 @@ def set_status(scope, name, *, session: "Session", **kwargs):
  if not values['is_open']:
  stmt = select(
  models.ReplicationRule
- ).
- scope
-
+ ).where(
+ and_(models.ReplicationRule.scope == scope,
+ models.ReplicationRule.name == name)
  )
  for rule in session.execute(stmt).scalars():
  rucio.core.rule.generate_rule_notifications(rule=rule, session=session)


  @read_session
- def list_dids(
-
+ def list_dids(
+ scope: "InternalScope",
+ filters: "Mapping[Any, Any]",
+ did_type: Literal['all', 'collection', 'dataset', 'container', 'file'] = 'collection',
+ ignore_case: bool = False,
+ limit: Optional[int] = None,
+ offset: Optional[int] = None,
+ long: bool = False,
+ recursive: bool = False,
+ ignore_dids: Optional["Sequence[str]"] = None,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Any]]":
  """
  Search data identifiers.

@@ -2478,7 +2293,12 @@ def list_dids(scope, filters, did_type='collection', ignore_case=False, limit=No


  @read_session
- def get_did_atime(
+ def get_did_atime(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session"
+ ) -> datetime:
  """
  Get the accessed_at timestamp for a did. Just for testing.
  :param scope: the scope name.
@@ -2489,15 +2309,20 @@ def get_did_atime(scope, name, *, session: "Session"):
  """
  stmt = select(
  models.DataIdentifier.accessed_at
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name)
  )
  return session.execute(stmt).one()[0]


  @read_session
- def get_did_access_cnt(
+ def get_did_access_cnt(
+ scope: "InternalScope",
+ name: str,
+ *,
+ session: "Session"
+ ) -> int:
  """
  Get the access_cnt for a did. Just for testing.
  :param scope: the scope name.
@@ -2508,15 +2333,19 @@ def get_did_access_cnt(scope, name, *, session: "Session"):
  """
  stmt = select(
  models.DataIdentifier.access_cnt
- ).
- scope
-
+ ).where(
+ and_(models.DataIdentifier.scope == scope,
+ models.DataIdentifier.name == name)
  )
  return session.execute(stmt).one()[0]


  @stream_session
- def get_dataset_by_guid(
+ def get_dataset_by_guid(
+ guid: str,
+ *,
+ session: "Session"
+ ) -> "Iterator[dict[str, Union[InternalScope, str]]]":
  """
  Get the parent datasets for a given GUID.
  :param guid: The GUID.
@@ -2526,22 +2355,26 @@ def get_dataset_by_guid(guid, *, session: "Session"):
  """
  stmt = select(
  models.DataIdentifier
- ).filter_by(
- guid=guid,
- did_type=DIDType.FILE
  ).with_hint(
- models.ReplicaLock,
+ models.ReplicaLock,
+ 'INDEX(DIDS_GUIDS_IDX)',
+ 'oracle'
+ ).where(
+ and_(models.DataIdentifier.guid == guid,
+ models.DataIdentifier.did_type == DIDType.FILE)
  )
  try:
  r = session.execute(stmt).scalar_one()
  datasets_stmt = select(
  models.DataIdentifierAssociation.scope,
  models.DataIdentifierAssociation.name
- ).filter_by(
- child_scope=r.scope,
- child_name=r.name
  ).with_hint(
- models.DataIdentifierAssociation,
+ models.DataIdentifierAssociation,
+ 'INDEX(CONTENTS CONTENTS_CHILD_SCOPE_NAME_IDX)',
+ 'oracle'
+ ).where(
+ and_(models.DataIdentifierAssociation.child_scope == r.scope,
+ models.DataIdentifierAssociation.child_name == r.name)
  )

  except NoResultFound:
@@ -2551,7 +2384,11 @@ def get_dataset_by_guid(guid, *, session: "Session"):


  @transactional_session
- def touch_dids(
+ def touch_dids(
+ dids: "Iterable[Mapping[str, Any]]",
+ *,
+ session: "Session"
+ ) -> bool:
  """
  Update the accessed_at timestamp and the access_cnt of the given dids.

@@ -2567,15 +2404,15 @@ def touch_dids(dids, *, session: "Session"):
  for did in dids:
  stmt = update(
  models.DataIdentifier
- ).
- scope
-
-
- ).values(
- accessed_at
- access_cnt
-
- ).execution_options(
+ ).where(
+ and_(models.DataIdentifier.scope == did['scope'],
+ models.DataIdentifier.name == did['name'],
+ models.DataIdentifier.did_type == did['type'])
+ ).values({
+ models.DataIdentifier.accessed_at: did.get('accessed_at') or now,
+ models.DataIdentifier.access_cnt: case((models.DataIdentifier.access_cnt == none_value, 1),
+ else_=(models.DataIdentifier.access_cnt + 1))  # type: ignore
+ }).execution_options(
  synchronize_session=False
  )
  session.execute(stmt)
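The rewritten `touch_dids` folds the NULL-versus-increment logic for `access_cnt` into a single SQL CASE expression. A small sketch of that `case()` construct on a stand-in table (only the expression shape matches the hunk; `null()` plays the role of the `none_value` comparison above):

    # Stand-in table; the case() expression mirrors the access_cnt update above.
    from sqlalchemy import Column, Integer, MetaData, String, Table, case, null, update

    metadata = MetaData()
    dids = Table("dids", metadata, Column("name", String), Column("access_cnt", Integer))

    stmt = update(
        dids
    ).values({
        # Start at 1 while access_cnt is still NULL, otherwise increment it.
        dids.c.access_cnt: case((dids.c.access_cnt == null(), 1),
                                else_=dids.c.access_cnt + 1)
    })
    print(stmt)  # UPDATE dids SET access_cnt=CASE WHEN (dids.access_cnt IS NULL) THEN ... ELSE dids.access_cnt + ... END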
@@ -2586,7 +2423,16 @@ def touch_dids(dids, *, session: "Session"):
|
|
|
2586
2423
|
|
|
2587
2424
|
|
|
2588
2425
|
@transactional_session
|
|
2589
|
-
def create_did_sample(
|
|
2426
|
+
def create_did_sample(
|
|
2427
|
+
input_scope: "InternalScope",
|
|
2428
|
+
input_name: str,
|
|
2429
|
+
output_scope: "InternalScope",
|
|
2430
|
+
output_name: str,
|
|
2431
|
+
account: "InternalAccount",
|
|
2432
|
+
nbfiles: str,
|
|
2433
|
+
*,
|
|
2434
|
+
session: "Session"
|
|
2435
|
+
) -> None:
|
|
2590
2436
|
"""
|
|
2591
2437
|
Create a sample from an input collection.
|
|
2592
2438
|
|
|
@@ -2629,8 +2475,8 @@ def __resolve_bytes_length_events_did(
|
|
|
2629
2475
|
func.sum(models.DataIdentifierAssociation.bytes),
|
|
2630
2476
|
func.sum(models.DataIdentifierAssociation.events),
|
|
2631
2477
|
).where(
|
|
2632
|
-
models.DataIdentifierAssociation.scope == did.scope,
|
|
2633
|
-
|
|
2478
|
+
and_(models.DataIdentifierAssociation.scope == did.scope,
|
|
2479
|
+
models.DataIdentifierAssociation.name == did.name)
|
|
2634
2480
|
)
|
|
2635
2481
|
elif did.did_type == DIDType.CONTAINER and dynamic_depth == DIDType.DATASET:
|
|
2636
2482
|
child_did_stmt = list_one_did_childs_stmt(did.scope, did.name, did_type=DIDType.DATASET).subquery()
|
|
@@ -2672,7 +2518,7 @@ def __resolve_bytes_length_events_did(


 @transactional_session
-def resurrect(dids, *, session: "Session"):
+def resurrect(dids: "Iterable[Mapping[str, Any]]", *, session: "Session") -> None:
     """
     Resurrect data identifiers.

@@ -2684,10 +2530,12 @@ def resurrect(dids, *, session: "Session"):
             stmt = select(
                 models.DeletedDataIdentifier
             ).with_hint(
-                models.DeletedDataIdentifier,
-
-
-
+                models.DeletedDataIdentifier,
+                'INDEX(DELETED_DIDS DELETED_DIDS_PK)',
+                'oracle'
+            ).where(
+                and_(models.DeletedDataIdentifier.scope == did['scope'],
+                     models.DeletedDataIdentifier.name == did['name'])
             )
             del_did = session.execute(stmt).scalar_one()
         except NoResultFound:
@@ -2695,13 +2543,13 @@ def resurrect(dids, *, session: "Session"):
             stmt = update(
                 models.DataIdentifier
             ).where(
-                models.DataIdentifier.scope == did['scope'],
-
-
-            ).
+                and_(models.DataIdentifier.scope == did['scope'],
+                     models.DataIdentifier.name == did['name'],
+                     models.DataIdentifier.expired_at < datetime.utcnow())
+            ).values({
+                models.DataIdentifier.expired_at: None
+            }).execution_options(
                 synchronize_session=False
-            ).values(
-                expired_at=None
             )
             rowcount = session.execute(stmt).rowcount
             if rowcount:
@@ -2720,18 +2568,22 @@ def resurrect(dids, *, session: "Session"):
                 models.DeletedDataIdentifier
             ).prefix_with(
                 "/*+ INDEX(DELETED_DIDS DELETED_DIDS_PK) */", dialect='oracle'
-            ).
-                scope
-
+            ).where(
+                and_(models.DeletedDataIdentifier.scope == did['scope'],
+                     models.DeletedDataIdentifier.name == did['name'])
             )
             session.execute(stmt)

-            models.DataIdentifier(**kargs)
-                save(session=session, flush=False)
+            models.DataIdentifier(**kargs).save(session=session, flush=False)


 @stream_session
-def list_archive_content(scope, name, *, session: "Session"):
+def list_archive_content(
+        scope: "InternalScope",
+        name,
+        *,
+        session: "Session"
+) -> "Iterator[dict[str, Any]]":
     """
     List archive contents.

@@ -2743,10 +2595,12 @@ def list_archive_content(scope, name, *, session: "Session"):
         stmt = select(
             models.ConstituentAssociation
         ).with_hint(
-            models.ConstituentAssociation,
-
-
-
+            models.ConstituentAssociation,
+            'INDEX(ARCHIVE_CONTENTS ARCH_CONTENTS_PK)',
+            'oracle'
+        ).where(
+            and_(models.ConstituentAssociation.scope == scope,
+                 models.ConstituentAssociation.name == name)
         )

         for tmp_did in session.execute(stmt).yield_per(5).scalars():
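Several hunks in this file (here and in resurrect above) attach Oracle index hints through Select.with_hint(). As a quick illustration of why that stays safe on other backends, the sketch below compiles one hinted statement for two dialects; ArchiveContent and its table name are made-up stand-ins for models.ConstituentAssociation, not rucio code.

# Sketch of a dialect-scoped index hint: the hint text is only rendered for 'oracle'.
from sqlalchemy import Column, String, select
from sqlalchemy.dialects import oracle, sqlite
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class ArchiveContent(Base):
    __tablename__ = 'archive_contents'
    scope = Column(String(25), primary_key=True)
    name = Column(String(255), primary_key=True)


stmt = select(
    ArchiveContent
).with_hint(
    ArchiveContent,
    'INDEX(ARCHIVE_CONTENTS ARCH_CONTENTS_PK)',
    'oracle'
).where(
    ArchiveContent.scope == 'user.jdoe'
)

# The Oracle compilation carries the /*+ ... */ hint; the SQLite one is unchanged.
print(stmt.compile(dialect=oracle.dialect()))
print(stmt.compile(dialect=sqlite.dialect()))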
@@ -2757,7 +2611,13 @@ def list_archive_content(scope, name, *, session: "Session"):


 @transactional_session
-def add_did_to_followed(scope, name, account, *, session: "Session"):
+def add_did_to_followed(
+        scope: "InternalScope",
+        name: str,
+        account: "InternalAccount",
+        *,
+        session: "Session"
+) -> None:
     """
     Mark a did as followed by the given account

@@ -2771,7 +2631,12 @@ def add_did_to_followed(scope, name, account, *, session: "Session"):


 @transactional_session
-def add_dids_to_followed(dids, account, *, session: "Session"):
+def add_dids_to_followed(
+        dids: "Iterable[Mapping[str, Any]]",
+        account: "InternalAccount",
+        *,
+        session: "Session"
+) -> None:
     """
     Bulk mark datasets as followed

@@ -2784,14 +2649,14 @@ def add_dids_to_followed(dids, account, *, session: "Session"):
             # Get the did details corresponding to the scope and name passed.
             stmt = select(
                 models.DataIdentifier
-            ).
-                scope
-
+            ).where(
+                and_(models.DataIdentifier.scope == did['scope'],
+                     models.DataIdentifier.name == did['name'])
             )
             did = session.execute(stmt).scalar_one()
             # Add the queried to the followed table.
-            new_did_followed = models.
-
+            new_did_followed = models.DidFollowed(scope=did.scope, name=did.name, account=account,
+                                                  did_type=did.did_type)

             new_did_followed.save(session=session, flush=False)

@@ -2801,7 +2666,12 @@ def add_dids_to_followed(dids, account, *, session: "Session"):


 @stream_session
-def get_users_following_did(scope, name, *, session: "Session"):
+def get_users_following_did(
+        scope: "InternalScope",
+        name: str,
+        *,
+        session: "Session"
+) -> "Iterator[dict[str, InternalAccount]]":
     """
     Return list of users following a did

@@ -2811,10 +2681,10 @@ def get_users_following_did(scope, name, *, session: "Session"):
     """
     try:
         stmt = select(
-            models.
-        ).
-            scope
-
+            models.DidFollowed
+        ).where(
+            and_(models.DidFollowed.scope == scope,
+                 models.DidFollowed.name == name)
         )
         for user in session.execute(stmt).scalars().all():
             # Return a dictionary of users to be rendered as json.
@@ -2825,7 +2695,13 @@ def get_users_following_did(scope, name, *, session: "Session"):


 @transactional_session
-def remove_did_from_followed(scope, name, account, *, session: "Session"):
+def remove_did_from_followed(
+        scope: "InternalScope",
+        name: str,
+        account: "InternalAccount",
+        *,
+        session: "Session"
+) -> None:
     """
     Mark a did as not followed

@@ -2839,7 +2715,12 @@ def remove_did_from_followed(scope, name, account, *, session: "Session"):


 @transactional_session
-def remove_dids_from_followed(dids, account, *, session: "Session"):
+def remove_dids_from_followed(
+        dids: "Iterable[Mapping[str, Any]]",
+        account: "InternalAccount",
+        *,
+        session: "Session"
+) -> None:
     """
     Bulk mark datasets as not followed

@@ -2850,11 +2731,11 @@ def remove_dids_from_followed(dids, account, *, session: "Session"):
     try:
         for did in dids:
             stmt = delete(
-                models.
-            ).
-                scope
-
-
+                models.DidFollowed
+            ).where(
+                and_(models.DidFollowed.scope == did['scope'],
+                     models.DidFollowed.name == did['name'],
+                     models.DidFollowed.account == account)
             ).execution_options(
                 synchronize_session=False
             )
@@ -2864,9 +2745,16 @@ def remove_dids_from_followed(dids, account, *, session: "Session"):


 @transactional_session
-def trigger_event(scope, name, event_type, payload, *, session: "Session"):
+def trigger_event(
+        scope: "InternalScope",
+        name: str,
+        event_type: str,
+        payload: str,
+        *,
+        session: "Session"
+) -> None:
     """
-    Records changes
+    Records changes occurring in the did to the FollowEvent table

     :param scope: The scope name.
     :param name: The data identifier name.
@@ -2876,15 +2764,15 @@ def trigger_event(scope, name, event_type, payload, *, session: "Session"):
     """
     try:
         stmt = select(
-            models.
-        ).
-            scope
-
+            models.DidFollowed
+        ).where(
+            and_(models.DidFollowed.scope == scope,
+                 models.DidFollowed.name == name)
         )
         for did in session.execute(stmt).scalars().all():
-            # Create a new event using
-            new_event = models.
-
+            # Create a new event using the specified parameters.
+            new_event = models.FollowEvent(scope=scope, name=name, account=did.account,
+                                           did_type=did.did_type, event_type=event_type, payload=payload)
             new_event.save(session=session, flush=False)

         session.flush()
@@ -2893,20 +2781,25 @@ def trigger_event(scope, name, event_type, payload, *, session: "Session"):


 @read_session
-def create_reports(total_workers, worker_number, *, session: "Session"):
+def create_reports(
+        total_workers: int,
+        worker_number: int,
+        *,
+        session: "Session"
+) -> None:
     """
     Create a summary report of the events affecting a dataset, for its followers.

     :param session: The database session in use.
     """
-    # Query the
+    # Query the FollowEvent table
     stmt = select(
-        models.
+        models.FollowEvent
     ).order_by(
-        models.
+        models.FollowEvent.created_at
     )

-    # Use
+    # Use heartbeat mechanism to select a chunk of events based on the hashed account
     stmt = filter_thread_work(session=session, query=stmt, total_threads=total_workers, thread_id=worker_number, hash_variable='account')

     try:
@@ -2928,11 +2821,11 @@ def create_reports(total_workers, worker_number, *, session: "Session"):
             account = event.account
             # Clean up the event after creating the report
             stmt = delete(
-                models.
-            ).
-                scope
-
-
+                models.FollowEvent
+            ).where(
+                and_(models.FollowEvent.scope == event.scope,
+                     models.FollowEvent.name == event.name,
+                     models.FollowEvent.account == event.account)
             ).execution_options(
                 synchronize_session=False
             )
@@ -2942,8 +2835,8 @@ def create_reports(total_workers, worker_number, *, session: "Session"):
             # Get the email associated with the account.
             stmt = select(
                 models.Account.email
-            ).
-                account
+            ).where(
+                models.Account.account == account
             )
             email = session.execute(stmt).scalar()
             add_message('email', {'to': email,
@@ -2955,7 +2848,12 @@ def create_reports(total_workers, worker_number, *, session: "Session"):


 @transactional_session
-def insert_content_history(filter_, did_created_at, *, session: "Session"):
+def insert_content_history(
+        filter_: "ColumnExpressionArgument[bool]",
+        did_created_at: datetime,
+        *,
+        session: "Session"
+) -> None:
     """
     Insert into content history a list of did

@@ -3006,7 +2904,7 @@ def insert_content_history(filter_, did_created_at, *, session: "Session"):


 @transactional_session
-def insert_deleted_dids(filter_, *, session: "Session"):
+def insert_deleted_dids(filter_: "ColumnExpressionArgument[bool]", *, session: "Session") -> None:
     """
     Insert into deleted_dids a list of did

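insert_content_history and insert_deleted_dids now annotate their filter_ argument as ColumnExpressionArgument[bool], i.e. callers hand over a ready-made boolean SQL expression. Below is a minimal sketch of that calling convention under stated assumptions: the Did model, its table name and the expired_before() helper are hypothetical, and the consumer simply applies the expression with .where(), which is how such a filter is typically used rather than a claim about rucio's internals.

# Sketch: build a boolean column expression once, pass it to a helper that applies it verbatim.
from datetime import datetime, timedelta

from sqlalchemy import Column, DateTime, String, and_, create_engine, select
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Did(Base):
    __tablename__ = 'dids_sketch'
    scope = Column(String(25), primary_key=True)
    name = Column(String(255), primary_key=True)
    expired_at = Column(DateTime)


def expired_before(scope, cutoff):
    # The kind of expression a caller would pass as filter_ (a ColumnExpressionArgument[bool]).
    return and_(Did.scope == scope, Did.expired_at < cutoff)


def list_matching(filter_, *, session):
    # Applies the caller-supplied filter as-is; the helper adds no conditions of its own.
    stmt = select(Did).where(filter_)
    return session.execute(stmt).scalars().all()


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Did(scope='user.jdoe', name='old_file', expired_at=datetime.utcnow() - timedelta(days=1)))
    session.commit()
    print([d.name for d in list_matching(expired_before('user.jdoe', datetime.utcnow()), session=session)])  # -> ['old_file']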