rucio 35.7.0__py3-none-any.whl → 37.0.0rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of rucio might be problematic.
- rucio/alembicrevision.py +1 -1
- rucio/{daemons/c3po/collectors → cli}/__init__.py +1 -0
- rucio/cli/account.py +216 -0
- rucio-35.7.0.data/scripts/rucio → rucio/cli/bin_legacy/rucio.py +769 -486
- rucio-35.7.0.data/scripts/rucio-admin → rucio/cli/bin_legacy/rucio_admin.py +476 -423
- rucio/cli/command.py +272 -0
- rucio/cli/config.py +72 -0
- rucio/cli/did.py +191 -0
- rucio/cli/download.py +128 -0
- rucio/cli/lifetime_exception.py +33 -0
- rucio/cli/replica.py +162 -0
- rucio/cli/rse.py +293 -0
- rucio/cli/rule.py +158 -0
- rucio/cli/scope.py +40 -0
- rucio/cli/subscription.py +73 -0
- rucio/cli/upload.py +60 -0
- rucio/cli/utils.py +226 -0
- rucio/client/accountclient.py +0 -1
- rucio/client/baseclient.py +33 -24
- rucio/client/client.py +45 -1
- rucio/client/didclient.py +5 -3
- rucio/client/downloadclient.py +6 -8
- rucio/client/replicaclient.py +0 -2
- rucio/client/richclient.py +317 -0
- rucio/client/rseclient.py +4 -4
- rucio/client/uploadclient.py +26 -12
- rucio/common/bittorrent.py +234 -0
- rucio/common/cache.py +66 -29
- rucio/common/checksum.py +168 -0
- rucio/common/client.py +122 -0
- rucio/common/config.py +22 -35
- rucio/common/constants.py +61 -3
- rucio/common/didtype.py +72 -24
- rucio/common/dumper/__init__.py +45 -38
- rucio/common/dumper/consistency.py +75 -30
- rucio/common/dumper/data_models.py +63 -19
- rucio/common/dumper/path_parsing.py +19 -8
- rucio/common/exception.py +65 -8
- rucio/common/extra.py +5 -10
- rucio/common/logging.py +13 -13
- rucio/common/pcache.py +8 -7
- rucio/common/plugins.py +59 -27
- rucio/common/policy.py +12 -3
- rucio/common/schema/__init__.py +84 -34
- rucio/common/schema/generic.py +0 -17
- rucio/common/schema/generic_multi_vo.py +0 -17
- rucio/common/stomp_utils.py +383 -119
- rucio/common/test_rucio_server.py +12 -6
- rucio/common/types.py +132 -52
- rucio/common/utils.py +93 -643
- rucio/core/account_limit.py +14 -12
- rucio/core/authentication.py +2 -2
- rucio/core/config.py +23 -42
- rucio/core/credential.py +14 -15
- rucio/core/did.py +5 -1
- rucio/core/did_meta_plugins/elasticsearch_meta.py +407 -0
- rucio/core/did_meta_plugins/filter_engine.py +62 -3
- rucio/core/did_meta_plugins/json_meta.py +2 -2
- rucio/core/did_meta_plugins/mongo_meta.py +43 -30
- rucio/core/did_meta_plugins/postgres_meta.py +75 -39
- rucio/core/identity.py +6 -5
- rucio/core/importer.py +4 -3
- rucio/core/lifetime_exception.py +2 -2
- rucio/core/lock.py +8 -7
- rucio/core/message.py +6 -0
- rucio/core/monitor.py +30 -29
- rucio/core/naming_convention.py +2 -2
- rucio/core/nongrid_trace.py +2 -2
- rucio/core/oidc.py +11 -9
- rucio/core/permission/__init__.py +79 -37
- rucio/core/permission/generic.py +1 -7
- rucio/core/permission/generic_multi_vo.py +1 -7
- rucio/core/quarantined_replica.py +4 -3
- rucio/core/replica.py +464 -139
- rucio/core/replica_sorter.py +55 -59
- rucio/core/request.py +34 -32
- rucio/core/rse.py +301 -97
- rucio/core/rse_counter.py +1 -2
- rucio/core/rse_expression_parser.py +7 -7
- rucio/core/rse_selector.py +9 -7
- rucio/core/rule.py +41 -40
- rucio/core/rule_grouping.py +42 -40
- rucio/core/scope.py +5 -4
- rucio/core/subscription.py +26 -28
- rucio/core/topology.py +11 -11
- rucio/core/trace.py +2 -2
- rucio/core/transfer.py +29 -15
- rucio/core/volatile_replica.py +4 -3
- rucio/daemons/atropos/atropos.py +1 -1
- rucio/daemons/auditor/__init__.py +2 -2
- rucio/daemons/auditor/srmdumps.py +6 -6
- rucio/daemons/automatix/automatix.py +32 -21
- rucio/daemons/badreplicas/necromancer.py +2 -2
- rucio/daemons/bb8/nuclei_background_rebalance.py +1 -1
- rucio/daemons/bb8/t2_background_rebalance.py +1 -1
- rucio/daemons/cache/consumer.py +26 -90
- rucio/daemons/common.py +15 -25
- rucio/daemons/conveyor/finisher.py +2 -2
- rucio/daemons/conveyor/poller.py +18 -28
- rucio/daemons/conveyor/receiver.py +53 -123
- rucio/daemons/conveyor/stager.py +1 -0
- rucio/daemons/conveyor/submitter.py +3 -3
- rucio/daemons/hermes/hermes.py +129 -369
- rucio/daemons/judge/evaluator.py +2 -2
- rucio/daemons/oauthmanager/oauthmanager.py +3 -3
- rucio/daemons/reaper/dark_reaper.py +7 -3
- rucio/daemons/reaper/reaper.py +12 -16
- rucio/daemons/rsedecommissioner/config.py +1 -1
- rucio/daemons/rsedecommissioner/profiles/generic.py +5 -4
- rucio/daemons/rsedecommissioner/profiles/types.py +7 -6
- rucio/daemons/rsedecommissioner/rse_decommissioner.py +1 -1
- rucio/daemons/storage/consistency/actions.py +8 -6
- rucio/daemons/tracer/kronos.py +117 -142
- rucio/db/sqla/constants.py +5 -0
- rucio/db/sqla/migrate_repo/versions/1677d4d803c8_split_rse_availability_into_multiple.py +4 -4
- rucio/db/sqla/migrate_repo/versions/30d5206e9cad_increase_oauthrequest_redirect_msg_.py +37 -0
- rucio/db/sqla/models.py +157 -154
- rucio/db/sqla/session.py +58 -27
- rucio/db/sqla/types.py +2 -2
- rucio/db/sqla/util.py +2 -2
- rucio/gateway/account.py +18 -12
- rucio/gateway/account_limit.py +137 -60
- rucio/gateway/authentication.py +18 -12
- rucio/gateway/config.py +30 -20
- rucio/gateway/credential.py +9 -10
- rucio/gateway/did.py +70 -53
- rucio/gateway/dirac.py +6 -4
- rucio/gateway/exporter.py +3 -2
- rucio/gateway/heartbeat.py +6 -4
- rucio/gateway/identity.py +36 -51
- rucio/gateway/importer.py +3 -2
- rucio/gateway/lifetime_exception.py +3 -2
- rucio/gateway/meta_conventions.py +17 -6
- rucio/gateway/permission.py +4 -1
- rucio/gateway/quarantined_replica.py +3 -2
- rucio/gateway/replica.py +31 -22
- rucio/gateway/request.py +27 -18
- rucio/gateway/rse.py +69 -37
- rucio/gateway/rule.py +46 -26
- rucio/gateway/scope.py +3 -2
- rucio/gateway/subscription.py +14 -11
- rucio/gateway/vo.py +12 -8
- rucio/rse/__init__.py +3 -3
- rucio/rse/protocols/bittorrent.py +11 -1
- rucio/rse/protocols/cache.py +0 -11
- rucio/rse/protocols/dummy.py +0 -11
- rucio/rse/protocols/gfal.py +14 -9
- rucio/rse/protocols/globus.py +1 -1
- rucio/rse/protocols/http_cache.py +1 -1
- rucio/rse/protocols/posix.py +2 -2
- rucio/rse/protocols/protocol.py +84 -317
- rucio/rse/protocols/rclone.py +2 -1
- rucio/rse/protocols/rfio.py +10 -1
- rucio/rse/protocols/ssh.py +2 -1
- rucio/rse/protocols/storm.py +2 -13
- rucio/rse/protocols/webdav.py +74 -30
- rucio/rse/protocols/xrootd.py +2 -1
- rucio/rse/rsemanager.py +170 -53
- rucio/rse/translation.py +260 -0
- rucio/tests/common.py +23 -13
- rucio/tests/common_server.py +26 -9
- rucio/transfertool/bittorrent.py +15 -14
- rucio/transfertool/bittorrent_driver.py +5 -7
- rucio/transfertool/bittorrent_driver_qbittorrent.py +9 -8
- rucio/transfertool/fts3.py +20 -16
- rucio/transfertool/mock.py +2 -3
- rucio/vcsversion.py +4 -4
- rucio/version.py +7 -0
- rucio/web/rest/flaskapi/v1/accounts.py +17 -3
- rucio/web/rest/flaskapi/v1/auth.py +5 -5
- rucio/web/rest/flaskapi/v1/credentials.py +3 -2
- rucio/web/rest/flaskapi/v1/dids.py +21 -15
- rucio/web/rest/flaskapi/v1/identities.py +33 -9
- rucio/web/rest/flaskapi/v1/redirect.py +5 -4
- rucio/web/rest/flaskapi/v1/replicas.py +12 -8
- rucio/web/rest/flaskapi/v1/rses.py +15 -4
- rucio/web/rest/flaskapi/v1/traces.py +56 -19
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/alembic.ini.template +1 -1
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/alembic_offline.ini.template +1 -1
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/rucio.cfg.atlas.client.template +3 -2
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/rucio.cfg.template +3 -19
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/rucio_multi_vo.cfg.template +1 -18
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/requirements.server.txt +97 -68
- rucio-37.0.0rc2.data/scripts/rucio +133 -0
- rucio-37.0.0rc2.data/scripts/rucio-admin +97 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-atropos +2 -2
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-auditor +2 -1
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-automatix +2 -2
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-cache-client +17 -10
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-receiver +1 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-kronos +1 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-minos +2 -2
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-minos-temporary-expiration +2 -2
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-necromancer +2 -2
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-reaper +6 -6
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-transmogrifier +2 -2
- rucio-37.0.0rc2.dist-info/METADATA +92 -0
- {rucio-35.7.0.dist-info → rucio-37.0.0rc2.dist-info}/RECORD +239 -245
- {rucio-35.7.0.dist-info → rucio-37.0.0rc2.dist-info}/licenses/AUTHORS.rst +3 -0
- rucio/common/schema/atlas.py +0 -413
- rucio/common/schema/belleii.py +0 -408
- rucio/common/schema/domatpc.py +0 -401
- rucio/common/schema/escape.py +0 -426
- rucio/common/schema/icecube.py +0 -406
- rucio/core/permission/atlas.py +0 -1348
- rucio/core/permission/belleii.py +0 -1077
- rucio/core/permission/escape.py +0 -1078
- rucio/daemons/c3po/algorithms/__init__.py +0 -13
- rucio/daemons/c3po/algorithms/simple.py +0 -134
- rucio/daemons/c3po/algorithms/t2_free_space.py +0 -128
- rucio/daemons/c3po/algorithms/t2_free_space_only_pop.py +0 -130
- rucio/daemons/c3po/algorithms/t2_free_space_only_pop_with_network.py +0 -294
- rucio/daemons/c3po/c3po.py +0 -371
- rucio/daemons/c3po/collectors/agis.py +0 -108
- rucio/daemons/c3po/collectors/free_space.py +0 -81
- rucio/daemons/c3po/collectors/jedi_did.py +0 -57
- rucio/daemons/c3po/collectors/mock_did.py +0 -51
- rucio/daemons/c3po/collectors/network_metrics.py +0 -71
- rucio/daemons/c3po/collectors/workload.py +0 -112
- rucio/daemons/c3po/utils/__init__.py +0 -13
- rucio/daemons/c3po/utils/dataset_cache.py +0 -50
- rucio/daemons/c3po/utils/expiring_dataset_cache.py +0 -56
- rucio/daemons/c3po/utils/expiring_list.py +0 -62
- rucio/daemons/c3po/utils/popularity.py +0 -85
- rucio/daemons/c3po/utils/timeseries.py +0 -89
- rucio/rse/protocols/gsiftp.py +0 -92
- rucio-35.7.0.data/scripts/rucio-c3po +0 -85
- rucio-35.7.0.dist-info/METADATA +0 -72
- /rucio/{daemons/c3po → cli/bin_legacy}/__init__.py +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/globus-config.yml.template +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/ldap.cfg.template +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/mail_templates/rule_approval_request.tmpl +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/mail_templates/rule_approved_admin.tmpl +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/mail_templates/rule_approved_user.tmpl +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/mail_templates/rule_denied_admin.tmpl +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/mail_templates/rule_denied_user.tmpl +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/mail_templates/rule_ok_notification.tmpl +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/etc/rse-accounts.cfg.template +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/tools/bootstrap.py +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/tools/merge_rucio_configs.py +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/data/rucio/tools/reset_database.py +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-abacus-account +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-abacus-collection-replica +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-abacus-rse +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-bb8 +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-cache-consumer +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-finisher +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-poller +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-preparer +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-stager +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-submitter +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-conveyor-throttler +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-dark-reaper +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-dumper +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-follower +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-hermes +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-judge-cleaner +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-judge-evaluator +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-judge-injector +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-judge-repairer +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-oauth-manager +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-replica-recoverer +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-rse-decommissioner +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-storage-consistency-actions +0 -0
- {rucio-35.7.0.data → rucio-37.0.0rc2.data}/scripts/rucio-undertaker +0 -0
- {rucio-35.7.0.dist-info → rucio-37.0.0rc2.dist-info}/WHEEL +0 -0
- {rucio-35.7.0.dist-info → rucio-37.0.0rc2.dist-info}/licenses/LICENSE +0 -0
- {rucio-35.7.0.dist-info → rucio-37.0.0rc2.dist-info}/top_level.txt +0 -0
rucio/daemons/hermes/hermes.py
CHANGED
@@ -21,10 +21,8 @@ import datetime
 import functools
 import json
 import logging
-import random
 import re
 import smtplib
-import socket
 import sys
 import threading
 import time
@@ -33,28 +31,21 @@ from email.mime.text import MIMEText
 from typing import TYPE_CHECKING, Any, Optional, Union
 
 import requests
-import stomp
 from requests.auth import HTTPBasicAuth
 
 import rucio.db.sqla.util
-from rucio.common.config import (
-    config_get,
-    config_get_bool,
-    config_get_int,
-    config_get_list,
-)
+from rucio.common.config import config_get, config_get_bool, config_get_list
 from rucio.common.exception import DatabaseException
-from rucio.common.logging import setup_logging
+from rucio.common.logging import formatted_logger, setup_logging
+from rucio.common.stomp_utils import ListenerBase, StompConnectionManager
 from rucio.core.message import delete_messages, retrieve_messages
 from rucio.core.monitor import MetricManager
 from rucio.daemons.common import run_daemon
 
 if TYPE_CHECKING:
-    from collections.abc import Iterable
+    from collections.abc import Iterable
     from types import FrameType
 
-    from stomp.utils import Frame
-
     from rucio.common.types import LoggerFunction
     from rucio.daemons.common import HeartbeatHandler
 
@@ -76,267 +67,13 @@ def default(datetype: Union[datetime.date, datetime.datetime]) -> str:
     return datetype.isoformat()
 
 
-class HermesListener(
+class HermesListener(ListenerBase):
     """
     Hermes Listener
     """
 
-    def __init__(self, broker: str):
-        """
-        __init__
-        """
-        self.__broker = broker
-
-    def on_error(self, frame: "Frame") -> None:
-        """
-        Error handler
-        """
-        logging.error("[broker] [%s]: %s", self.__broker, frame.body)
-
-
-def setup_activemq(
-    logger: "LoggerFunction"
-) -> tuple[
-    Optional[list[stomp.Connection12]],
-    Optional[str],
-    Optional[str],
-    Optional[str],
-    Optional[bool]
-]:
-    """
-    Deliver messages to ActiveMQ
 
-
-    """
-
-    logger(logging.INFO, "[broker] Resolving brokers")
-
-    brokers_alias = []
-    brokers_resolved = []
-    try:
-        brokers_alias = [
-            broker.strip()
-            for broker in config_get("messaging-hermes", "brokers").split(",")
-        ]
-    except:
-        raise Exception("Could not load brokers from configuration")
-
-    logger(logging.INFO, "[broker] Resolving broker dns alias: %s", brokers_alias)
-    brokers_resolved = []
-    for broker in brokers_alias:
-        try:
-            addrinfos = socket.getaddrinfo(
-                broker, 0, socket.AF_INET, 0, socket.IPPROTO_TCP
-            )
-            brokers_resolved.extend(ai[4][0] for ai in addrinfos)
-        except socket.gaierror as ex:
-            logger(
-                logging.ERROR,
-                "[broker] Cannot resolve domain name %s (%s)",
-                broker,
-                str(ex),
-            )
-
-    logger(logging.DEBUG, "[broker] Brokers resolved to %s", brokers_resolved)
-
-    if not brokers_resolved:
-        logger(logging.FATAL, "[broker] No brokers resolved.")
-        return None, None, None, None, None
-
-    broker_timeout = 3
-    if not broker_timeout:  # Allow zero in config
-        broker_timeout = None
-
-    logger(logging.INFO, "[broker] Checking authentication method")
-    use_ssl = True
-    try:
-        use_ssl = config_get_bool("messaging-hermes", "use_ssl")
-    except:
-        logger(
-            logging.INFO,
-            "[broker] Could not find use_ssl in configuration -- please update your rucio.cfg",
-        )
-
-    port = config_get_int("messaging-hermes", "port")
-    vhost = config_get("messaging-hermes", "broker_virtual_host", raise_exception=False)
-    username = None
-    password = None
-    if not use_ssl:
-        username = config_get("messaging-hermes", "username")
-        password = config_get("messaging-hermes", "password")
-        port = config_get_int("messaging-hermes", "nonssl_port")
-
-    conns = []
-    for broker in brokers_resolved:
-        if not use_ssl:
-            logger(
-                logging.INFO,
-                "[broker] setting up username/password authentication: %s",
-                broker,
-            )
-        else:
-            logger(
-                logging.INFO,
-                "[broker] setting up ssl cert/key authentication: %s",
-                broker,
-            )
-
-        con = stomp.Connection12(
-            host_and_ports=[(broker, port)],
-            vhost=vhost,
-            keepalive=True,
-            timeout=broker_timeout,
-        )
-        if use_ssl:
-            con.set_ssl(
-                key_file=config_get("messaging-hermes", "ssl_key_file"),
-                cert_file=config_get("messaging-hermes", "ssl_cert_file"),
-            )
-
-        con.set_listener(
-            "rucio-hermes", HermesListener(con.transport._Transport__host_and_ports[0])
-        )
-
-        conns.append(con)
-    destination = config_get("messaging-hermes", "destination")
-    return conns, destination, username, password, use_ssl
-
-
-def deliver_to_activemq(
-    messages: "Iterable[dict[str, Any]]",
-    conns: "Sequence[stomp.Connection12]",
-    destination: str,
-    username: str,
-    password: str,
-    use_ssl: bool,
-    logger: "LoggerFunction"
-) -> list[str]:
-    """
-    Deliver messages to ActiveMQ
-
-    :param messages: The list of messages.
-    :param conns: A list of connections.
-    :param destination: The destination topic or queue.
-    :param username: The username if no SSL connection.
-    :param password: The username if no SSL connection.
-    :param use_ssl: Boolean to choose if SSL connection is used.
-    :param logger: The logger object.
-
-    :returns: List of message_id to delete
-    """
-    to_delete = []
-    for message in messages:
-        try:
-            conn = random.sample(conns, 1)[0]
-            if not conn.is_connected():
-                host_and_ports = conn.transport._Transport__host_and_ports[0][0]
-                RECONNECT_COUNTER.labels(host=host_and_ports.split(".")[0]).inc()
-                if not use_ssl:
-                    logger(
-                        logging.INFO,
-                        "[broker] - connecting with USERPASS to %s",
-                        host_and_ports,
-                    )
-                    conn.connect(username, password, wait=True)
-                else:
-                    logger(
-                        logging.INFO,
-                        "[broker] - connecting with SSL to %s",
-                        host_and_ports,
-                    )
-                    conn.connect(wait=True)
-
-            conn.send(
-                body=json.dumps(
-                    {
-                        "event_type": str(message["event_type"]).lower(),
-                        "payload": message["payload"],
-                        "created_at": str(message["created_at"]),
-                    }
-                ),
-                destination=destination,
-                headers={
-                    "persistent": "true",
-                    "event_type": str(message["event_type"]).lower(),
-                },
-            )
-
-            to_delete.append(message["id"])
-        except ValueError:
-            logger(
-                logging.ERROR,
-                "[broker] Cannot serialize payload to JSON: %s",
-                str(message["payload"]),
-            )
-            to_delete.append(message["id"])
-            continue
-        except stomp.exception.NotConnectedException as error:
-            logger(
-                logging.WARNING,
-                "[broker] Could not deliver message due to NotConnectedException: %s",
-                str(error),
-            )
-            continue
-        except stomp.exception.ConnectFailedException as error:
-            logger(
-                logging.WARNING,
-                "[broker] Could not deliver message due to ConnectFailedException: %s",
-                str(error),
-            )
-            continue
-        except Exception as error:
-            logger(logging.ERROR, "[broker] Could not deliver message: %s", str(error))
-            continue
-
-        if str(message["event_type"]).lower().startswith("transfer") or str(
-            message["event_type"]
-        ).lower().startswith("stagein"):
-            logger(
-                logging.DEBUG,
-                "[broker] - event_type: %s, scope: %s, name: %s, rse: %s, request-id: %s, transfer-id: %s, created_at: %s",
-                str(message["event_type"]).lower(),
-                message["payload"].get("scope", None),
-                message["payload"].get("name", None),
-                message["payload"].get("dst-rse", None),
-                message["payload"].get("request-id", None),
-                message["payload"].get("transfer-id", None),
-                str(message["created_at"]),
-            )
-
-        elif str(message["event_type"]).lower().startswith("dataset"):
-            logger(
-                logging.DEBUG,
-                "[broker] - event_type: %s, scope: %s, name: %s, rse: %s, rule-id: %s, created_at: %s)",
-                str(message["event_type"]).lower(),
-                message["payload"].get("scope", None),
-                message["payload"].get("name", None),
-                message["payload"].get("rse", None),
-                message["payload"].get("rule_id", None),
-                str(message["created_at"]),
-            )
-
-        elif str(message["event_type"]).lower().startswith("deletion"):
-            if "url" not in message["payload"]:
-                message["payload"]["url"] = "unknown"
-            logger(
-                logging.DEBUG,
-                "[broker] - event_type: %s, scope: %s, name: %s, rse: %s, url: %s, created_at: %s)",
-                str(message["event_type"]).lower(),
-                message["payload"].get("scope", None),
-                message["payload"].get("name", None),
-                message["payload"].get("rse", None),
-                message["payload"].get("url", None),
-                str(message["created_at"]),
-            )
-        else:
-            logger(logging.DEBUG, "[broker] Other message: %s", message)
-    return to_delete
-
-
-def deliver_emails(
-    messages: "Iterable[dict[str, Any]]",
-    logger: "LoggerFunction"
-) -> list[str]:
+def deliver_emails(messages: "Iterable[dict[str, Any]]", logger: "LoggerFunction") -> list[int]:
     """
     Sends emails
 
@@ -400,15 +137,16 @@ def submit_to_elastic(
 
     for message in messages:
         text += '{ "index":{ } }\n%s\n' % json.dumps(message, default=default)
-    res = requests.post(
+    res = requests.post(endpoint,
+                        data=text,
+                        headers={"Content-Type": "application/json"},
+                        auth=auth)
     return res.status_code
 
 
 def aggregate_to_influx(
     messages: "Iterable[dict[str, Any]]",
-    bin_size:
+    bin_size: str,
     endpoint: str,
     logger: "LoggerFunction"
 ) -> int:
@@ -432,11 +170,9 @@ def aggregate_to_influx(
         payload = message["payload"]
         if event_type in ["transfer-failed", "transfer-done"]:
             if not payload["transferred_at"]:
-                logger(
-                    payload["reason"],
-                )
+                logger(logging.WARNING,
+                       "No transferred_at for message. Reason : %s",
+                       payload["reason"])
                 continue
             transferred_at = time.strptime(
                 payload["transferred_at"], "%Y-%m-%d %H:%M:%S"
@@ -452,11 +188,7 @@ def aggregate_to_influx(
                 payload["activity"],
             )
             activity = re.sub(" ", r"\ ", activity)
-            key = "transfer,activity
-                activity,
-                src_rse,
-                dest_rse,
-            )
+            key = f"transfer,activity={activity!s},src_rse={src_rse!s},dst_rse={dest_rse!s}"
             if key not in bins[transferred_at]:
                 bins[transferred_at][key] = [0, 0, 0, 0]
             if event_type == "transfer-done":
@@ -476,7 +208,7 @@ def aggregate_to_influx(
             if created_at not in bins:
                 bins[created_at] = {}
             rse = payload["rse"]
-            key = "deletion,rse
+            key = f"deletion,rse={rse!s}"
             if key not in bins[created_at]:
                 bins[created_at][key] = [0, 0, 0, 0]
             if event_type == "deletion-done":
@@ -485,31 +217,22 @@ def aggregate_to_influx(
             if event_type == "deletion-failed":
                 bins[created_at][key][2] += 1
                 bins[created_at][key][3] += payload["bytes"]
+
     points = ""
-    for timestamp in bins:
-        for
-                    event_type,
-                    metrics[1],
-                    event_type,
-                    metrics[2],
-                    event_type,
-                    metrics[3],
-                    timestamp,
-                )
-            )
-            points += point
-            points += "\n"
+    for timestamp, entries in bins.items():
+        for key, metrics in entries.items():
+            event_type = key.split(",")[0]
+            points += (f"{key!s} "
+                       f"nb_{event_type!s}_done={metrics[0]!s},"
+                       f"bytes_{event_type!s}_done={metrics[1]!s},"
+                       f"nb_{event_type!s}_failed={metrics[2]!s},"
+                       f"bytes_{event_type!s}_failed={metrics[3]!s} "
+                       rf"{timestamp!s}\n")
+
     influx_token = config_get("hermes", "influxdb_token", False, None)
+    headers = {}
     if influx_token:
-        headers
+        headers["Authorization"] = f"Token {influx_token!s}"
     if points:
         res = requests.post(endpoint, headers=headers, data=points)
         logger(logging.DEBUG, "%s", str(res.text))
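For reference, each aggregated bin now serialises to a single InfluxDB line-protocol point. A small illustration, with made-up values, of the string produced by the f-string format added above:

```python
# Illustration only (not part of the diff): one aggregated point in InfluxDB
# line protocol, using the f-string format introduced above. Values are made up.
key = "transfer,activity=User\\ Subscriptions,src_rse=SITE_A_DATADISK,dst_rse=SITE_B_DATADISK"
metrics = [42, 123456789, 3, 987654]   # [nb_done, bytes_done, nb_failed, bytes_failed]
event_type = key.split(",")[0]         # -> "transfer"
point = (f"{key} "
         f"nb_{event_type}_done={metrics[0]},"
         f"bytes_{event_type}_done={metrics[1]},"
         f"nb_{event_type}_failed={metrics[2]},"
         f"bytes_{event_type}_failed={metrics[3]} "
         "1718891100")                 # hypothetical per-bin timestamp
print(point)
# transfer,activity=User\ Subscriptions,src_rse=SITE_A_DATADISK,dst_rse=SITE_B_DATADISK nb_transfer_done=42,bytes_transfer_done=123456789,nb_transfer_failed=3,bytes_transfer_failed=987654 1718891100
```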
@@ -517,6 +240,69 @@ def aggregate_to_influx(
     return 204
 
 
+def build_message_dict(
+    bulk: int,
+    thread: int,
+    total_threads: int,
+    message_dict: dict[str, list[dict[str, Any]]],
+    logger: "LoggerFunction",
+    service: Optional[str] = None,
+) -> None:
+    """
+    Retrieves messages from the database and builds a dictionary with the keys being the services, and the values a list of the messages (built up of dictionary / json information)
+
+    :param bulk: Integer for number of messages to retrieve.
+    :param thread: Passed to thread in retrieve_messages for Identifier of the caller thread as an integer.
+    :param total_threads: Passed to total_threads for Maximum number of threads as an integer.
+    :param message_dict: Either empty dictionary to be built, or build upon when using query_by_service.
+    :param logger: The logger object.
+    :param service: When passed, only returns messages table for this specific service.
+
+    :returns: None, but builds on the dictionary message_dict passed to this fuction (for when querying multiple services).
+    """
+    start_time = time.time()
+    messages = retrieve_messages(
+        bulk=bulk,
+        old_mode=False,
+        thread=thread,
+        total_threads=total_threads,
+        service_filter=service,
+    )
+
+    if messages:
+        if service is not None:
+            # query_by_service dictionary build behaviour
+            message_dict[service] = messages.copy()
+            logger(
+                logging.DEBUG,
+                "Retrieved %i messages retrieved in %s seconds for %s service.",
+                len(messages),
+                time.time() - start_time,
+                service,
+            )
+
+        else:
+            # default dictionary build behaviour
+            for message in messages:
+                service = message["services"]
+                if service is not None:
+                    if service not in message_dict:
+                        message_dict[service] = []
+                    message_dict[service].append(message)
+            logger(
+                logging.DEBUG,
+                "Retrieved %i messages retrieved in %s seconds",
+                len(messages),
+                time.time() - start_time,
+            )
+    else:
+        logger(
+            logging.INFO,
+            "No messages retrieved in %s seconds",
+            time.time() - start_time,
+        )
+
+
 def hermes(once: bool = False, bulk: int = 1000, sleep_time: int = 10) -> None:
     """
     Creates a Hermes Worker that can submit messages to different services (InfluXDB, ElasticSearch, ActiveMQ)
@@ -573,42 +359,36 @@ def run_once(heartbeat_handler: "HeartbeatHandler", bulk: int, **_kwargs) -> boo
             logger(logging.ERROR, str(err))
     conns = None
     if "activemq" in services_list:
-        if not conns:
-            logger(
-                logging.ERROR,
-                "ActiveMQ defined in the services list, cannot be setup",
-            )
-        except Exception as err:
-            logger(logging.ERROR, str(err))
+        conn_mgr = StompConnectionManager(config_section='messaging-hermes', logger=logger)
+        conn_mgr.set_listener_factory("rucio-hermes", HermesListener, heartbeats=conn_mgr.config.heartbeats)
 
     worker_number, total_workers, logger = heartbeat_handler.live()
     message_dict = {}
-    messages
-            len(messages),
-            time.time() - start_time,
+    query_by_service = config_get_bool("hermes", "query_by_service", default=False)
+
+    # query_by_service is a toggleable behaviour switch between collecting bulk number of messages across all services when false, to collecting bulk messages from each service when true.
+    if query_by_service:
+        for service in services_list:
+            build_message_dict(
+                bulk=bulk,
+                thread=worker_number,
+                total_threads=total_workers,
+                message_dict=message_dict,
+                logger=logger,
+                service=service,
+            )
+    else:
+        build_message_dict(
+            bulk=bulk,
+            thread=worker_number,
+            total_threads=total_workers,
+            message_dict=message_dict,
+            logger=logger
         )
 
+    if message_dict:
+        to_delete = []
+
     if "influx" in message_dict and influx_endpoint:
         # For influxDB, bulk submission, either everything succeeds or fails
         t_time = time.time()
@@ -688,15 +468,7 @@ def run_once(heartbeat_handler: "HeartbeatHandler", bulk: int, **_kwargs) -> boo
     if "activemq" in message_dict and conns:
         t_time = time.time()
         try:
-            messages_sent =
-                messages=message_dict["activemq"],
-                conns=conns,
-                destination=destination,  # type: ignore (argument could be None)
-                username=username,  # type: ignore (argument could be None)
-                password=password,  # type: ignore (argument could be None)
-                use_ssl=use_ssl,  # type: ignore (argument could be None)
-                logger=logger,
-            )
+            messages_sent = conn_mgr.deliver_messages(messages=message_dict["activemq"])
             logger(
                 logging.INFO,
                 "%s messages successfully submitted to ActiveMQ in %s seconds",
@@ -717,6 +489,7 @@ def run_once(heartbeat_handler: "HeartbeatHandler", bulk: int, **_kwargs) -> boo
                     "updated_at": message["created_at"],
                     "payload": str(message["payload"]),
                     "event_type": message["event_type"],
+                    "services": message["services"]
                 }
                 for message in to_delete
             ]
@@ -744,31 +517,18 @@ def run(
     Starts up the hermes threads.
     """
     setup_logging(process_name=DAEMON_NAME)
+    logger = formatted_logger(logging.log, DAEMON_NAME + ' %s')
 
     if rucio.db.sqla.util.is_old_db():
         raise DatabaseException("Database was not updated, daemon won't start")
 
-    logging.
-    thread_list = [
-                "bulk": bulk,
-                "sleep_time": sleep_time,
-            },
-        )
-        for _ in range(0, threads)
-    ]
+    logger(logging.INFO, "starting hermes threads")
+    thread_list = []
+    for _ in range(threads):
+        her_thread = threading.Thread(target=hermes, kwargs={"once": once, "bulk": bulk, "sleep_time": sleep_time})
+        her_thread.start()
+        thread_list.append(her_thread)
 
-    logging.debug(thread_list)
-    # Interruptible joins require a timeout.
-    while thread_list:
-        thread_list = [
-            thread.join(timeout=3.14)
-            for thread in thread_list
-            if thread and thread.is_alive()
-        ]
+    logger(logging.DEBUG, thread_list)
+    while [thread.join(timeout=3.14) for thread in thread_list if thread.is_alive()]:
+        pass
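Taken together, the hermes changes replace the removed setup_activemq()/deliver_to_activemq() helpers with the shared StompConnectionManager and group message retrieval per service. A minimal, hypothetical sketch of how these pieces fit together, assembled only from the calls visible in the hunks above (deliver_once is an invented name for illustration and is not the actual run_once implementation):

```python
# Hypothetical sketch (not part of the diff): the reworked Hermes delivery path.
import logging

from rucio.common.config import config_get_bool
from rucio.common.stomp_utils import StompConnectionManager
from rucio.daemons.hermes.hermes import HermesListener, build_message_dict


def deliver_once(services_list, bulk, worker_number, total_workers, logger=logging.log):
    # ActiveMQ connections are now owned by the shared connection manager
    # instead of the removed setup_activemq()/deliver_to_activemq() helpers.
    conn_mgr = StompConnectionManager(config_section='messaging-hermes', logger=logger)
    conn_mgr.set_listener_factory("rucio-hermes", HermesListener, heartbeats=conn_mgr.config.heartbeats)

    message_dict = {}
    # New toggle: fetch up to `bulk` messages per service instead of one shared batch.
    if config_get_bool("hermes", "query_by_service", default=False):
        for service in services_list:
            build_message_dict(bulk=bulk, thread=worker_number, total_threads=total_workers,
                               message_dict=message_dict, logger=logger, service=service)
    else:
        build_message_dict(bulk=bulk, thread=worker_number, total_threads=total_workers,
                           message_dict=message_dict, logger=logger)

    if "activemq" in message_dict:
        # Returns the messages that were delivered and can therefore be deleted.
        return conn_mgr.deliver_messages(messages=message_dict["activemq"])
    return []
```

Grouping by service means each backend can receive up to `bulk` messages per cycle instead of sharing a single batch across all configured services.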
rucio/daemons/judge/evaluator.py
CHANGED
@@ -91,9 +91,9 @@ def run_once(
         dids = get_updated_dids(total_workers=total_workers,
                                 worker_number=worker_number,
                                 limit=did_limit,
-                                blocked_dids=[(InternalScope(key[0],
+                                blocked_dids=[(InternalScope(key[0], from_external=False), key[1]) for key in paused_dids])
         logger(logging.DEBUG, 'index query time %f fetch size is %d (%d blocked)', time.time() - start, len(dids),
-               len([(InternalScope(key[0],
+               len([(InternalScope(key[0], from_external=False), key[1]) for key in paused_dids]))
 
         # If the list is empty, sent the worker to sleep
         if not dids:
rucio/daemons/oauthmanager/oauthmanager.py
CHANGED
@@ -53,7 +53,7 @@ graceful_stop = threading.Event()
 DAEMON_NAME = 'oauth-manager'
 
 
-def
+def oauth_manager(once: bool = False, max_rows: int = 100, sleep_time: int = 300) -> None:
     """
     Main loop to delete all expired tokens, refresh tokens eligible
     for refresh and delete all expired OAuth session parameters.
@@ -178,10 +178,10 @@ def run(once: bool = False, threads: int = 1, max_rows: int = 100, sleep_time: i
         raise DatabaseException('Database was not updated, daemon won\'t start')
 
     if once:
+        oauth_manager(once, max_rows, sleep_time)
     else:
         logging.info('OAuth Manager starting %s threads', str(threads))
-        threads = [threading.Thread(target=
+        threads = [threading.Thread(target=oauth_manager,
                                     kwargs={'once': once,
                                             'max_rows': max_rows,
                                             'sleep_time': sleep_time}) for i in range(0, threads)]
rucio/daemons/reaper/dark_reaper.py
CHANGED
@@ -44,6 +44,7 @@ if TYPE_CHECKING:
     from types import FrameType
     from typing import Optional
 
+    from rucio.common.types import LFNDict
     from rucio.daemons.common import HeartbeatHandler
 
 logging.getLogger("requests").setLevel(logging.CRITICAL)
@@ -120,10 +121,13 @@ def run_once(
             if replica['scope']:
                 scope = replica['scope'].external
             try:
+                lfn: "LFNDict" = {
+                    'scope': scope,
+                    'name': replica['name'],
+                    'path': replica['path']
+                }
                 pfn = str(list(rsemgr.lfns2pfns(rse_settings=rse_info,
-                                                lfns=[
-                                                       'name': replica['name'],
-                                                       'path': replica['path']}],
+                                                lfns=[lfn],
                                                 operation='delete',
                                                 scheme=scheme).values())[0])
                 logger(logging.INFO, 'Deletion ATTEMPT of %s:%s as %s on %s', scope, replica['name'], pfn, rse)
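The typed LFN construction the dark reaper now performs before PFN resolution can be exercised on its own. A minimal sketch, assuming the conventional `from rucio.rse import rsemanager as rsemgr` import and an `rse_info` settings dict obtained elsewhere (e.g. via rsemgr.get_rse_info); the helper name is invented for illustration:

```python
# Hypothetical helper (not part of the diff) mirroring the new dark_reaper code path.
from typing import Optional

from rucio.common.types import LFNDict
from rucio.rse import rsemanager as rsemgr


def resolve_deletion_pfn(rse_info: dict, scope: str, name: str, path: str, scheme: Optional[str] = None) -> str:
    # Build the typed LFN dict exactly as the daemon now does.
    lfn: "LFNDict" = {'scope': scope, 'name': name, 'path': path}
    # lfns2pfns returns a mapping of LFN -> PFN; take the single resolved value.
    pfns = rsemgr.lfns2pfns(rse_settings=rse_info, lfns=[lfn], operation='delete', scheme=scheme)
    return str(list(pfns.values())[0])
```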