rucio-clients 35.7.0__py3-none-any.whl → 37.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rucio/alembicrevision.py +1 -1
- rucio/cli/__init__.py +14 -0
- rucio/cli/account.py +216 -0
- rucio/cli/bin_legacy/__init__.py +13 -0
- rucio_clients-35.7.0.data/scripts/rucio → rucio/cli/bin_legacy/rucio.py +769 -486
- rucio_clients-35.7.0.data/scripts/rucio-admin → rucio/cli/bin_legacy/rucio_admin.py +476 -423
- rucio/cli/command.py +272 -0
- rucio/cli/config.py +72 -0
- rucio/cli/did.py +191 -0
- rucio/cli/download.py +128 -0
- rucio/cli/lifetime_exception.py +33 -0
- rucio/cli/replica.py +162 -0
- rucio/cli/rse.py +293 -0
- rucio/cli/rule.py +158 -0
- rucio/cli/scope.py +40 -0
- rucio/cli/subscription.py +73 -0
- rucio/cli/upload.py +60 -0
- rucio/cli/utils.py +226 -0
- rucio/client/accountclient.py +0 -1
- rucio/client/baseclient.py +33 -24
- rucio/client/client.py +45 -1
- rucio/client/didclient.py +5 -3
- rucio/client/downloadclient.py +6 -8
- rucio/client/replicaclient.py +0 -2
- rucio/client/richclient.py +317 -0
- rucio/client/rseclient.py +4 -4
- rucio/client/uploadclient.py +26 -12
- rucio/common/bittorrent.py +234 -0
- rucio/common/cache.py +66 -29
- rucio/common/checksum.py +168 -0
- rucio/common/client.py +122 -0
- rucio/common/config.py +22 -35
- rucio/common/constants.py +61 -3
- rucio/common/didtype.py +72 -24
- rucio/common/exception.py +65 -8
- rucio/common/extra.py +5 -10
- rucio/common/logging.py +13 -13
- rucio/common/pcache.py +8 -7
- rucio/common/plugins.py +59 -27
- rucio/common/policy.py +12 -3
- rucio/common/schema/__init__.py +84 -34
- rucio/common/schema/generic.py +0 -17
- rucio/common/schema/generic_multi_vo.py +0 -17
- rucio/common/test_rucio_server.py +12 -6
- rucio/common/types.py +132 -52
- rucio/common/utils.py +93 -643
- rucio/rse/__init__.py +3 -3
- rucio/rse/protocols/bittorrent.py +11 -1
- rucio/rse/protocols/cache.py +0 -11
- rucio/rse/protocols/dummy.py +0 -11
- rucio/rse/protocols/gfal.py +14 -9
- rucio/rse/protocols/globus.py +1 -1
- rucio/rse/protocols/http_cache.py +1 -1
- rucio/rse/protocols/posix.py +2 -2
- rucio/rse/protocols/protocol.py +84 -317
- rucio/rse/protocols/rclone.py +2 -1
- rucio/rse/protocols/rfio.py +10 -1
- rucio/rse/protocols/ssh.py +2 -1
- rucio/rse/protocols/storm.py +2 -13
- rucio/rse/protocols/webdav.py +74 -30
- rucio/rse/protocols/xrootd.py +2 -1
- rucio/rse/rsemanager.py +170 -53
- rucio/rse/translation.py +260 -0
- rucio/vcsversion.py +4 -4
- rucio/version.py +7 -0
- {rucio_clients-35.7.0.data → rucio_clients-37.0.0.data}/data/etc/rucio.cfg.atlas.client.template +3 -2
- {rucio_clients-35.7.0.data → rucio_clients-37.0.0.data}/data/etc/rucio.cfg.template +3 -19
- {rucio_clients-35.7.0.data → rucio_clients-37.0.0.data}/data/requirements.client.txt +11 -7
- rucio_clients-37.0.0.data/scripts/rucio +133 -0
- rucio_clients-37.0.0.data/scripts/rucio-admin +97 -0
- {rucio_clients-35.7.0.dist-info → rucio_clients-37.0.0.dist-info}/METADATA +18 -14
- rucio_clients-37.0.0.dist-info/RECORD +104 -0
- {rucio_clients-35.7.0.dist-info → rucio_clients-37.0.0.dist-info}/licenses/AUTHORS.rst +3 -0
- rucio/common/schema/atlas.py +0 -413
- rucio/common/schema/belleii.py +0 -408
- rucio/common/schema/domatpc.py +0 -401
- rucio/common/schema/escape.py +0 -426
- rucio/common/schema/icecube.py +0 -406
- rucio/rse/protocols/gsiftp.py +0 -92
- rucio_clients-35.7.0.dist-info/RECORD +0 -88
- {rucio_clients-35.7.0.data → rucio_clients-37.0.0.data}/data/etc/rse-accounts.cfg.template +0 -0
- {rucio_clients-35.7.0.data → rucio_clients-37.0.0.data}/data/rucio_client/merge_rucio_configs.py +0 -0
- {rucio_clients-35.7.0.dist-info → rucio_clients-37.0.0.dist-info}/WHEEL +0 -0
- {rucio_clients-35.7.0.dist-info → rucio_clients-37.0.0.dist-info}/licenses/LICENSE +0 -0
- {rucio_clients-35.7.0.dist-info → rucio_clients-37.0.0.dist-info}/top_level.txt +0 -0
|
@@ -1,4 +1,4 @@
|
|
|
1
|
-
|
|
1
|
+
#!/usr/bin/env python
|
|
2
2
|
# Copyright European Organization for Nuclear Research (CERN) since 2012
|
|
3
3
|
#
|
|
4
4
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
@@ -14,53 +14,49 @@
|
|
|
14
14
|
# limitations under the License.
|
|
15
15
|
|
|
16
16
|
import argparse
|
|
17
|
-
import errno
|
|
18
17
|
import itertools
|
|
19
|
-
import logging
|
|
20
18
|
import math
|
|
21
19
|
import os
|
|
22
20
|
import signal
|
|
23
|
-
import subprocess
|
|
24
21
|
import sys
|
|
25
22
|
import time
|
|
26
23
|
import traceback
|
|
27
24
|
import unittest
|
|
28
25
|
import uuid
|
|
29
|
-
from configparser import NoOptionError, NoSectionError
|
|
30
26
|
from copy import deepcopy
|
|
31
27
|
from datetime import datetime
|
|
32
|
-
from
|
|
28
|
+
from logging import DEBUG
|
|
33
29
|
from typing import Optional
|
|
34
30
|
|
|
31
|
+
from rich.console import Console
|
|
32
|
+
from rich.padding import Padding
|
|
33
|
+
from rich.status import Status
|
|
34
|
+
from rich.text import Text
|
|
35
|
+
from rich.theme import Theme
|
|
36
|
+
from rich.traceback import install
|
|
37
|
+
from rich.tree import Tree
|
|
35
38
|
from tabulate import tabulate
|
|
36
39
|
|
|
37
40
|
# rucio module has the same name as this executable module, so this rule fails. pylint: disable=no-name-in-module
|
|
38
41
|
from rucio import version
|
|
39
|
-
from rucio.
|
|
42
|
+
from rucio.cli.utils import exception_handler, get_client, setup_gfal2_logger, signal_handler
|
|
43
|
+
from rucio.client.richclient import MAX_TRACEBACK_WIDTH, MIN_CONSOLE_WIDTH, CLITheme, generate_table, get_cli_config, get_pager, print_output, setup_rich_logger
|
|
44
|
+
from rucio.common.client import detect_client_location
|
|
40
45
|
from rucio.common.config import config_get, config_get_float
|
|
41
46
|
from rucio.common.constants import ReplicaState
|
|
42
47
|
from rucio.common.exception import (
|
|
43
|
-
AccessDenied,
|
|
44
|
-
CannotAuthenticate,
|
|
45
|
-
DataIdentifierAlreadyExists,
|
|
46
|
-
DataIdentifierNotFound,
|
|
47
48
|
DIDFilterSyntaxError,
|
|
48
|
-
DuplicateContent,
|
|
49
49
|
DuplicateCriteriaInDIDFilter,
|
|
50
50
|
DuplicateRule,
|
|
51
51
|
InputValidationError,
|
|
52
52
|
InvalidObject,
|
|
53
|
-
InvalidRSEExpression,
|
|
54
53
|
InvalidType,
|
|
55
|
-
MissingDependency,
|
|
56
|
-
RSENotFound,
|
|
57
54
|
RucioException,
|
|
58
|
-
RuleNotFound,
|
|
59
55
|
UnsupportedOperation,
|
|
60
56
|
)
|
|
61
57
|
from rucio.common.extra import import_extras
|
|
62
58
|
from rucio.common.test_rucio_server import TestRucioServer
|
|
63
|
-
from rucio.common.utils import Color, StoreAndDeprecateWarningAction, chunks,
|
|
59
|
+
from rucio.common.utils import Color, StoreAndDeprecateWarningAction, chunks, extract_scope, parse_did_filter_from_string, parse_did_filter_from_string_fe, setup_logger, sizefmt
|
|
64
60
|
|
|
65
61
|
EXTRA_MODULES = import_extras(['argcomplete'])
|
|
66
62
|
|
|
@@ -73,32 +69,8 @@ FAILURE = 1
|
|
|
73
69
|
DEFAULT_SECURE_PORT = 443
|
|
74
70
|
DEFAULT_PORT = 80
|
|
75
71
|
|
|
76
|
-
logger = logging.log
|
|
77
|
-
gfal2_logger = logging.getLogger("gfal2")
|
|
78
72
|
tablefmt = 'psql'
|
|
79
|
-
|
|
80
|
-
|
|
81
|
-
def setup_gfal2_logger(logger):
|
|
82
|
-
logger.setLevel(logging.CRITICAL)
|
|
83
|
-
logger.addHandler(logging.StreamHandler())
|
|
84
|
-
|
|
85
|
-
|
|
86
|
-
setup_gfal2_logger(gfal2_logger)
|
|
87
|
-
|
|
88
|
-
|
|
89
|
-
def signal_handler(sig, frame):
|
|
90
|
-
logger.warning('You pressed Ctrl+C! Exiting gracefully')
|
|
91
|
-
child_processes = subprocess.Popen('ps -o pid --ppid %s --noheaders' % os.getpid(), shell=True, stdout=subprocess.PIPE)
|
|
92
|
-
child_processes = child_processes.stdout.read()
|
|
93
|
-
for pid in child_processes.split("\n")[:-1]:
|
|
94
|
-
try:
|
|
95
|
-
os.kill(int(pid), signal.SIGTERM)
|
|
96
|
-
except Exception:
|
|
97
|
-
print('Cannot kill child process')
|
|
98
|
-
sys.exit(1)
|
|
99
|
-
|
|
100
|
-
|
|
101
|
-
signal.signal(signal.SIGINT, signal_handler)
|
|
73
|
+
cli_config = get_cli_config()
|
|
102
74
|
|
|
103
75
|
|
|
104
76
|
def get_scope(did, client):
|
|
@@ -112,151 +84,6 @@ def get_scope(did, client):
|
|
|
112
84
|
return None, did
|
|
113
85
|
|
|
114
86
|
|
|
115
|
-
def exception_handler(function):
|
|
116
|
-
@wraps(function)
|
|
117
|
-
def new_funct(*args, **kwargs):
|
|
118
|
-
try:
|
|
119
|
-
return function(*args, **kwargs)
|
|
120
|
-
except InvalidObject as error:
|
|
121
|
-
logger.error(error)
|
|
122
|
-
return error.error_code
|
|
123
|
-
except DataIdentifierNotFound as error:
|
|
124
|
-
logger.error(error)
|
|
125
|
-
logger.debug('This means that the Data IDentifier you provided is not known by Rucio.')
|
|
126
|
-
return error.error_code
|
|
127
|
-
except AccessDenied as error:
|
|
128
|
-
logger.error(error)
|
|
129
|
-
logger.debug('This error is a permission issue. You cannot run this command with your account.')
|
|
130
|
-
return error.error_code
|
|
131
|
-
except DataIdentifierAlreadyExists as error:
|
|
132
|
-
logger.error(error)
|
|
133
|
-
logger.debug('This means that the Data IDentifier you try to add is already registered in Rucio.')
|
|
134
|
-
return error.error_code
|
|
135
|
-
except RSENotFound as error:
|
|
136
|
-
logger.error(error)
|
|
137
|
-
logger.debug('This means that the Rucio Storage Element you provided is not known by Rucio.')
|
|
138
|
-
return error.error_code
|
|
139
|
-
except InvalidRSEExpression as error:
|
|
140
|
-
logger.error(error)
|
|
141
|
-
logger.debug('This means the RSE expression you provided is not syntactically correct.')
|
|
142
|
-
return error.error_code
|
|
143
|
-
except DuplicateContent as error:
|
|
144
|
-
logger.error(error)
|
|
145
|
-
logger.debug('This means that the DID you want to attach is already in the target DID.')
|
|
146
|
-
return error.error_code
|
|
147
|
-
except TypeError as error:
|
|
148
|
-
logger.error(error)
|
|
149
|
-
logger.debug('This means the parameter you passed has a wrong type.')
|
|
150
|
-
return FAILURE
|
|
151
|
-
except RuleNotFound as error:
|
|
152
|
-
logger.error(error)
|
|
153
|
-
logger.debug('This means the rule you specified does not exist.')
|
|
154
|
-
return error.error_code
|
|
155
|
-
except UnsupportedOperation as error:
|
|
156
|
-
logger.error(error)
|
|
157
|
-
logger.debug('This means you cannot change the status of the DID.')
|
|
158
|
-
return error.error_code
|
|
159
|
-
except MissingDependency as error:
|
|
160
|
-
logger.error(error)
|
|
161
|
-
logger.debug('This means one dependency is missing.')
|
|
162
|
-
return error.error_code
|
|
163
|
-
except KeyError as error:
|
|
164
|
-
if 'x-rucio-auth-token' in str(error):
|
|
165
|
-
used_account = None
|
|
166
|
-
try: # get the configured account from the configuration file
|
|
167
|
-
used_account = '%s (from rucio.cfg)' % config_get('client', 'account')
|
|
168
|
-
except:
|
|
169
|
-
pass
|
|
170
|
-
try: # are we overridden by the environment?
|
|
171
|
-
used_account = '%s (from RUCIO_ACCOUNT)' % os.environ['RUCIO_ACCOUNT']
|
|
172
|
-
except:
|
|
173
|
-
pass
|
|
174
|
-
logger.error('Specified account %s does not have an associated identity.' % used_account)
|
|
175
|
-
else:
|
|
176
|
-
logger.debug(traceback.format_exc())
|
|
177
|
-
contact = config_get('policy', 'support', raise_exception=False)
|
|
178
|
-
support = ('Please follow up with all relevant information at: ' + contact) if contact else ''
|
|
179
|
-
logger.error('\nThe object is missing this property: %s\n'
|
|
180
|
-
'This should never happen. Please rerun the last command with the "-v" option to gather more information.\n'
|
|
181
|
-
'%s' % (str(error), support))
|
|
182
|
-
return FAILURE
|
|
183
|
-
except RucioException as error:
|
|
184
|
-
logger.error(error)
|
|
185
|
-
return error.error_code
|
|
186
|
-
except Exception as error:
|
|
187
|
-
if isinstance(error, IOError) and getattr(error, 'errno', None) == errno.EPIPE:
|
|
188
|
-
# Ignore Broken Pipe
|
|
189
|
-
# While in python3 we can directly catch 'BrokenPipeError', in python2 it doesn't exist.
|
|
190
|
-
|
|
191
|
-
# Python flushes standard streams on exit; redirect remaining output
|
|
192
|
-
# to devnull to avoid another BrokenPipeError at shutdown
|
|
193
|
-
devnull = os.open(os.devnull, os.O_WRONLY)
|
|
194
|
-
os.dup2(devnull, sys.stdout.fileno())
|
|
195
|
-
return SUCCESS
|
|
196
|
-
logger.debug(traceback.format_exc())
|
|
197
|
-
logger.error(error)
|
|
198
|
-
contact = config_get('policy', 'support', raise_exception=False)
|
|
199
|
-
support = ("If it's a problem concerning your experiment or if you're unsure what to do, please follow up at: %s\n" % contact) if contact else ''
|
|
200
|
-
contact = config_get('policy', 'support_rucio', default='https://github.com/rucio/rucio/issues')
|
|
201
|
-
support += "If you're sure there is a problem with Rucio itself, please follow up at: " + contact
|
|
202
|
-
logger.error('\nRucio exited with an unexpected/unknown error.\n'
|
|
203
|
-
'Please rerun the last command with the "-v" option to gather more information.\n'
|
|
204
|
-
'%s' % support)
|
|
205
|
-
return FAILURE
|
|
206
|
-
return new_funct
|
|
207
|
-
|
|
208
|
-
|
|
209
|
-
def get_client(args):
|
|
210
|
-
"""
|
|
211
|
-
Returns a new client object.
|
|
212
|
-
"""
|
|
213
|
-
if not args.auth_strategy:
|
|
214
|
-
if 'RUCIO_AUTH_TYPE' in os.environ:
|
|
215
|
-
auth_type = os.environ['RUCIO_AUTH_TYPE'].lower()
|
|
216
|
-
else:
|
|
217
|
-
try:
|
|
218
|
-
auth_type = config_get('client', 'auth_type').lower()
|
|
219
|
-
except (NoOptionError, NoSectionError):
|
|
220
|
-
logger.error('Cannot get AUTH_TYPE')
|
|
221
|
-
sys.exit(FAILURE)
|
|
222
|
-
else:
|
|
223
|
-
auth_type = args.auth_strategy.lower()
|
|
224
|
-
|
|
225
|
-
if auth_type in ['userpass', 'saml'] and args.username is not None and args.password is not None:
|
|
226
|
-
creds = {'username': args.username, 'password': args.password}
|
|
227
|
-
elif auth_type == 'oidc':
|
|
228
|
-
if args.oidc_issuer:
|
|
229
|
-
args.oidc_issuer = args.oidc_issuer.lower()
|
|
230
|
-
creds = {'oidc_auto': args.oidc_auto,
|
|
231
|
-
'oidc_scope': args.oidc_scope,
|
|
232
|
-
'oidc_audience': args.oidc_audience,
|
|
233
|
-
'oidc_polling': args.oidc_polling,
|
|
234
|
-
'oidc_refresh_lifetime': args.oidc_refresh_lifetime,
|
|
235
|
-
'oidc_issuer': args.oidc_issuer,
|
|
236
|
-
'oidc_username': args.oidc_username,
|
|
237
|
-
'oidc_password': args.oidc_password}
|
|
238
|
-
elif auth_type == "x509":
|
|
239
|
-
creds = {'client_cert': args.certificate, "client_key": args.client_key}
|
|
240
|
-
else:
|
|
241
|
-
creds = None
|
|
242
|
-
|
|
243
|
-
try:
|
|
244
|
-
client = Client(rucio_host=args.host, auth_host=args.auth_host,
|
|
245
|
-
account=args.account,
|
|
246
|
-
auth_type=auth_type, creds=creds,
|
|
247
|
-
ca_cert=args.ca_certificate, timeout=args.timeout,
|
|
248
|
-
user_agent=args.user_agent, vo=args.vo,
|
|
249
|
-
logger=logger)
|
|
250
|
-
except CannotAuthenticate as error:
|
|
251
|
-
logger.error(error)
|
|
252
|
-
if 'alert certificate expired' in str(error):
|
|
253
|
-
logger.error('The server certificate expired.')
|
|
254
|
-
elif auth_type.lower() == 'x509_proxy':
|
|
255
|
-
logger.error('Please verify that your proxy is still valid and renew it if needed.')
|
|
256
|
-
sys.exit(FAILURE)
|
|
257
|
-
return client
|
|
258
|
-
|
|
259
|
-
|
|
260
87
|
def __resolve_containers_to_datasets(scope, name, client):
|
|
261
88
|
"""
|
|
262
89
|
Helper function to resolve a container into its dataset content.
|
|
@@ -271,11 +98,10 @@ def __resolve_containers_to_datasets(scope, name, client):
|
|
|
271
98
|
|
|
272
99
|
|
|
273
100
|
@exception_handler
|
|
274
|
-
def ping(args):
|
|
101
|
+
def ping(args, client, logger, console, spinner):
|
|
275
102
|
"""
|
|
276
103
|
Pings a Rucio server.
|
|
277
104
|
"""
|
|
278
|
-
client = get_client(args)
|
|
279
105
|
server_info = client.ping()
|
|
280
106
|
if server_info:
|
|
281
107
|
print(server_info['version'])
|
|
@@ -285,27 +111,32 @@ def ping(args):
|
|
|
285
111
|
|
|
286
112
|
|
|
287
113
|
@exception_handler
|
|
288
|
-
def whoami_account(args):
|
|
114
|
+
def whoami_account(args, client, logger, console, spinner):
|
|
289
115
|
"""
|
|
290
116
|
%(prog)s show [options] <field1=value1 field2=value2 ...>
|
|
291
117
|
|
|
292
118
|
Show extended information of a given account
|
|
293
119
|
"""
|
|
294
|
-
client = get_client(args)
|
|
295
120
|
info = client.whoami()
|
|
296
|
-
|
|
297
|
-
|
|
121
|
+
if cli_config == 'rich':
|
|
122
|
+
keyword_styles = {**CLITheme.ACCOUNT_STATUS, **CLITheme.ACCOUNT_TYPE}
|
|
123
|
+
table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for (k, v) in sorted(info.items())]
|
|
124
|
+
table = generate_table(table_data, col_alignments=['left', 'left'], row_styles=['none'])
|
|
125
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
126
|
+
else:
|
|
127
|
+
for k in info:
|
|
128
|
+
print(k.ljust(10) + ' : ' + str(info[k]))
|
|
298
129
|
return SUCCESS
|
|
299
130
|
|
|
300
131
|
|
|
301
132
|
@exception_handler
|
|
302
|
-
def list_dataset_replicas(args):
|
|
133
|
+
def list_dataset_replicas(args, client, logger, console, spinner):
|
|
303
134
|
"""
|
|
304
135
|
%(prog)s list [options] <field1=value1 field2=value2 ...>
|
|
305
136
|
|
|
306
137
|
List dataset replicas
|
|
307
138
|
"""
|
|
308
|
-
|
|
139
|
+
|
|
309
140
|
result = {}
|
|
310
141
|
datasets = []
|
|
311
142
|
|
|
@@ -329,6 +160,10 @@ def list_dataset_replicas(args):
|
|
|
329
160
|
result[dsn] = {}
|
|
330
161
|
result[dsn][replica['rse']] = [replica['rse'], replica['available_length'], replica['length']]
|
|
331
162
|
|
|
163
|
+
if cli_config == 'rich':
|
|
164
|
+
spinner.update(status='Fetching dataset replicas')
|
|
165
|
+
spinner.start()
|
|
166
|
+
|
|
332
167
|
if len(args.dids) == 1:
|
|
333
168
|
scope, name = get_scope(args.dids[0], client)
|
|
334
169
|
dmeta = client.get_metadata(scope, name)
|
|
@@ -341,27 +176,44 @@ def list_dataset_replicas(args):
|
|
|
341
176
|
|
|
342
177
|
if args.deep or len(datasets) < 2:
|
|
343
178
|
for did in datasets:
|
|
344
|
-
dsn = "
|
|
179
|
+
dsn = f"{did['scope']}:{did['name']}"
|
|
345
180
|
for rep in client.list_dataset_replicas(scope=did['scope'], name=did['name'], deep=args.deep):
|
|
346
181
|
_append_result(dsn=dsn, replica=rep)
|
|
347
182
|
else:
|
|
348
183
|
for rep in client.list_dataset_replicas_bulk(dids=datasets):
|
|
349
|
-
dsn = "
|
|
184
|
+
dsn = f"{rep['scope']}:{rep['name']}"
|
|
350
185
|
_append_result(dsn=dsn, replica=rep)
|
|
351
186
|
|
|
352
187
|
if args.csv:
|
|
353
188
|
for dsn in result:
|
|
354
189
|
for rse in list(result[dsn].values()):
|
|
355
190
|
print(rse[0], rse[1], rse[2], sep=',')
|
|
191
|
+
|
|
192
|
+
if cli_config == 'rich':
|
|
193
|
+
spinner.stop()
|
|
356
194
|
else:
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
195
|
+
output = []
|
|
196
|
+
for i, dsn in enumerate(result):
|
|
197
|
+
if cli_config == 'rich':
|
|
198
|
+
if i > 0:
|
|
199
|
+
output.append(Text(f'\nDATASET: {dsn}', style=CLITheme.TEXT_HIGHLIGHT))
|
|
200
|
+
elif len(result) > 1:
|
|
201
|
+
output.append(Text(f'DATASET: {dsn}', style=CLITheme.TEXT_HIGHLIGHT))
|
|
202
|
+
|
|
203
|
+
table = generate_table(list(result[dsn].values()), headers=['RSE', 'FOUND', 'TOTAL'], col_alignments=['left', 'right', 'right'])
|
|
204
|
+
output.append(table)
|
|
205
|
+
else:
|
|
206
|
+
print(f'\nDATASET: {dsn}')
|
|
207
|
+
print(tabulate(list(result[dsn].values()), tablefmt=tablefmt, headers=['RSE', 'FOUND', 'TOTAL']))
|
|
208
|
+
|
|
209
|
+
if cli_config == 'rich':
|
|
210
|
+
spinner.stop()
|
|
211
|
+
print_output(*output, console=console, no_pager=args.no_pager)
|
|
360
212
|
return SUCCESS
|
|
361
213
|
|
|
362
214
|
|
|
363
215
|
@exception_handler
|
|
364
|
-
def list_file_replicas(args):
|
|
216
|
+
def list_file_replicas(args, client, logger, console, spinner):
|
|
365
217
|
"""
|
|
366
218
|
%(prog)s list [options] <field1=value1 field2=value2 ...>
|
|
367
219
|
|
|
@@ -369,13 +221,12 @@ def list_file_replicas(args):
|
|
|
369
221
|
"""
|
|
370
222
|
if args.missing:
|
|
371
223
|
args.all_states = True
|
|
372
|
-
client = get_client(args)
|
|
373
224
|
|
|
374
225
|
protocols = None
|
|
375
226
|
if args.protocols:
|
|
376
227
|
protocols = args.protocols.split(',')
|
|
377
228
|
|
|
378
|
-
|
|
229
|
+
table_data = []
|
|
379
230
|
dids = []
|
|
380
231
|
if args.missing and not args.rses:
|
|
381
232
|
print('Cannot use --missing without specifying a RSE')
|
|
@@ -384,9 +235,13 @@ def list_file_replicas(args):
|
|
|
384
235
|
print('The substitution parameter must equal --link="/pfn/dir:/dst/dir"')
|
|
385
236
|
return FAILURE
|
|
386
237
|
|
|
238
|
+
if cli_config == 'rich':
|
|
239
|
+
spinner.update(status='Fetching file replicas')
|
|
240
|
+
spinner.start()
|
|
241
|
+
|
|
387
242
|
for did in args.dids:
|
|
388
243
|
scope, name = get_scope(did, client)
|
|
389
|
-
client.get_metadata(scope=scope, name=name) #
|
|
244
|
+
client.get_metadata(scope=scope, name=name) # Break with Exception before streaming replicas if DID does not exist.
|
|
390
245
|
dids.append({'scope': scope, 'name': name})
|
|
391
246
|
|
|
392
247
|
replicas = client.list_replicas(dids, schemes=protocols,
|
|
@@ -400,14 +255,24 @@ def list_file_replicas(args):
|
|
|
400
255
|
rses = [rse["rse"] for rse in client.list_rses(rse_expression=args.rses)]
|
|
401
256
|
|
|
402
257
|
if args.metalink:
|
|
403
|
-
print(replicas[:-1]) #
|
|
258
|
+
print(replicas[:-1]) # Last character is newline, no need to print that.
|
|
404
259
|
return SUCCESS
|
|
405
260
|
|
|
406
261
|
if args.missing:
|
|
407
262
|
for replica, rse in itertools.product(replicas, rses):
|
|
408
263
|
if 'states' in replica and rse in replica['states'] and replica['states'].get(rse) != 'AVAILABLE':
|
|
409
|
-
|
|
410
|
-
|
|
264
|
+
if cli_config == 'rich':
|
|
265
|
+
replica_state = f"[{CLITheme.REPLICA_STATE.get(ReplicaState[replica['states'].get(rse)].value, 'default')}]{ReplicaState[replica['states'].get(rse)].value}[/]"
|
|
266
|
+
table_data.append([replica['scope'], replica['name'], '({0}) {1}'.format(replica_state, rse)])
|
|
267
|
+
else:
|
|
268
|
+
table_data.append([replica['scope'], replica['name'], "({0}) {1}".format(ReplicaState[replica['states'].get(rse)].value, rse)])
|
|
269
|
+
if cli_config == 'rich':
|
|
270
|
+
table = generate_table(table_data, headers=['SCOPE', 'NAME', '(STATE) RSE'], col_alignments=['left', 'left', 'left'])
|
|
271
|
+
spinner.stop()
|
|
272
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
273
|
+
else:
|
|
274
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE', 'NAME', '(STATE) RSE']))
|
|
275
|
+
|
|
411
276
|
elif args.link:
|
|
412
277
|
pfn_dir, dst_dir = args.link.split(':')
|
|
413
278
|
if args.rses:
|
|
@@ -427,13 +292,23 @@ def list_file_replicas(args):
|
|
|
427
292
|
for pfn in replica['pfns']:
|
|
428
293
|
rse = replica['pfns'][pfn]['rse']
|
|
429
294
|
if replica['rses'].get(rse):
|
|
430
|
-
|
|
295
|
+
if cli_config == 'rich':
|
|
296
|
+
table_data.append([pfn])
|
|
297
|
+
else:
|
|
298
|
+
print(pfn)
|
|
431
299
|
else:
|
|
432
300
|
for replica in replicas:
|
|
433
301
|
for pfn in replica['pfns']:
|
|
434
302
|
rse = replica['pfns'][pfn]['rse']
|
|
435
303
|
if replica['rses'][rse]:
|
|
436
|
-
|
|
304
|
+
if cli_config == 'rich':
|
|
305
|
+
table_data.append([pfn])
|
|
306
|
+
else:
|
|
307
|
+
print(pfn)
|
|
308
|
+
if cli_config == 'rich':
|
|
309
|
+
table = generate_table(table_data, headers=['PFN'], col_alignments=['left'])
|
|
310
|
+
spinner.stop()
|
|
311
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
437
312
|
else:
|
|
438
313
|
if args.all_states:
|
|
439
314
|
header = ['SCOPE', 'NAME', 'FILESIZE', 'ADLER32', '(STATE) RSE: REPLICA']
|
|
@@ -444,27 +319,48 @@ def list_file_replicas(args):
|
|
|
444
319
|
for pfn in replica['pfns']:
|
|
445
320
|
rse = replica['pfns'][pfn]['rse']
|
|
446
321
|
if args.all_states:
|
|
447
|
-
|
|
322
|
+
if cli_config == 'rich':
|
|
323
|
+
replica_state = f"[{CLITheme.REPLICA_STATE.get(ReplicaState[replica['states'][rse]].value, 'default')}]{ReplicaState[replica['states'][rse]].value}[/]"
|
|
324
|
+
# Less does not display hyperlinks well if the table is very wide.
|
|
325
|
+
if args.no_pager:
|
|
326
|
+
rse_string = f'({replica_state}) {rse}: [u bright_blue link={pfn}]{pfn}[/]'
|
|
327
|
+
else:
|
|
328
|
+
rse_string = f'({replica_state}) {rse}: [u bright_blue]{pfn}[/]'
|
|
329
|
+
else:
|
|
330
|
+
rse_string = '({2}) {0}: {1}'.format(rse, pfn, ReplicaState[replica['states'][rse]].value)
|
|
448
331
|
else:
|
|
449
|
-
|
|
332
|
+
if cli_config == 'rich':
|
|
333
|
+
# Less does not display hyperlinks well if the table is very wide.
|
|
334
|
+
if args.no_pager:
|
|
335
|
+
rse_string = f'{rse}: [u bright_blue link={pfn}]{pfn}[/]'
|
|
336
|
+
else:
|
|
337
|
+
rse_string = f'{rse}: [u bright_blue]{pfn}[/]'
|
|
338
|
+
else:
|
|
339
|
+
rse_string = '{0}: {1}'.format(rse, pfn)
|
|
450
340
|
if args.rses:
|
|
451
341
|
for selected_rse in rses:
|
|
452
342
|
if rse == selected_rse:
|
|
453
|
-
|
|
343
|
+
table_data.append([replica['scope'], replica['name'], sizefmt(replica['bytes'], args.human), replica['adler32'], rse_string])
|
|
454
344
|
else:
|
|
455
|
-
|
|
456
|
-
|
|
345
|
+
table_data.append([replica['scope'], replica['name'], sizefmt(replica['bytes'], args.human), replica['adler32'], rse_string])
|
|
346
|
+
|
|
347
|
+
if cli_config == 'rich':
|
|
348
|
+
table = generate_table(table_data, headers=header, col_alignments=['left', 'left', 'right', 'left', 'left'])
|
|
349
|
+
spinner.stop()
|
|
350
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
351
|
+
else:
|
|
352
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=header, disable_numparse=True))
|
|
457
353
|
return SUCCESS
|
|
458
354
|
|
|
459
355
|
|
|
460
356
|
@exception_handler
|
|
461
|
-
def add_dataset(args):
|
|
357
|
+
def add_dataset(args, client, logger, console, spinner):
|
|
462
358
|
"""
|
|
463
359
|
%(prog)s add-dataset [options] <dsn>
|
|
464
360
|
|
|
465
361
|
Add a dataset identifier.
|
|
466
362
|
"""
|
|
467
|
-
|
|
363
|
+
|
|
468
364
|
scope, name = get_scope(args.did, client)
|
|
469
365
|
client.add_dataset(scope=scope, name=name, statuses={'monotonic': args.monotonic}, lifetime=args.lifetime)
|
|
470
366
|
print('Added %s:%s' % (scope, name))
|
|
@@ -472,13 +368,13 @@ def add_dataset(args):
|
|
|
472
368
|
|
|
473
369
|
|
|
474
370
|
@exception_handler
|
|
475
|
-
def add_container(args):
|
|
371
|
+
def add_container(args, client, logger, console, spinner):
|
|
476
372
|
"""
|
|
477
373
|
%(prog)s add-container [options] <dsn>
|
|
478
374
|
|
|
479
375
|
Add a container identifier.
|
|
480
376
|
"""
|
|
481
|
-
|
|
377
|
+
|
|
482
378
|
scope, name = get_scope(args.did, client)
|
|
483
379
|
client.add_container(scope=scope, name=name, statuses={'monotonic': args.monotonic}, lifetime=args.lifetime)
|
|
484
380
|
print('Added %s:%s' % (scope, name))
|
|
@@ -486,13 +382,13 @@ def add_container(args):
|
|
|
486
382
|
|
|
487
383
|
|
|
488
384
|
@exception_handler
|
|
489
|
-
def attach(args):
|
|
385
|
+
def attach(args, client, logger, console, spinner):
|
|
490
386
|
"""
|
|
491
387
|
%(prog)s attach [options] <field1=value1 field2=value2 ...>
|
|
492
388
|
|
|
493
389
|
Attach a data identifier.
|
|
494
390
|
"""
|
|
495
|
-
|
|
391
|
+
|
|
496
392
|
scope, name = get_scope(args.todid, client)
|
|
497
393
|
dids = args.dids
|
|
498
394
|
limit = 499
|
|
@@ -531,13 +427,13 @@ def attach(args):
|
|
|
531
427
|
|
|
532
428
|
|
|
533
429
|
@exception_handler
|
|
534
|
-
def detach(args):
|
|
430
|
+
def detach(args, client, logger, console, spinner):
|
|
535
431
|
"""
|
|
536
432
|
%(prog)s detach [options] <field1=value1 field2=value2 ...>
|
|
537
433
|
|
|
538
434
|
Detach data identifier.
|
|
539
435
|
"""
|
|
540
|
-
|
|
436
|
+
|
|
541
437
|
scope, name = get_scope(args.fromdid, client)
|
|
542
438
|
dids = []
|
|
543
439
|
for did in args.dids:
|
|
@@ -549,16 +445,16 @@ def detach(args):
|
|
|
549
445
|
|
|
550
446
|
|
|
551
447
|
@exception_handler
|
|
552
|
-
def list_dids(args):
|
|
448
|
+
def list_dids(args, client, logger, console, spinner):
|
|
553
449
|
"""
|
|
554
450
|
%(prog)s list-dids scope[:*|:name] [--filter 'value' | --recursive]
|
|
555
451
|
|
|
556
452
|
List the data identifiers for a given scope.
|
|
557
453
|
"""
|
|
558
|
-
|
|
454
|
+
|
|
559
455
|
filters = {}
|
|
560
456
|
type_ = 'collection'
|
|
561
|
-
|
|
457
|
+
table_data = []
|
|
562
458
|
|
|
563
459
|
try:
|
|
564
460
|
scope, name = get_scope(args.did[0], client)
|
|
@@ -569,16 +465,16 @@ def list_dids(args):
|
|
|
569
465
|
name = '*'
|
|
570
466
|
|
|
571
467
|
if scope not in client.list_scopes():
|
|
572
|
-
logger.error('Scope not found')
|
|
468
|
+
logger.error('Scope not found.')
|
|
573
469
|
return FAILURE
|
|
574
470
|
|
|
575
471
|
if args.recursive and '*' in name:
|
|
576
|
-
logger.error('Option recursive cannot be used with wildcards')
|
|
472
|
+
logger.error('Option recursive cannot be used with wildcards.')
|
|
577
473
|
return FAILURE
|
|
578
474
|
else:
|
|
579
475
|
if filters:
|
|
580
476
|
if ('name' in filters) and (name != '*'):
|
|
581
|
-
logger.error('Must have a wildcard in did name if filtering by name')
|
|
477
|
+
logger.error('Must have a wildcard in did name if filtering by name.')
|
|
582
478
|
return FAILURE
|
|
583
479
|
|
|
584
480
|
try:
|
|
@@ -599,20 +495,34 @@ def list_dids(args):
|
|
|
599
495
|
logger.error(e)
|
|
600
496
|
return FAILURE
|
|
601
497
|
|
|
498
|
+
if cli_config == 'rich':
|
|
499
|
+
spinner.update(status='Fetching DIDs')
|
|
500
|
+
spinner.start()
|
|
501
|
+
|
|
602
502
|
for did in client.list_dids(scope, filters=filters, did_type=type_, long=True, recursive=args.recursive):
|
|
603
|
-
|
|
503
|
+
if cli_config == 'rich':
|
|
504
|
+
table_data.append([f"{did['scope']}:{did['name']}", Text(did['did_type'], style=CLITheme.DID_TYPE.get(did['did_type'], 'default'))])
|
|
505
|
+
else:
|
|
506
|
+
table_data.append([f"{did['scope']}:{did['name']}", did['did_type']])
|
|
604
507
|
|
|
605
|
-
if
|
|
606
|
-
|
|
607
|
-
|
|
508
|
+
if cli_config == 'rich':
|
|
509
|
+
if args.short:
|
|
510
|
+
table = generate_table([[did] for did, _ in table_data], headers=['SCOPE:NAME'], col_alignments=['left'])
|
|
511
|
+
else:
|
|
512
|
+
table = generate_table(table_data, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
|
|
513
|
+
spinner.stop()
|
|
514
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
608
515
|
else:
|
|
609
|
-
|
|
610
|
-
|
|
516
|
+
if args.short:
|
|
517
|
+
for did, _ in table_data:
|
|
518
|
+
print(did)
|
|
519
|
+
else:
|
|
520
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
|
|
611
521
|
return SUCCESS
|
|
612
522
|
|
|
613
523
|
|
|
614
524
|
@exception_handler
|
|
615
|
-
def list_dids_extended(args):
|
|
525
|
+
def list_dids_extended(args, client, logger, console, spinner):
|
|
616
526
|
"""
|
|
617
527
|
%(prog)s list-dids-extended scope[:*|:name] [--filter 'key=value' | --recursive]
|
|
618
528
|
|
|
@@ -623,42 +533,63 @@ def list_dids_extended(args):
|
|
|
623
533
|
|
|
624
534
|
|
|
625
535
|
@exception_handler
|
|
626
|
-
def list_scopes(args):
|
|
536
|
+
def list_scopes(args, client, logger, console, spinner):
|
|
627
537
|
"""
|
|
628
538
|
%(prog)s list-scopes <scope>
|
|
629
539
|
|
|
630
540
|
List scopes.
|
|
631
541
|
"""
|
|
632
542
|
# For the moment..
|
|
633
|
-
|
|
543
|
+
|
|
544
|
+
if cli_config == 'rich':
|
|
545
|
+
spinner.update(status='Fetching scopes')
|
|
546
|
+
spinner.start()
|
|
547
|
+
|
|
634
548
|
scopes = client.list_scopes()
|
|
635
|
-
|
|
636
|
-
|
|
549
|
+
if cli_config == 'rich':
|
|
550
|
+
table = generate_table([[scope] for scope in sorted(scopes)], headers=['SCOPE'], col_alignments=['left'])
|
|
551
|
+
spinner.stop()
|
|
552
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
553
|
+
else:
|
|
554
|
+
for scope in scopes:
|
|
555
|
+
print(scope)
|
|
637
556
|
return SUCCESS
|
|
638
557
|
|
|
639
558
|
|
|
640
559
|
@exception_handler
|
|
641
|
-
def list_files(args):
|
|
560
|
+
def list_files(args, client, logger, console, spinner):
|
|
642
561
|
"""
|
|
643
562
|
%(prog)s list-files [options] <field1=value1 field2=value2 ...>
|
|
644
563
|
|
|
645
564
|
List data identifier contents.
|
|
646
565
|
"""
|
|
647
|
-
|
|
566
|
+
|
|
567
|
+
if cli_config == 'rich':
|
|
568
|
+
spinner.update(status='Fetching files')
|
|
569
|
+
spinner.start()
|
|
570
|
+
|
|
648
571
|
if args.csv:
|
|
649
572
|
for did in args.dids:
|
|
650
573
|
scope, name = get_scope(did, client)
|
|
651
574
|
for f in client.list_files(scope=scope, name=name):
|
|
652
575
|
guid = f['guid']
|
|
653
576
|
if guid:
|
|
654
|
-
guid = '
|
|
577
|
+
guid = f'{guid[0:8]}-{guid[8:12]}-{guid[12:16]}-{guid[16:20]}-{guid[20:32]}'
|
|
655
578
|
else:
|
|
656
579
|
guid = '(None)'
|
|
657
580
|
print('{}:{}'.format(f['scope'], f['name']), guid, f['adler32'], sizefmt(f['bytes'], args.human), f['events'], sep=',')
|
|
581
|
+
if cli_config == 'rich':
|
|
582
|
+
spinner.stop()
|
|
658
583
|
return SUCCESS
|
|
659
584
|
elif args.LOCALPATH:
|
|
660
|
-
|
|
661
|
-
|
|
585
|
+
full_str = ''
|
|
586
|
+
if cli_config == 'rich':
|
|
587
|
+
header = '''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
|
|
588
|
+
<!DOCTYPE POOLFILECATALOG SYSTEM "InMemory">
|
|
589
|
+
<POOLFILECATALOG>'''
|
|
590
|
+
full_str = header
|
|
591
|
+
else:
|
|
592
|
+
print('''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
|
|
662
593
|
<!DOCTYPE POOLFILECATALOG SYSTEM "InMemory">
|
|
663
594
|
<POOLFILECATALOG>''')
|
|
664
595
|
|
|
@@ -676,15 +607,23 @@ def list_files(args):
|
|
|
676
607
|
for f in client.list_files(scope=scope, name=name):
|
|
677
608
|
guid = f['guid']
|
|
678
609
|
if guid:
|
|
679
|
-
guid = '
|
|
610
|
+
guid = f'{guid[0:8]}-{guid[8:12]}-{guid[12:16]}-{guid[16:20]}-{guid[20:32]}'
|
|
680
611
|
else:
|
|
681
612
|
guid = '(None)'
|
|
682
|
-
print(file_str % (guid, args.LOCALPATH, f['name'], f['name']))
|
|
683
613
|
|
|
684
|
-
|
|
614
|
+
if cli_config == 'rich':
|
|
615
|
+
full_str += '\n' + file_str % (guid, args.LOCALPATH, f['name'], f['name'])
|
|
616
|
+
else:
|
|
617
|
+
print(file_str % (guid, args.LOCALPATH, f['name'], f['name']))
|
|
618
|
+
|
|
619
|
+
if cli_config == 'rich':
|
|
620
|
+
spinner.stop()
|
|
621
|
+
print_output(full_str + '\n</POOLFILECATALOG>', console=console, no_pager=True)
|
|
622
|
+
else:
|
|
623
|
+
print('</POOLFILECATALOG>')
|
|
685
624
|
return SUCCESS
|
|
686
625
|
else:
|
|
687
|
-
|
|
626
|
+
table_data = []
|
|
688
627
|
for did in args.dids:
|
|
689
628
|
totfiles = 0
|
|
690
629
|
totsize = 0
|
|
@@ -697,82 +636,140 @@ def list_files(args):
|
|
|
697
636
|
totevents += int(file.get('events', 0))
|
|
698
637
|
guid = file['guid']
|
|
699
638
|
if guid:
|
|
700
|
-
guid = '
|
|
639
|
+
guid = f'{guid[0:8]}-{guid[8:12]}-{guid[12:16]}-{guid[16:20]}-{guid[20:32]}'
|
|
701
640
|
else:
|
|
702
641
|
guid = '(None)'
|
|
703
|
-
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
|
|
707
|
-
|
|
708
|
-
|
|
642
|
+
table_data.append([f"{file['scope']}:{file['name']}", guid, f"ad:{file['adler32']}", sizefmt(file['bytes'], args.human), file['events']])
|
|
643
|
+
|
|
644
|
+
if cli_config == 'rich':
|
|
645
|
+
table = generate_table(table_data, headers=['SCOPE:NAME', 'GUID', 'ADLER32', 'FILESIZE', 'EVENTS'], col_alignments=['left', 'left', 'left', 'right', 'right'])
|
|
646
|
+
summary_data = [['Total files', str(totfiles)], ['Total size', sizefmt(totsize, args.human)]]
|
|
647
|
+
if totevents:
|
|
648
|
+
summary_data.append(['Total events', str(totevents)])
|
|
649
|
+
summary_table = generate_table(summary_data, col_alignments=['left', 'left'], row_styles=['none'])
|
|
650
|
+
spinner.stop()
|
|
651
|
+
print_output(table, summary_table, console=console, no_pager=args.no_pager)
|
|
652
|
+
else:
|
|
653
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', 'GUID', 'ADLER32', 'FILESIZE', 'EVENTS'], disable_numparse=True))
|
|
654
|
+
print('Total files : %s' % totfiles)
|
|
655
|
+
print('Total size : %s' % sizefmt(totsize, args.human))
|
|
656
|
+
if totevents:
|
|
657
|
+
print('Total events : %s' % totevents)
|
|
709
658
|
return SUCCESS
|
|
710
659
|
|
|
711
660
|
|
|
712
661
|
@exception_handler
|
|
713
|
-
def list_content(args):
|
|
662
|
+
def list_content(args, client, logger, console, spinner):
|
|
714
663
|
"""
|
|
715
664
|
%(prog)s list-content [options] <field1=value1 field2=value2 ...>
|
|
716
665
|
|
|
717
666
|
List data identifier contents.
|
|
718
667
|
"""
|
|
719
|
-
|
|
720
|
-
|
|
668
|
+
|
|
669
|
+
table_data = []
|
|
670
|
+
if cli_config == 'rich':
|
|
671
|
+
spinner.update(status='Fetching dataset contents')
|
|
672
|
+
spinner.start()
|
|
673
|
+
|
|
721
674
|
for did in args.dids:
|
|
722
675
|
scope, name = get_scope(did, client)
|
|
723
676
|
for content in client.list_content(scope=scope, name=name):
|
|
724
|
-
|
|
725
|
-
|
|
726
|
-
|
|
727
|
-
|
|
677
|
+
if cli_config == 'rich':
|
|
678
|
+
table_data.append([f"{content['scope']}:{content['name']}", Text(content['type'].upper(), style=CLITheme.DID_TYPE.get(content['type'].upper(), 'default'))])
|
|
679
|
+
else:
|
|
680
|
+
table_data.append([f"{content['scope']}:{content['name']}", content['type'].upper()])
|
|
681
|
+
|
|
682
|
+
if cli_config == 'rich':
|
|
683
|
+
if args.short:
|
|
684
|
+
table = generate_table([[did] for did, _ in table_data], headers=['SCOPE:NAME'], col_alignments=['left'])
|
|
685
|
+
else:
|
|
686
|
+
table = generate_table(table_data, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
|
|
687
|
+
spinner.stop()
|
|
688
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
728
689
|
else:
|
|
729
|
-
|
|
690
|
+
if args.short:
|
|
691
|
+
for did, dummy in table_data:
|
|
692
|
+
print(did)
|
|
693
|
+
else:
|
|
694
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
|
|
730
695
|
return SUCCESS
|
|
731
696
|
|
|
732
697
|
|
|
733
698
|
@exception_handler
|
|
734
|
-
def list_content_history(args):
|
|
699
|
+
def list_content_history(args, client, logger, console, spinner):
|
|
735
700
|
"""
|
|
736
701
|
%(prog)s list-content-history [options] <field1=value1 field2=value2 ...>
|
|
737
702
|
|
|
738
703
|
List data identifier contents.
|
|
739
704
|
"""
|
|
740
|
-
|
|
741
|
-
|
|
705
|
+
|
|
706
|
+
table_data = []
|
|
707
|
+
if cli_config == 'rich':
|
|
708
|
+
spinner.update(status='Fetching content history')
|
|
709
|
+
spinner.start()
|
|
710
|
+
|
|
742
711
|
for did in args.dids:
|
|
743
712
|
scope, name = get_scope(did, client)
|
|
744
713
|
for content in client.list_content_history(scope=scope, name=name):
|
|
745
|
-
|
|
746
|
-
|
|
714
|
+
if cli_config == 'rich':
|
|
715
|
+
table_data.append([f"{content['scope']}:{content['name']}", Text(content['type'].upper(), style=CLITheme.DID_TYPE.get(content['type'].upper(), 'default'))])
|
|
716
|
+
else:
|
|
717
|
+
table_data.append([f"{content['scope']}:{content['name']}", content['type'].upper()])
|
|
718
|
+
|
|
719
|
+
if cli_config == 'rich':
|
|
720
|
+
table = generate_table(table_data, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
|
|
721
|
+
spinner.stop()
|
|
722
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
723
|
+
else:
|
|
724
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
|
|
747
725
|
return SUCCESS
|
|
748
726
|
|
|
749
727
|
|
|
750
728
|
@exception_handler
|
|
751
|
-
def list_parent_dids(args):
|
|
729
|
+
def list_parent_dids(args, client, logger, console, spinner):
|
|
752
730
|
"""
|
|
753
731
|
%(prog)s list-parent-dids
|
|
754
732
|
|
|
755
733
|
List parent data identifier.
|
|
756
734
|
"""
|
|
757
|
-
|
|
735
|
+
|
|
736
|
+
if cli_config == 'rich':
|
|
737
|
+
spinner.update(status='Fetching parent DIDs')
|
|
738
|
+
spinner.start()
|
|
739
|
+
|
|
758
740
|
if args.pfns:
|
|
759
741
|
dict_datasets = {}
|
|
742
|
+
output = []
|
|
760
743
|
for res in client.get_did_from_pfns(args.pfns):
|
|
761
744
|
for key in res:
|
|
762
745
|
if key not in dict_datasets:
|
|
763
746
|
dict_datasets[key] = []
|
|
764
747
|
for rule in client.list_associated_rules_for_file(res[key]['scope'], res[key]['name']):
|
|
765
|
-
if
|
|
766
|
-
dict_datasets[key].append(
|
|
767
|
-
|
|
768
|
-
|
|
769
|
-
|
|
748
|
+
if f"{rule['scope']}:{rule['name']}" not in dict_datasets[key]:
|
|
749
|
+
dict_datasets[key].append(f"{rule['scope']}:{rule['name']}")
|
|
750
|
+
|
|
751
|
+
for i, pfn in enumerate(dict_datasets):
|
|
752
|
+
if cli_config == 'rich':
|
|
753
|
+
parent_tree = Tree('')
|
|
754
|
+
for parent in dict_datasets[pfn]:
|
|
755
|
+
parent_tree.add(parent)
|
|
756
|
+
table = generate_table([['PFN', pfn], ['Parents', parent_tree]], col_alignments=['left', 'left'], row_styles=['none'])
|
|
757
|
+
output.append(table)
|
|
758
|
+
else:
|
|
759
|
+
print('PFN: ', pfn)
|
|
760
|
+
print('Parents: ', ','.join(dict_datasets[pfn]))
|
|
761
|
+
|
|
762
|
+
if cli_config == 'rich':
|
|
763
|
+
spinner.stop()
|
|
764
|
+
print_output(*output, console=console, no_pager=args.no_pager)
|
|
770
765
|
elif args.guids:
|
|
766
|
+
output = []
|
|
771
767
|
guids = []
|
|
772
768
|
for input_ in args.guids:
|
|
773
769
|
try:
|
|
774
770
|
uuid.UUID(input_)
|
|
775
771
|
except ValueError:
|
|
772
|
+
print(f'Ignoring invalid GUID: {input_}')
|
|
776
773
|
continue
|
|
777
774
|
dict_datasets = {}
|
|
778
775
|
for guid in guids:
|
|
@@ -780,17 +777,38 @@ def list_parent_dids(args):
|
|
|
780
777
|
if guid not in dict_datasets:
|
|
781
778
|
dict_datasets[guid] = []
|
|
782
779
|
for rule in client.list_associated_rules_for_file(did['scope'], did['name']):
|
|
783
|
-
if
|
|
784
|
-
dict_datasets[guid].append(
|
|
785
|
-
|
|
786
|
-
|
|
787
|
-
|
|
780
|
+
if f"{rule['scope']}:{rule['name']}" not in dict_datasets[guid]:
|
|
781
|
+
dict_datasets[guid].append(f"{rule['scope']}:{rule['name']}")
|
|
782
|
+
|
|
783
|
+
for i, guid in enumerate(dict_datasets):
|
|
784
|
+
if cli_config == 'rich':
|
|
785
|
+
parent_tree = Tree('')
|
|
786
|
+
for parent in dict_datasets[guid]:
|
|
787
|
+
parent_tree.add(parent)
|
|
788
|
+
table = generate_table([['GUID', guid], ['Parents', parent_tree]], col_alignments=['left', 'left'], row_styles=['none'])
|
|
789
|
+
output.append(table)
|
|
790
|
+
else:
|
|
791
|
+
print('GUID: ', guid)
|
|
792
|
+
print('Parents : ', ','.join(dict_datasets[guid]))
|
|
793
|
+
|
|
794
|
+
if cli_config == 'rich':
|
|
795
|
+
spinner.stop()
|
|
796
|
+
print_output(*output, console=console, no_pager=args.no_pager)
|
|
788
797
|
elif args.did:
|
|
789
|
-
|
|
798
|
+
table_data = []
|
|
790
799
|
scope, name = get_scope(args.did, client)
|
|
791
800
|
for dataset in client.list_parent_dids(scope=scope, name=name):
|
|
792
|
-
|
|
793
|
-
|
|
801
|
+
if cli_config == 'rich':
|
|
802
|
+
table_data.append([f"{dataset['scope']}:{dataset['name']}", Text(dataset['type'], style=CLITheme.DID_TYPE.get(dataset['type'], 'default'))])
|
|
803
|
+
else:
|
|
804
|
+
table_data.append([f"{dataset['scope']}:{dataset['name']}", dataset['type']])
|
|
805
|
+
|
|
806
|
+
if cli_config == 'rich':
|
|
807
|
+
table = generate_table(table_data, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
|
|
808
|
+
spinner.stop()
|
|
809
|
+
print_output(table, console=console, no_pager=args.no_pager)
|
|
810
|
+
else:
|
|
811
|
+
print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
|
|
794
812
|
else:
|
|
795
813
|
print('At least one option has to be given. Use -h to list the options.')
|
|
796
814
|
return FAILURE
|
|
@@ -798,13 +816,13 @@ def list_parent_dids(args):
|
|
|
798
816
|
|
|
799
817
|
|
|
800
818
|
@exception_handler
|
|
801
|
-
def close(args):
|
|
819
|
+
def close(args, client, logger, console, spinner):
|
|
802
820
|
"""
|
|
803
821
|
%(prog)s close [options] <field1=value1 field2=value2 ...>
|
|
804
822
|
|
|
805
823
|
Close a dataset or container.
|
|
806
824
|
"""
|
|
807
|
-
|
|
825
|
+
|
|
808
826
|
for did in args.dids:
|
|
809
827
|
scope, name = get_scope(did, client)
|
|
810
828
|
client.set_status(scope=scope, name=name, open=False)
|
|
@@ -813,13 +831,13 @@ def close(args):
|
|
|
813
831
|
|
|
814
832
|
|
|
815
833
|
@exception_handler
|
|
816
|
-
def reopen(args):
|
|
834
|
+
def reopen(args, client, logger, console, spinner):
|
|
817
835
|
"""
|
|
818
836
|
%(prog)s reopen [options] <field1=value1 field2=value2 ...>
|
|
819
837
|
|
|
820
838
|
Reopen a dataset or container (only for privileged users).
|
|
821
839
|
"""
|
|
822
|
-
|
|
840
|
+
|
|
823
841
|
for did in args.dids:
|
|
824
842
|
scope, name = get_scope(did, client)
|
|
825
843
|
client.set_status(scope=scope, name=name, open=True)
|
|
@@ -828,30 +846,49 @@ def reopen(args):
|
|
|
828
846
|
|
|
829
847
|
|
|
830
848
|
@exception_handler
|
|
831
|
-
def stat(args):
|
|
849
|
+
def stat(args, client, logger, console, spinner):
|
|
832
850
|
"""
|
|
833
851
|
%(prog)s stat [options] <field1=value1 field2=value2 ...>
|
|
834
852
|
|
|
835
853
|
List attributes and statuses about data identifiers..
|
|
836
854
|
"""
|
|
837
|
-
|
|
855
|
+
|
|
856
|
+
if cli_config == 'rich':
|
|
857
|
+
spinner.update(status='Fetching DID stats')
|
|
858
|
+
spinner.start()
|
|
859
|
+
keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE}
|
|
860
|
+
|
|
861
|
+
output = []
|
|
838
862
|
for i, did in enumerate(args.dids):
|
|
839
|
-
if i > 0:
|
|
840
|
-
print('------')
|
|
841
863
|
scope, name = get_scope(did, client)
|
|
842
864
|
info = client.get_did(scope=scope, name=name, dynamic_depth='DATASET')
|
|
843
|
-
|
|
844
|
-
|
|
865
|
+
if cli_config == 'rich':
|
|
866
|
+
if i > 0:
|
|
867
|
+
output.append(Text(f'\nDID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
|
|
868
|
+
elif len(args.dids) > 1:
|
|
869
|
+
output.append(Text(f'DID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
|
|
870
|
+
table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for (k, v) in sorted(info.items())]
|
|
871
|
+
table = generate_table(table_data, row_styles=['none'], col_alignments=['left', 'left'])
|
|
872
|
+
output.append(table)
|
|
873
|
+
else:
|
|
874
|
+
if i > 0:
|
|
875
|
+
print('------')
|
|
876
|
+
table = [(k + ':', str(v)) for (k, v) in sorted(info.items())]
|
|
877
|
+
print(tabulate(table, tablefmt='plain', disable_numparse=True))
|
|
878
|
+
|
|
879
|
+
if cli_config == 'rich':
|
|
880
|
+
spinner.stop()
|
|
881
|
+
print_output(*output, console=console, no_pager=args.no_pager)
|
|
845
882
|
return SUCCESS
|
|
846
883
|
|
|
847
884
|
|
|
848
|
-
def erase(args):
|
|
885
|
+
def erase(args, client, logger, console, spinner):
|
|
849
886
|
"""
|
|
850
887
|
%(prog)s erase [options] <field1=value1 field2=value2 ...>
|
|
851
888
|
|
|
852
889
|
Delete data identifier.
|
|
853
890
|
"""
|
|
854
|
-
|
|
891
|
+
|
|
855
892
|
for did in args.dids:
|
|
856
893
|
if '*' in did:
|
|
857
894
|
logger.warning("This command doesn't support wildcards! Skipping DID: %s" % did)
|
|
@@ -883,7 +920,7 @@ def erase(args):
|
|
|
883
920
|
|
|
884
921
|
|
|
885
922
|
@exception_handler
|
|
886
|
-
def upload(args):
|
|
923
|
+
def upload(args, client, logger, console, spinner):
|
|
887
924
|
"""
|
|
888
925
|
rucio upload [scope:datasetname] [folder/] [files1 file2 file3]
|
|
889
926
|
%(prog)s upload [options] <field1=value1 field2=value2 ...>
|
|
@@ -951,7 +988,6 @@ def upload(args):
|
|
|
951
988
|
logger.error("If PFN is specified, you cannot use --recursive")
|
|
952
989
|
raise InputValidationError('Invalid input argument composition')
|
|
953
990
|
|
|
954
|
-
client = get_client(args)
|
|
955
991
|
from rucio.client.uploadclient import UploadClient
|
|
956
992
|
upload_client = UploadClient(client, logger=logger)
|
|
957
993
|
summary_file_path = 'rucio_upload.json' if args.summary else None
|
|
@@ -960,7 +996,7 @@ def upload(args):
|
|
|
960
996
|
|
|
961
997
|
|
|
962
998
|
@exception_handler
|
|
963
|
-
def download(args):
|
|
999
|
+
def download(args, client, logger, console, spinner):
|
|
964
1000
|
"""
|
|
965
1001
|
%(prog)s download [options] <field1=value1 field2=value2 ...>
|
|
966
1002
|
|
|
@@ -1004,7 +1040,6 @@ def download(args):
|
|
|
1004
1040
|
trace_pattern['usrdn'] = args.trace_usrdn
|
|
1005
1041
|
deactivate_file_download_exceptions = args.deactivate_file_download_exceptions if args.deactivate_file_download_exceptions is not None else False
|
|
1006
1042
|
|
|
1007
|
-
client = get_client(args)
|
|
1008
1043
|
from rucio.client.downloadclient import DownloadClient
|
|
1009
1044
|
download_client = DownloadClient(client=client, logger=logger, check_admin=args.allow_tape)
|
|
1010
1045
|
|
|
@@ -1083,7 +1118,7 @@ def download(args):
|
|
|
1083
1118
|
item_defaults['did'] = did_str
|
|
1084
1119
|
if args.rses is None:
|
|
1085
1120
|
logger.warning("No RSE was given, selecting one.")
|
|
1086
|
-
|
|
1121
|
+
|
|
1087
1122
|
replicas = client.list_replicas(
|
|
1088
1123
|
[{"scope": did_str.split(':')[0], "name": did_str.split(':')[-1]}],
|
|
1089
1124
|
schemes=args.protocol,
|
|
@@ -1156,36 +1191,55 @@ def _get_rse_for_pfn(replicas, pfn) -> Optional[str]:
|
|
|
1156
1191
|
|
|
1157
1192
|
|
|
1158
1193
|
@exception_handler
|
|
1159
|
-
def get_metadata(args):
|
|
1194
|
+
def get_metadata(args, client, logger, console, spinner):
|
|
1160
1195
|
"""
|
|
1161
1196
|
%(prog)s get_metadata [options] <field1=value1 field2=value2 ...>
|
|
1162
1197
|
|
|
1163
1198
|
Get data identifier metadata
|
|
1164
1199
|
"""
|
|
1165
|
-
|
|
1200
|
+
|
|
1166
1201
|
if args.plugin:
|
|
1167
1202
|
plugin = args.plugin
|
|
1168
1203
|
else:
|
|
1169
1204
|
plugin = config_get('client', 'metadata_default_plugin', default='DID_COLUMN')
|
|
1170
1205
|
|
|
1206
|
+
if cli_config == 'rich':
|
|
1207
|
+
spinner.update(status='Fetching metadata')
|
|
1208
|
+
spinner.start()
|
|
1209
|
+
keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE, **CLITheme.AVAILABILITY}
|
|
1210
|
+
|
|
1211
|
+
output = []
|
|
1171
1212
|
for i, did in enumerate(args.dids):
|
|
1172
|
-
if i > 0:
|
|
1173
|
-
print('------')
|
|
1174
1213
|
scope, name = get_scope(did, client)
|
|
1175
1214
|
meta = client.get_metadata(scope=scope, name=name, plugin=plugin)
|
|
1176
|
-
|
|
1177
|
-
|
|
1215
|
+
if cli_config == 'rich':
|
|
1216
|
+
if i > 0:
|
|
1217
|
+
output.append(Text(f'\nDID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
|
|
1218
|
+
elif len(args.dids) > 1:
|
|
1219
|
+
output.append(Text(f'DID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
|
|
1220
|
+
table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for (k, v) in sorted(meta.items())]
|
|
1221
|
+
table = generate_table(table_data, col_alignments=['left', 'left'], row_styles=['none'])
|
|
1222
|
+
output.append(table)
|
|
1223
|
+
else:
|
|
1224
|
+
if i > 0:
|
|
1225
|
+
print('------')
|
|
1226
|
+
table = [(k + ':', str(v)) for (k, v) in sorted(meta.items())]
|
|
1227
|
+
print(tabulate(table, tablefmt='plain', disable_numparse=True))
|
|
1228
|
+
|
|
1229
|
+
if cli_config == 'rich':
|
|
1230
|
+
spinner.stop()
|
|
1231
|
+
print_output(*output, console=console, no_pager=args.no_pager)
|
|
1178
1232
|
return SUCCESS
|
|
1179
1233
|
|
|
1180
1234
|
|
|
1181
1235
|
@exception_handler
|
|
1182
|
-
def set_metadata(args):
|
|
1236
|
+
def set_metadata(args, client, logger, console, spinner):
|
|
1183
1237
|
"""
|
|
1184
1238
|
%(prog)s set_metadata [options] <field1=value1 field2=value2 ...>
|
|
1185
1239
|
|
|
1186
1240
|
Set data identifier metadata
|
|
1187
1241
|
"""
|
|
1188
|
-
|
|
1242
|
+
|
|
1189
1243
|
value = args.value
|
|
1190
1244
|
if args.key == 'lifetime':
|
|
1191
1245
|
value = None if args.value.lower() == 'none' else float(args.value)
|
|
@@ -1195,26 +1249,26 @@ def set_metadata(args):
|
|
|
1195
1249
|
|
|
1196
1250
|
|
|
1197
1251
|
@exception_handler
|
|
1198
|
-
def delete_metadata(args):
|
|
1252
|
+
def delete_metadata(args, client, logger, console, spinner):
|
|
1199
1253
|
"""
|
|
1200
1254
|
%(prog)s set_metadata [options] <field1=value1 field2=value2 ...>
|
|
1201
1255
|
|
|
1202
1256
|
Delete data identifier metadata
|
|
1203
1257
|
"""
|
|
1204
|
-
|
|
1258
|
+
|
|
1205
1259
|
scope, name = get_scope(args.did, client)
|
|
1206
1260
|
client.delete_metadata(scope=scope, name=name, key=args.key)
|
|
1207
1261
|
return SUCCESS
|
|
1208
1262
|
|
|
1209
1263
|
|
|
1210
1264
|
@exception_handler
|
|
1211
|
-
def add_rule(args):
|
|
1265
|
+
def add_rule(args, client, logger, console, spinner):
|
|
1212
1266
|
"""
|
|
1213
1267
|
%(prog)s add-rule <did> <copies> <rse-expression> [options]
|
|
1214
1268
|
|
|
1215
1269
|
Add a rule to a did.
|
|
1216
1270
|
"""
|
|
1217
|
-
|
|
1271
|
+
|
|
1218
1272
|
dids = []
|
|
1219
1273
|
rule_ids = []
|
|
1220
1274
|
for did in args.dids:
|
|
@@ -1267,13 +1321,12 @@ def add_rule(args):
|
|
|
1267
1321
|
|
|
1268
1322
|
|
|
1269
1323
|
@exception_handler
|
|
1270
|
-
def delete_rule(args):
|
|
1324
|
+
def delete_rule(args, client, logger, console, spinner):
|
|
1271
1325
|
"""
|
|
1272
1326
|
%(prog)s delete-rule [options] <ruleid>
|
|
1273
1327
|
|
|
1274
1328
|
Delete a rule.
|
|
1275
1329
|
"""
|
|
1276
|
-
client = get_client(args)
|
|
1277
1330
|
|
|
1278
1331
|
try:
|
|
1279
1332
|
# Test if the rule_id is a real rule_id
|
|
@@ -1306,13 +1359,13 @@ def delete_rule(args):
|
|
|
1306
1359
|
|
|
1307
1360
|
|
|
1308
1361
|
@exception_handler
|
|
1309
|
-
def update_rule(args):
|
|
1362
|
+
def update_rule(args, client, logger, console, spinner):
|
|
1310
1363
|
"""
|
|
1311
1364
|
%(prog)s update-rule [options] <ruleid>
|
|
1312
1365
|
|
|
1313
1366
|
Update a rule.
|
|
1314
1367
|
"""
|
|
1315
|
-
|
|
1368
|
+
|
|
1316
1369
|
options = {}
|
|
1317
1370
|
if args.lifetime:
|
|
1318
1371
|
options['lifetime'] = None if args.lifetime.lower() == "none" else int(args.lifetime)
|
|
@@ -1356,13 +1409,12 @@ def update_rule(args):
|
|
|
1356
1409
|
|
|
1357
1410
|
|
|
1358
1411
|
@exception_handler
|
|
1359
|
-
def move_rule(args):
|
|
1412
|
+
def move_rule(args, client, logger, console, spinner):
|
|
1360
1413
|
"""
|
|
1361
1414
|
%(prog)s move-rule [options] <ruleid> <rse_expression>
|
|
1362
1415
|
|
|
1363
1416
|
Update a rule.
|
|
1364
1417
|
"""
|
|
1365
|
-
client = get_client(args)
|
|
1366
1418
|
|
|
1367
1419
|
override = {}
|
|
1368
1420
|
if args.activity:
|
|
@@ -1377,65 +1429,102 @@ def move_rule(args):


 @exception_handler
-def info_rule(args):
+def info_rule(args, client, logger, console, spinner):
     """
     %(prog)s rule-info [options] <ruleid>

     Retrieve information about a rule.
     """
-
+
+    if cli_config == 'rich':
+        spinner.update(status='Fetching rule info')
+        spinner.start()
+
     if args.examine:
+        output = []
         analysis = client.examine_replication_rule(rule_id=args.rule_id)
-
-
-
-
-
-
-
-
-
-
-
-
+        if cli_config == 'rich':
+            keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE, **CLITheme.RULE_STATE}
+            rule_status = " ".join([f'[{keyword_styles.get(word, "default")}]{word}[/]' for word in analysis['rule_error'].split()])
+            output.append(f'Status of the replication rule: {rule_status}')
+            if analysis['transfers']:
+                output.append('[b]STUCK Requests:[/]')
+                for transfer in analysis['transfers']:
+                    output.append(Padding.indent(Text(f"{transfer['scope']}:{transfer['name']}", style=CLITheme.SUBHEADER_HIGHLIGHT), 2))
+                    table_data = [['RSE:', str(transfer['rse'])],
+                                  ['Attempts:', str(transfer['attempts'])],
+                                  ['Last retry:', str(transfer['last_time'])],
+                                  ['Last error:', str(transfer['last_source'])],
+                                  ['Available sources:', ', '.join([source[0] for source in transfer['sources'] if source[1]])],
+                                  ['Blocklisted sources:', ', '.join([source[0] for source in transfer['sources'] if not source[1]])]]
+                    table = generate_table(table_data, row_styles=['none'], col_alignments=['left', 'left'])
+                    output.append(Padding.indent(table, 2))
+
+            spinner.stop()
+            print_output(*output, console=console, no_pager=args.no_pager)
+        else:
+            analysis = client.examine_replication_rule(rule_id=args.rule_id)
+            print('Status of the replication rule: %s' % analysis['rule_error'])
+            if analysis['transfers']:
+                print('STUCK Requests:')
+                for transfer in analysis['transfers']:
+                    print(' %s:%s' % (transfer['scope'], transfer['name']))
+                    print(' RSE: %s' % str(transfer['rse']))
+                    print(' Attempts: %s' % str(transfer['attempts']))
+                    print(' Last Retry: %s' % str(transfer['last_time']))
+                    print(' Last error: %s' % str(transfer['last_error']))
+                    print(' Last source: %s' % str(transfer['last_source']))
+                    print(' Available sources: %s' % ', '.join([source[0] for source in transfer['sources'] if source[1]]))
+                    print(' Blocklisted sources: %s' % ', '.join([source[0] for source in transfer['sources'] if not source[1]]))
     else:
         rule = client.get_replication_rule(rule_id=args.rule_id)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        if cli_config == 'rich':
+            keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE, **CLITheme.RULE_STATE}
+            table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for k, v in sorted(rule.items())]
+            table = generate_table(table_data, col_alignments=['left', 'left'], row_styles=['none'])
+            spinner.stop()
+            print_output(table, console=console, no_pager=args.no_pager)
+        else:
+            print("Id: %s" % rule['id'])
+            print("Account: %s" % rule['account'])
+            print("Scope: %s" % rule['scope'])
+            print("Name: %s" % rule['name'])
+            print("RSE Expression: %s" % rule['rse_expression'])
+            print("Copies: %s" % rule['copies'])
+            print("State: %s" % rule['state'])
+            print("Locks OK/REPLICATING/STUCK: %s/%s/%s" % (rule['locks_ok_cnt'], rule['locks_replicating_cnt'], rule['locks_stuck_cnt']))
+            print("Grouping: %s" % rule['grouping'])
+            print("Expires at: %s" % rule['expires_at'])
+            print("Locked: %s" % rule['locked'])
+            print("Weight: %s" % rule['weight'])
+            print("Created at: %s" % rule['created_at'])
+            print("Updated at: %s" % rule['updated_at'])
+            print("Error: %s" % rule['error'])
+            print("Subscription Id: %s" % rule['subscription_id'])
+            print("Source replica expression: %s" % rule['source_replica_expression'])
+            print("Activity: %s" % rule['activity'])
+            print("Comment: %s" % rule['comments'])
+            print("Ignore Quota: %s" % rule['ignore_account_limit'])
+            print("Ignore Availability: %s" % rule['ignore_availability'])
+            print("Purge replicas: %s" % rule['purge_replicas'])
+            print("Notification: %s" % rule['notification'])
+            print("End of life: %s" % rule['eol_at'])
+            print("Child Rule Id: %s" % rule['child_rule_id'])
     return SUCCESS


 @exception_handler
-def list_rules(args):
+def list_rules(args, client, logger, console, spinner):
     """
     %(prog)s list-rules ...

     List rules.
     """
-
+
+    if cli_config == 'rich':
+        spinner.update(status='Fetching rules')
+        spinner.start()
+
     if args.rule_id:
         rules = [client.get_replication_rule(args.rule_id)]
     elif args.file:
@@ -1480,83 +1569,129 @@ def list_rules(args):
         for rule in rules:
             print(rule['id'],
                   rule['account'],
-
-
+                  f"{rule['scope']}:{rule['name']}",
+                  f"{rule['state']}[{rule['locks_ok_cnt']}/{rule['locks_replicating_cnt']}/{rule['locks_stuck_cnt']}]",
                   rule['rse_expression'],
                   rule['copies'],
                   sizefmt(rule['bytes'], args.human) if rule['bytes'] is not None else 'N/A',
                   rule['expires_at'],
                   rule['created_at'],
                   sep=',')
+
+        if cli_config == 'rich':
+            spinner.stop()
     else:
-
+        table_data = []
         for rule in rules:
-
-
-
-
-
-
-
-
-
-
+            if cli_config == 'rich':
+                table_data.append([rule['id'],
+                                   rule['account'],
+                                   f"{rule['scope']}:{rule['name']}",
+                                   f"[{CLITheme.RULE_STATE.get(rule['state'], 'default')}]{rule['state']}[/][{rule['locks_ok_cnt']}/{rule['locks_replicating_cnt']}/{rule['locks_stuck_cnt']}]",
+                                   rule['rse_expression'],
+                                   rule['copies'],
+                                   sizefmt(rule['bytes'], args.human) if rule['bytes'] is not None else 'N/A',
+                                   rule['expires_at'],
+                                   rule['created_at']])
+            else:
+                table_data.append([rule['id'],
+                                   rule['account'],
+                                   f"{rule['scope']}:{rule['name']}",
+                                   f"{rule['state']}[{rule['locks_ok_cnt']}/{rule['locks_replicating_cnt']}/{rule['locks_stuck_cnt']}]",
+                                   rule['rse_expression'],
+                                   rule['copies'],
+                                   sizefmt(rule['bytes'], args.human) if rule['bytes'] is not None else 'N/A',
+                                   rule['expires_at'],
+                                   rule['created_at']])
+
+        if cli_config == 'rich':
+            table = generate_table(table_data, headers=['ID', 'ACCOUNT', 'SCOPE:NAME', 'STATE[OK/REPL/STUCK]', 'RSE EXPRESSION', 'COPIES', 'SIZE', 'EXPIRES (UTC)', 'CREATED (UTC)'],
+                                   col_alignments=['left', 'left', 'left', 'right', 'left', 'right', 'right', 'left', 'left'])
+            spinner.stop()
+            print_output(table, console=console, no_pager=args.no_pager)
+        else:
+            print(tabulate(table_data, tablefmt='simple', headers=['ID', 'ACCOUNT', 'SCOPE:NAME', 'STATE[OK/REPL/STUCK]', 'RSE_EXPRESSION', 'COPIES', 'SIZE', 'EXPIRES (UTC)', 'CREATED (UTC)'], disable_numparse=True))
     return SUCCESS


 @exception_handler
-def list_rules_history(args):
+def list_rules_history(args, client, logger, console, spinner):
     """
     %(prog)s list-rules_history ...

     List replication rules history for a DID.
     """
     rule_dict = []
-
+    if cli_config == 'rich':
+        spinner.update(status='Fetching rules history')
+        spinner.start()
+
     scope, name = get_scope(args.did, client)
+    table_data = []
     for rule in client.list_replication_rule_full_history(scope, name):
         if rule['rule_id'] not in rule_dict:
             rule_dict.append(rule['rule_id'])
-
-
-
-
-
+            if cli_config == 'rich':
+                table_data.append(['Insertion', rule['account'], rule['rse_expression'], rule['created_at']])
+            else:
+                print('-' * 40)
+                print('Rule insertion')
+                print('Account : %s' % rule['account'])
+                print('RSE expression : %s' % (rule['rse_expression']))
+                print('Time : %s' % (rule['created_at']))
         else:
             rule_dict.remove(rule['rule_id'])
-
-
-
-
-
+            if cli_config == 'rich':
+                table_data.append(['Deletion', rule['account'], rule['rse_expression'], rule['updated_at']])
+            else:
+                print('-' * 40)
+                print('Rule deletion')
+                print('Account : %s' % rule['account'])
+                print('RSE expression : %s' % (rule['rse_expression']))
+                print('Time : %s' % (rule['updated_at']))
+
+    if cli_config == 'rich':
+        table_data = sorted(table_data, key=lambda entry: entry[-1], reverse=True)
+        table = generate_table(table_data, headers=['ACTION', 'ACCOUNT', 'RSE EXPRESSION', 'TIME'])
+        spinner.stop()
+        print_output(table, console=console, no_pager=args.no_pager)
     return SUCCESS


 @exception_handler
-def list_rses(args):
+def list_rses(args, client, logger, console, spinner):
     """
     %(prog)s list-rses [options] <field1=value1 field2=value2 ...>

     List rses.

     """
-
+    if cli_config == 'rich':
+        spinner.update(status='Fetching RSEs')
+        spinner.start()

     rses = client.list_rses(args.rses)
-
-    print(
+    if args.csv:
+        print(*(rse['rse'] for rse in rses), sep='\n')
+    elif cli_config == 'rich':
+        table = generate_table([[rse['rse']] for rse in sorted(rses, key=lambda elem: elem['rse'])], headers=['RSE'], col_alignments=['left'])
+        spinner.stop()
+        print_output(table, console=console, no_pager=args.no_pager)
+    else:
+        for rse in rses:
+            print('%(rse)s' % rse)
     return SUCCESS


 @exception_handler
-def list_suspicious_replicas(args):
+def list_suspicious_replicas(args, client, logger, console, spinner):
     """
     %(prog)s list-suspicious-replicas [options] <field1=value1 field2=value2 ...>

     List replicas marked as suspicious.

     """
-
+
     rse_expression = None
     younger_than = None
     nattempts = None
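The handlers in the hunk above all follow the same dual-output pattern: rows are collected into table_data, then either rendered through rucio's generate_table()/print_output() helpers (rich mode) or printed with tabulate (plain mode). The following standalone sketch approximates that pattern with rich.table.Table and tabulate standing in for rucio's own helpers; the data and names are illustrative only, not taken from the diff.

# Illustrative sketch only (assumed standalone code, not part of rucio):
# branch between a rich table and a plain tabulate dump, as the new handlers do.
from rich.console import Console
from rich.table import Table
from tabulate import tabulate


def render(rows, headers, rich_output=True):
    if rich_output:
        table = Table(*headers)
        for row in rows:
            table.add_row(*(str(cell) for cell in row))
        Console().print(table)  # the real client routes this through print_output() and a pager
    else:
        print(tabulate(rows, headers=headers, tablefmt='simple', disable_numparse=True))


rows = [['1a2b3c', 'jdoe', 'user.jdoe:dataset_1', 'OK[2/0/0]']]
render(rows, ['ID', 'ACCOUNT', 'SCOPE:NAME', 'STATE[OK/REPL/STUCK]'], rich_output=False)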
@@ -1566,168 +1701,275 @@ def list_suspicious_replicas(args):
         younger_than = args.younger_than
     if args.nattempts:
         nattempts = args.nattempts
+
+    if cli_config == 'rich':
+        spinner.update(status='Fetching suspicious replicas')
+        spinner.start()
+
     # Generator is a list with one entry, which itself is a list of lists.
     replicas_gen = client.list_suspicious_replicas(rse_expression, younger_than, nattempts)
     for i in replicas_gen:
         replicas = i
     table = []
+    table_data = []
     for rep in replicas:
-
-
+        table_data.append([rep['rse'], rep['scope'], rep['created_at'], rep['cnt'], rep['name']])
+
+    if cli_config == 'rich':
+        table = generate_table(table_data, headers=['RSE EXPRESSION', 'SCOPE', 'CREATED AT', 'N-ATTEMPTS', 'FILE NAME'], col_alignments=['left', 'left', 'left', 'right', 'left'])
+        spinner.stop()
+        print_output(table, console=console, no_pager=args.no_pager)
+    else:
+        print(tabulate(table_data, headers=(['RSE Expression:', 'Scope:', 'Created at:', 'Nattempts:', 'File Name:'])))
     return SUCCESS


 @exception_handler
-def list_rse_attributes(args):
+def list_rse_attributes(args, client, logger, console, spinner):
     """
     %(prog)s list-rse-attributes [options] <field1=value1 field2=value2 ...>

     List rses.

     """
-
+
     attributes = client.list_rse_attributes(rse=args.rse)
-
-
+    if cli_config == 'rich':
+        keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.RSE_TYPE}
+        table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for k, v in sorted(attributes.items())]  # columns have mixed datatypes
+        table = generate_table(table_data, col_alignments=['left', 'left'], row_styles=['none'])
+        print_output(table, console=console, no_pager=args.no_pager)
+    else:
+        table = [(k + ':', str(v)) for (k, v) in sorted(attributes.items())]  # columns have mixed datatypes
+        print(tabulate(table, tablefmt='plain', disable_numparse=True))  # disabling number parsing
     return SUCCESS


 @exception_handler
-def list_rse_usage(args):
+def list_rse_usage(args, client, logger, console, spinner):
    """
    %(prog)s list-rse-usage [options] <rse>

    Show the space usage of a given rse

    """
-
+
+    if cli_config == 'rich':
+        spinner.update(status='Fetching RSE usage')
+        spinner.start()
+
     all_usages = client.get_rse_usage(rse=args.rse, filters={'per_account': args.show_accounts})
     select_usages = [u for u in all_usages if u['source'] not in ('srm', 'gsiftp', 'webdav')]
-
+
+    if cli_config == 'rich':
+        output = []
+        table_data = []
+        header = ['SOURCE', 'USED', 'FILES', 'FREE', 'TOTAL', 'UPDATED AT']
+        header_account_data = ['ACCOUNT', 'USED', 'PERCENTAGE %']
+        key2id = {header[i].lower().replace(' ', '_'): i for i in range(len(header))}
+        account_data = {}
     for usage in select_usages:
-
+        if cli_config == 'rich':
+            row = [''] * len(header)
         for elem in usage:
-            if
+            if elem in ['free', 'total'] and usage['source'] != 'storage' or elem == 'files' and usage['source'] != 'rucio':
                 continue
             elif elem in ['used', 'free', 'total']:
-
+                if cli_config == 'rich':
+                    row[key2id[elem]] = sizefmt(usage[elem], args.human)
+                else:
+                    print(' {0}: {1}'.format(elem, sizefmt(usage[elem], args.human)))
             elif elem == 'account_usages':
-
-
-
+                if cli_config == 'rich':
+                    if usage[elem]:
+                        for account in usage[elem]:
+                            if cli_config == 'rich':
+                                account_data[usage['source']].append([account['account'], sizefmt(account['used'], args.human), str(account['percentage'])])
                 else:
-
-
-
-
-
-                    used_string = 'used: {0}'.format(sizefmt(account['used'], args.human))
-                    account_string = 'account: {0}'.format(account['account'])
-                    percentage_string = 'percentage: {0}'.format(account['percentage'])
-                    print(base_string + account_string.ljust(col_width) + used_string.ljust(col_width) + percentage_string.ljust(col_width))
+                    account_usages_title = ' per account:'
+                    if not usage[elem]:
+                        account_usages_title += ' no usage'
+                    else:
+                        print(account_usages_title)
                         print(' ------')
+                        col_width = max(len(str(entry[1])) for account in usage[elem] for entry in list(account.items())) + 16
+                        for account in usage[elem]:
+                            base_string = ' '
+                            used_string = 'used: {0}'.format(sizefmt(account['used'], args.human))
+                            account_string = 'account: {0}'.format(account['account'])
+                            percentage_string = 'percentage: {0}'.format(account['percentage'])
+                            print(base_string + account_string.ljust(col_width) + used_string.ljust(col_width) + percentage_string.ljust(col_width))
+                        print(' ------')
             else:
-
-
+                if cli_config == 'rich':
+                    if elem in key2id:
+                        row[key2id[elem]] = str(usage[elem])
+                    if elem == 'source':
+                        account_data[usage[elem]] = []
+                else:
+                    print(' {0}: {1}'.format(elem, usage[elem]))
+
+        if cli_config == 'rich':
+            table_data.append(row)
+
+    if cli_config == 'rich':
+        table = generate_table(table_data, headers=header, col_alignments=['left', 'right', 'right', 'right', 'right', 'left'])
+        output.append(table)
+
+        if args.show_accounts:
+            output.append('\n[b]USAGE PER ACCOUNT:')
+            for source in account_data:
+                if len(account_data[source]) > 0:
+                    output.append(Padding.indent(Text(f'source: {source}', style=CLITheme.SUBHEADER_HIGHLIGHT), 2))
+                    account_table = generate_table(account_data[source], headers=header_account_data, col_alignments=['left', 'right', 'right'])
+                    output.append(Padding.indent(account_table, 2))
+
+        spinner.stop()
+        print_output(*output, console=console, no_pager=args.no_pager)
+    else:
+        print('------')
     return SUCCESS


 @exception_handler
-def list_account_limits(args):
+def list_account_limits(args, client, logger, console, spinner):
     """
     %(prog)s list [options] <field1=value1 field2=value2 ...>

     List account limits.

     """
-
-
+    if cli_config == 'rich':
+        spinner.update(status='Fetching account limits')
+        spinner.start()
+
     if args.rse:
         limits = client.get_local_account_limit(account=args.limit_account, rse=args.rse)
     else:
         limits = client.get_local_account_limits(account=args.limit_account)
+
+    table_data = []
     for limit in list(limits.items()):
-
-
-    print(tabulate(table, tablefmt=tablefmt, headers=['RSE', 'LIMIT']))
+        table_data.append([limit[0], sizefmt(limit[1], args.human)])
+    table_data.sort()

-
+    if cli_config == 'rich':
+        table1 = generate_table(table_data, headers=['RSE', 'LIMIT'], col_alignments=['left', 'right'])
+    else:
+        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE', 'LIMIT']))
+
+    table_data = []
     limits = client.get_global_account_limits(account=args.limit_account)
     for limit in list(limits.items()):
         if (args.rse and args.rse in limit[1]['resolved_rses']) or not args.rse:
-
-
-    print(tabulate(table, tablefmt=tablefmt, headers=['RSE EXPRESSION', 'LIMIT']))
+            table_data.append([limit[0], sizefmt(limit[1]['limit'], args.human)])
+    table_data.sort()

+    if cli_config == 'rich':
+        table2 = generate_table(table_data, headers=['RSE EXPRESSION', 'LIMIT'], col_alignments=['left', 'right'])
+    else:
+        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE EXPRESSION', 'LIMIT']))
+
+    if cli_config == 'rich':
+        spinner.stop()
+        print_output(table1, table2, console=console, no_pager=args.no_pager)
     return SUCCESS


 @exception_handler
-def list_account_usage(args):
+def list_account_usage(args, client, logger, console, spinner):
     """
     %(prog)s list [options] <field1=value1 field2=value2 ...>

     List account usage.

     """
-
-
-
-
-
-
+    if cli_config == 'rich':
+        spinner.update(status='Fetching account usage')
+        spinner.start()
+
+    usage = client.get_local_account_usage(account=args.usage_account, rse=args.rse)
+    table_data = []
     for item in usage:
         remaining = 0 if float(item['bytes_remaining']) < 0 else float(item['bytes_remaining'])
-
-
-    print(tabulate(table, tablefmt=tablefmt, headers=['RSE', 'USAGE', 'LIMIT', 'QUOTA LEFT']))
+        table_data.append([item['rse'], sizefmt(item['bytes'], args.human), sizefmt(item['bytes_limit'], args.human), sizefmt(remaining, args.human)])
+    table_data.sort()

-
+    if cli_config == 'rich':
+        table1 = generate_table(table_data, headers=['RSE', 'USAGE', 'LIMIT', 'QUOTA LEFT'], col_alignments=['left', 'right', 'right', 'right'])
+    else:
+        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE', 'USAGE', 'LIMIT', 'QUOTA LEFT']))
+
+    table_data = []
     usage = client.get_global_account_usage(account=args.usage_account)
     for item in usage:
         if (args.rse and args.rse in item['rse_expression']) or not args.rse:
             remaining = 0 if float(item['bytes_remaining']) < 0 else float(item['bytes_remaining'])
-
-
-    print(tabulate(table, tablefmt=tablefmt, headers=['RSE EXPRESSION', 'USAGE', 'LIMIT', 'QUOTA LEFT']))
+            table_data.append([item['rse_expression'], sizefmt(item['bytes'], args.human), sizefmt(item['bytes_limit'], args.human), sizefmt(remaining, args.human)])
+    table_data.sort()

+    if cli_config == 'rich':
+        table2 = generate_table(table_data, headers=['RSE EXPRESSION', 'USAGE', 'LIMIT', 'QUOTA LEFT'], col_alignments=['left', 'right', 'right', 'right'])
+    else:
+        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE EXPRESSION', 'USAGE', 'LIMIT', 'QUOTA LEFT']))
+
+    if cli_config == 'rich':
+        spinner.stop()
+        print_output(table1, table2, console=console, no_pager=args.no_pager)
     return SUCCESS


 @exception_handler
-def list_datasets_rse(args):
+def list_datasets_rse(args, client, logger, console, spinner):
     """
     %(prog)s list [options] <field1=value1 field2=value2 ...>

     List the datasets in a site.

     """
-
+
+    if cli_config == 'rich':
+        spinner.update(status='Fetching datasets at RSE')
+        spinner.start()
+
     if args.long:
-
+        table_data = []
         for dsn in client.list_datasets_per_rse(args.rse):
-
-
-
+            table_data.append([f"{dsn['scope']}:{dsn['name']}"
+                               f"{str(dsn['available_length'])}/{str(dsn['length'])}",
+                               f"{str(dsn['available_bytes'])}/{str(dsn['bytes'])}"])
+
+        if cli_config == 'rich':
+            table_data.sort()
+            table = generate_table(table_data, headers=['SCOPE:NAME', 'LOCAL FILES/TOTAL FILES', 'LOCAL BYTES/TOTAL BYTES'], col_alignments=['left', 'right', 'right'])
+            spinner.stop()
+            print_output(table, console=console, no_pager=args.no_pager)
+        else:
+            print(tabulate(table_data, tablefmt=tablefmt, headers=['DID', 'LOCAL FILES/TOTAL FILES', 'LOCAL BYTES/TOTAL BYTES']))
     else:
-        dsns = list(set([
+        dsns = list(set([f"{dsn['scope']}:{dsn['name']}" for dsn in client.list_datasets_per_rse(args.rse)]))
         dsns.sort()
-
-
-
-
+        if cli_config == 'rich':
+            table = generate_table([[dsn] for dsn in dsns], headers=['SCOPE:NAME'])
+            spinner.stop()
+            print_output(table, console=console, no_pager=args.no_pager)
+        else:
+            print("SCOPE:NAME")
+            print('----------')
+            for dsn in dsns:
+                print(dsn)
     return SUCCESS


 @exception_handler
-def add_lifetime_exception(args):
+def add_lifetime_exception(args, client, logger, console, spinner):
     """
     %(prog)s add_lifetime_exception [options] <field1=value1 field2=value2 ...>

     Declare a lifetime model exception.

     """
-
+
     if not args.reason:
         logger.error('reason for the extension is mandatory')
         return FAILURE
@@ -1745,20 +1987,22 @@ def add_lifetime_exception(args):
         logger.error('inputfile is mandatory')
         return FAILURE
     with open(args.inputfile) as infile:
-
+        # Deduplicate the content of the input file and ignore empty lines.
+        dids = set(did for line in infile if (did := line.strip()))

     dids_list = []
     containers = []
     datasets = []
-    error_types = ['Total DIDs',
-                   'DID not submitted because it is a file',
-                   'DID that are containers and were resolved',
-                   'DID not submitted because it is not part of the lifetime campaign',
-                   'DID successfully submitted including the one from containers resolved']
     for did in dids:
         scope, name = get_scope(did, client)
         dids_list.append({'scope': scope, 'name': name})
-
+    error_summary = {
+        "total_dids": {"description": "Total DIDs", "count": len(dids_list)},
+        "files_ignored": {"description": "DID not submitted because it is a file", "count": 0},
+        "containers_resolved": {"description": "DID that are containers and were resolved", "count": 0},
+        "not_in_lifetime_model": {"description": "DID not submitted because it is not part of the lifetime campaign", "count": 0},
+        "successfully_submitted": {"description": "DID successfully submitted including the one from containers resolved", "count": 0},
+    }
     chunk_limit = 500  # Server should be able to accept 1000
     dids_list_copy = deepcopy(dids_list)
     for chunk in chunks(dids_list_copy, chunk_limit):
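The new input handling above collapses duplicate DIDs and skips empty lines in a single assignment-expression comprehension. A standalone illustration of that line's behaviour (the sample DIDs are invented, not taken from the diff):

# Assumed standalone snippet, not rucio code; requires Python 3.8+ for the walrus operator.
lines = ["user.jdoe:dataset_a\n", "\n", "user.jdoe:dataset_b\n", "user.jdoe:dataset_a\n"]
dids = set(did for line in lines if (did := line.strip()))
print(sorted(dids))  # ['user.jdoe:dataset_a', 'user.jdoe:dataset_b']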
@@ -1767,18 +2011,18 @@ def add_lifetime_exception(args):
             dids_list.remove({'scope': scope, 'name': name})
             if meta['did_type'] == 'FILE':
                 logger.warning('%s:%s is a file. Will be ignored' % (scope, name))
-
+                error_summary["files_ignored"]["count"] += 1
             elif meta['did_type'] == 'CONTAINER':
                 logger.warning('%s:%s is a container. It needs to be resolved' % (scope, name))
                 containers.append({'scope': scope, 'name': name})
-
+                error_summary["containers_resolved"]["count"] += 1
             elif not meta['eol_at']:
                 logger.warning('%s:%s is not affected by the lifetime model' % (scope, name))
-
+                error_summary["not_in_lifetime_model"]["count"] += 1
             else:
                 logger.info('%s:%s will be declared' % (scope, name))
                 datasets.append({'scope': scope, 'name': name})
-
+                error_summary["successfully_submitted"]["count"] += 1

     for did in dids_list:
         scope = did['scope']
@@ -1796,11 +2040,11 @@ def add_lifetime_exception(args):
         logger.debug('%s:%s' % (scope, name))
         if not meta['eol_at']:
             logger.warning('%s:%s is not affected by the lifetime model' % (scope, name))
-
+            error_summary["not_in_lifetime_model"]["count"] += 1
         else:
             logger.info('%s:%s will be declared' % (scope, name))
             datasets.append({'scope': scope, 'name': name})
-
+            error_summary["successfully_submitted"]["count"] += 1
     if not datasets:
         logger.error('Nothing to submit')
         return SUCCESS
@@ -1810,19 +2054,26 @@ def add_lifetime_exception(args):
         logger.error(err)
         return FAILURE
     except Exception:
-
-
+        error_message = 'Failure to submit exception. Please retry.'
+        if cli_config == 'rich':
+            if logger.level == DEBUG:
+                logger.exception(error_message)
+            else:
+                logger.error(error_message)
+        else:
+            logger.error(error_message)
+            logger.debug(traceback.format_exc())
         return FAILURE

-    logger.info('Exception successfully submitted. Summary below')
-    for
-    print('{0:100} {1:6d}'.format(
+    logger.info('Exception successfully submitted. Summary below:')
+    for key, data in error_summary.items():
+        print('{0:100} {1:6d}'.format(data["description"], data["count"]))
     return SUCCESS


-def test_server(args):
+def test_server(args, client, logger, console, spinner):
     """"
-    %(prog)s test-server [options] <field1=value1 field2=value2 ...>
+    %(prog)s test-rucio-server [options] <field1=value1 field2=value2 ...>
     Test the client against a server.
     """
     suite = unittest.TestLoader().loadTestsFromTestCase(TestRucioServer)
@@ -1830,13 +2081,11 @@ def test_server(args):
     return SUCCESS


-def touch(args):
+def touch(args, client, logger, console, spinner):
     """
     %(prog)s touch [options] <did1 did2 ...>
     """

-    client = get_client(args)
-
     for did in args.dids:
         scope, name = get_scope(did, client)
         client.touch(scope, name, args.rse)
@@ -1846,7 +2095,7 @@ def rse_completer(prefix, parsed_args, **kwargs):
     """
     Completes the argument with a list of RSEs
     """
-    client = get_client(parsed_args)
+    client = get_client(parsed_args, logger=None)
     return ["%(rse)s" % rse for rse in client.list_rses()]

@@ -1854,7 +2103,7 @@ def get_parser():
     """
     Returns the argparse parser.
     """
-    oparser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]), add_help=True)
+    oparser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]), add_help=True, exit_on_error=False)
     subparsers = oparser.add_subparsers()

     # Main arguments
@@ -1863,12 +2112,13 @@ def get_parser():
     oparser.add_argument('--verbose', '-v', default=False, action='store_true', help="Print more verbose output.")
     oparser.add_argument('-H', '--host', dest="host", metavar="ADDRESS", help="The Rucio API host.")
     oparser.add_argument('--auth-host', dest="auth_host", metavar="ADDRESS", help="The Rucio Authentication host.")
-    oparser.add_argument('-a', '--account', dest="
+    oparser.add_argument('-a', '--account', dest="issuer", help="Rucio account to use.")
     oparser.add_argument('-S', '--auth-strategy', dest="auth_strategy", default=None, help="Authentication strategy (userpass, x509...)")
     oparser.add_argument('-T', '--timeout', dest="timeout", type=float, default=None, help="Set all timeout values to seconds.")
     oparser.add_argument('--robot', '-R', dest="human", default=True, action='store_false', help="All output in bytes and without the units. This output format is preferred by parsers and scripts.")
     oparser.add_argument('--user-agent', '-U', dest="user_agent", default='rucio-clients', action='store', help="Rucio User Agent")
     oparser.add_argument('--vo', dest="vo", metavar="VO", default=None, help="VO to authenticate at. Only used in multi-VO mode.")
+    oparser.add_argument("--no-pager", dest="no_pager", default=False, action='store_true', help=argparse.SUPPRESS)

     # Options for the userpass or OIDC auth_strategy
     oparser.add_argument('-u', '--user', dest='username', default=None, help='username')
@@ -1892,7 +2142,7 @@ def get_parser():

     # Options for the x509 auth_strategy
     oparser.add_argument('--certificate', dest='certificate', default=None, help='Client certificate file for x509 Authentication.')
-    oparser.add_argument('--
+    oparser.add_argument('--client-key', dest='client_key', default=None, help='Client key for x509 Authentication.')
     oparser.add_argument('--ca-certificate', dest='ca_certificate', default=None, help='CA certificate to verify peer against (SSL).')

     # Ping command
@@ -2467,6 +2717,7 @@ You can filter by account::
     list_rses_parser.set_defaults(function=list_rses)
     list_rses_parser.add_argument('--rses', dest='rses', action='store', help='The RSE filter expression. A comprehensive help about RSE expressions \
can be found in ' + Color.BOLD + 'https://rucio.cern.ch/documentation/started/concepts/rse_expressions' + Color.END)
+    list_rses_parser.add_argument("--csv", action='store_true', help='Output a list of RSEs as a csv')

     # The list-suspicious-replicas command
     list_suspicious_replicas_parser = subparsers.add_parser('list-suspicious-replicas', help='Show the list of all replicas marked "suspicious".')
@@ -2489,7 +2740,7 @@ can be found in ' + Color.BOLD + 'https://rucio.cern.ch/documentation/started/co
     list_datasets_rse_parser.add_argument('--long', dest='long', action='store_true', default=False, help='The long option')

     # The test-server command
-    test_server_parser = subparsers.add_parser('test-server', help='Test Server', description='Run a bunch of tests against the Rucio Servers.')
+    test_server_parser = subparsers.add_parser('test-rucio-server', help='Test Server', description='Run a bunch of tests against the Rucio Servers.')
     test_server_parser.set_defaults(function=test_server)

     # The get-metadata subparser
@@ -2516,7 +2767,13 @@ can be found in ' + Color.BOLD + 'https://rucio.cern.ch/documentation/started/co
     return oparser


-
+def main():
+
+    pager = get_pager()
+    console = Console(theme=Theme(CLITheme.LOG_THEMES), soft_wrap=True)
+    console.width = max(MIN_CONSOLE_WIDTH, console.width)
+    spinner = Status('Initializing spinner', spinner=CLITheme.SPINNER, spinner_style=CLITheme.SPINNER_STYLE, console=console)
+
     arguments = sys.argv[1:]
     # set the configuration before anything else, if the config parameter is present
     for argi in range(len(arguments)):
@@ -2533,10 +2790,36 @@ if __name__ == '__main__':

     args = oparser.parse_args(arguments)

-
+    if cli_config == 'rich':
+        install(console=console, word_wrap=True, width=min(console.width, MAX_TRACEBACK_WIDTH))  # Make rich exception tracebacks the default.
+        logger = setup_rich_logger(module_name=__name__, logger_name='user', verbose=args.verbose, console=console)
+    else:
+        logger = setup_logger(module_name=__name__, logger_name='user', verbose=args.verbose)
+
+    setup_gfal2_logger()
+    signal.signal(signal.SIGINT, lambda sig, frame: signal_handler(sig, frame, logger))
+
     start_time = time.time()
-
+    client = get_client(args, logger)
+    result = args.function(args, client, logger, console, spinner)
     end_time = time.time()
-    if
-
+    if cli_config == 'rich':
+        spinner.stop()
+        if console.is_terminal and not args.no_pager:
+            command_output = console.end_capture()
+            if command_output == '' and args.verbose:
+                print("Completed in %-0.4f sec." % (end_time - start_time))
+            else:
+                if args.verbose:
+                    command_output += "Completed in %-0.4f sec." % (end_time - start_time)
+                # Ignore SIGINT during pager execution.
+                signal.signal(signal.SIGINT, signal.SIG_IGN)
+                pager(command_output)
+    else:
+        if args.verbose:
+            print("Completed in %-0.4f sec." % (end_time - start_time))
     sys.exit(result)
+
+
+if __name__ == '__main__':
+    main()