rucio-clients 37.0.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of rucio-clients has been flagged as potentially problematic; click here for more details.

Files changed (104) hide show
  1. rucio/__init__.py +17 -0
  2. rucio/alembicrevision.py +15 -0
  3. rucio/cli/__init__.py +14 -0
  4. rucio/cli/account.py +216 -0
  5. rucio/cli/bin_legacy/__init__.py +13 -0
  6. rucio/cli/bin_legacy/rucio.py +2825 -0
  7. rucio/cli/bin_legacy/rucio_admin.py +2500 -0
  8. rucio/cli/command.py +272 -0
  9. rucio/cli/config.py +72 -0
  10. rucio/cli/did.py +191 -0
  11. rucio/cli/download.py +128 -0
  12. rucio/cli/lifetime_exception.py +33 -0
  13. rucio/cli/replica.py +162 -0
  14. rucio/cli/rse.py +293 -0
  15. rucio/cli/rule.py +158 -0
  16. rucio/cli/scope.py +40 -0
  17. rucio/cli/subscription.py +73 -0
  18. rucio/cli/upload.py +60 -0
  19. rucio/cli/utils.py +226 -0
  20. rucio/client/__init__.py +15 -0
  21. rucio/client/accountclient.py +432 -0
  22. rucio/client/accountlimitclient.py +183 -0
  23. rucio/client/baseclient.py +983 -0
  24. rucio/client/client.py +120 -0
  25. rucio/client/configclient.py +126 -0
  26. rucio/client/credentialclient.py +59 -0
  27. rucio/client/didclient.py +868 -0
  28. rucio/client/diracclient.py +56 -0
  29. rucio/client/downloadclient.py +1783 -0
  30. rucio/client/exportclient.py +44 -0
  31. rucio/client/fileclient.py +50 -0
  32. rucio/client/importclient.py +42 -0
  33. rucio/client/lifetimeclient.py +90 -0
  34. rucio/client/lockclient.py +109 -0
  35. rucio/client/metaconventionsclient.py +140 -0
  36. rucio/client/pingclient.py +44 -0
  37. rucio/client/replicaclient.py +452 -0
  38. rucio/client/requestclient.py +125 -0
  39. rucio/client/richclient.py +317 -0
  40. rucio/client/rseclient.py +746 -0
  41. rucio/client/ruleclient.py +294 -0
  42. rucio/client/scopeclient.py +90 -0
  43. rucio/client/subscriptionclient.py +173 -0
  44. rucio/client/touchclient.py +82 -0
  45. rucio/client/uploadclient.py +969 -0
  46. rucio/common/__init__.py +13 -0
  47. rucio/common/bittorrent.py +234 -0
  48. rucio/common/cache.py +111 -0
  49. rucio/common/checksum.py +168 -0
  50. rucio/common/client.py +122 -0
  51. rucio/common/config.py +788 -0
  52. rucio/common/constants.py +217 -0
  53. rucio/common/constraints.py +17 -0
  54. rucio/common/didtype.py +237 -0
  55. rucio/common/exception.py +1208 -0
  56. rucio/common/extra.py +31 -0
  57. rucio/common/logging.py +420 -0
  58. rucio/common/pcache.py +1409 -0
  59. rucio/common/plugins.py +185 -0
  60. rucio/common/policy.py +93 -0
  61. rucio/common/schema/__init__.py +200 -0
  62. rucio/common/schema/generic.py +416 -0
  63. rucio/common/schema/generic_multi_vo.py +395 -0
  64. rucio/common/stomp_utils.py +423 -0
  65. rucio/common/stopwatch.py +55 -0
  66. rucio/common/test_rucio_server.py +154 -0
  67. rucio/common/types.py +483 -0
  68. rucio/common/utils.py +1688 -0
  69. rucio/rse/__init__.py +96 -0
  70. rucio/rse/protocols/__init__.py +13 -0
  71. rucio/rse/protocols/bittorrent.py +194 -0
  72. rucio/rse/protocols/cache.py +111 -0
  73. rucio/rse/protocols/dummy.py +100 -0
  74. rucio/rse/protocols/gfal.py +708 -0
  75. rucio/rse/protocols/globus.py +243 -0
  76. rucio/rse/protocols/http_cache.py +82 -0
  77. rucio/rse/protocols/mock.py +123 -0
  78. rucio/rse/protocols/ngarc.py +209 -0
  79. rucio/rse/protocols/posix.py +250 -0
  80. rucio/rse/protocols/protocol.py +361 -0
  81. rucio/rse/protocols/rclone.py +365 -0
  82. rucio/rse/protocols/rfio.py +145 -0
  83. rucio/rse/protocols/srm.py +338 -0
  84. rucio/rse/protocols/ssh.py +414 -0
  85. rucio/rse/protocols/storm.py +195 -0
  86. rucio/rse/protocols/webdav.py +594 -0
  87. rucio/rse/protocols/xrootd.py +302 -0
  88. rucio/rse/rsemanager.py +881 -0
  89. rucio/rse/translation.py +260 -0
  90. rucio/vcsversion.py +11 -0
  91. rucio/version.py +45 -0
  92. rucio_clients-37.0.0rc1.data/data/etc/rse-accounts.cfg.template +25 -0
  93. rucio_clients-37.0.0rc1.data/data/etc/rucio.cfg.atlas.client.template +43 -0
  94. rucio_clients-37.0.0rc1.data/data/etc/rucio.cfg.template +241 -0
  95. rucio_clients-37.0.0rc1.data/data/requirements.client.txt +19 -0
  96. rucio_clients-37.0.0rc1.data/data/rucio_client/merge_rucio_configs.py +144 -0
  97. rucio_clients-37.0.0rc1.data/scripts/rucio +133 -0
  98. rucio_clients-37.0.0rc1.data/scripts/rucio-admin +97 -0
  99. rucio_clients-37.0.0rc1.dist-info/METADATA +54 -0
  100. rucio_clients-37.0.0rc1.dist-info/RECORD +104 -0
  101. rucio_clients-37.0.0rc1.dist-info/WHEEL +5 -0
  102. rucio_clients-37.0.0rc1.dist-info/licenses/AUTHORS.rst +100 -0
  103. rucio_clients-37.0.0rc1.dist-info/licenses/LICENSE +201 -0
  104. rucio_clients-37.0.0rc1.dist-info/top_level.txt +1 -0
@@ -0,0 +1,2825 @@
1
+ #!/usr/bin/env python
2
+ # Copyright European Organization for Nuclear Research (CERN) since 2012
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License at
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+
16
+ import argparse
17
+ import itertools
18
+ import math
19
+ import os
20
+ import signal
21
+ import sys
22
+ import time
23
+ import traceback
24
+ import unittest
25
+ import uuid
26
+ from copy import deepcopy
27
+ from datetime import datetime
28
+ from logging import DEBUG
29
+ from typing import Optional
30
+
31
+ from rich.console import Console
32
+ from rich.padding import Padding
33
+ from rich.status import Status
34
+ from rich.text import Text
35
+ from rich.theme import Theme
36
+ from rich.traceback import install
37
+ from rich.tree import Tree
38
+ from tabulate import tabulate
39
+
40
+ # rucio module has the same name as this executable module, so this rule fails. pylint: disable=no-name-in-module
41
+ from rucio import version
42
+ from rucio.cli.utils import exception_handler, get_client, setup_gfal2_logger, signal_handler
43
+ from rucio.client.richclient import MAX_TRACEBACK_WIDTH, MIN_CONSOLE_WIDTH, CLITheme, generate_table, get_cli_config, get_pager, print_output, setup_rich_logger
44
+ from rucio.common.client import detect_client_location
45
+ from rucio.common.config import config_get, config_get_float
46
+ from rucio.common.constants import ReplicaState
47
+ from rucio.common.exception import (
48
+ DIDFilterSyntaxError,
49
+ DuplicateCriteriaInDIDFilter,
50
+ DuplicateRule,
51
+ InputValidationError,
52
+ InvalidObject,
53
+ InvalidType,
54
+ RucioException,
55
+ UnsupportedOperation,
56
+ )
57
+ from rucio.common.extra import import_extras
58
+ from rucio.common.test_rucio_server import TestRucioServer
59
+ from rucio.common.utils import Color, StoreAndDeprecateWarningAction, chunks, extract_scope, parse_did_filter_from_string, parse_did_filter_from_string_fe, setup_logger, sizefmt
60
+
61
+ EXTRA_MODULES = import_extras(['argcomplete'])
62
+
63
+ if EXTRA_MODULES['argcomplete']:
64
+ import argcomplete # pylint: disable=E0401
65
+
66
+ SUCCESS = 0
67
+ FAILURE = 1
68
+
69
+ DEFAULT_SECURE_PORT = 443
70
+ DEFAULT_PORT = 80
71
+
72
+ tablefmt = 'psql'
73
+ cli_config = get_cli_config()
74
+
75
+
76
def get_scope(did, client):
    """
    Split a DID string into its (scope, name) components.

    Tries the algorithmic extraction first; if the extraction needs the
    list of existing scopes to disambiguate (signalled by a TypeError),
    fetches the scopes from the server and retries.

    :param did: DID string, e.g. 'scope:name'.
    :param client: Rucio client used to list scopes on the fallback path.
    :returns: Tuple (scope, name).
    """
    try:
        return extract_scope(did)
    except TypeError:
        # extract_scope needs the known scopes to resolve this DID.
        scopes = client.list_scopes()
        return extract_scope(did, scopes)
85
+
86
+
87
def __resolve_containers_to_datasets(scope, name, client):
    """
    Recursively expand a container DID into the datasets it contains.

    :param scope: Scope of the container.
    :param name: Name of the container.
    :param client: Rucio client used to list the container content.
    :returns: List of {'scope': ..., 'name': ...} dicts, one per dataset.
    """
    resolved = []
    for entry in client.list_content(scope, name):
        entry_type = entry['type']
        if entry_type == 'DATASET':
            resolved.append({'scope': entry['scope'], 'name': entry['name']})
        elif entry_type == 'CONTAINER':
            # Nested containers are expanded depth-first.
            resolved += __resolve_containers_to_datasets(entry['scope'], entry['name'], client)
    return resolved
98
+
99
+
100
@exception_handler
def ping(args, client, logger, console, spinner):
    """
    Pings a Rucio server.
    """
    # A reachable server answers with its info dict, including the version.
    info = client.ping()
    if not info:
        logger.error('Ping failed')
        return FAILURE
    print(info['version'])
    return SUCCESS
111
+
112
+
113
@exception_handler
def whoami_account(args, client, logger, console, spinner):
    """
    %(prog)s show [options] <field1=value1 field2=value2 ...>

    Show extended information of a given account
    """
    info = client.whoami()
    if cli_config == 'rich':
        # Merge the status and type colour maps so known values get styled.
        styles = dict(CLITheme.ACCOUNT_STATUS)
        styles.update(CLITheme.ACCOUNT_TYPE)
        rows = []
        for key, value in sorted(info.items()):
            text = str(value)
            rows.append((key, Text(text, style=styles.get(text, 'default'))))
        table = generate_table(rows, col_alignments=['left', 'left'], row_styles=['none'])
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        # Plain output: left-padded key, colon, value.
        for field in info:
            print(field.ljust(10) + ' : ' + str(info[field]))
    return SUCCESS
130
+
131
+
132
@exception_handler
def list_dataset_replicas(args, client, logger, console, spinner):
    """
    %(prog)s list [options] <field1=value1 field2=value2 ...>

    List dataset replicas

    Resolves the DIDs in ``args.dids`` down to datasets, fetches replica
    information per dataset and prints one table per dataset (RSE, files
    found, total files), or CSV lines when ``args.csv`` is set.
    """

    # result maps 'scope:name' -> {rse: [rse, available_length, length]}.
    result = {}
    # Deduplicated list of {'scope', 'name'} dataset DIDs to query.
    datasets = []

    def _append_to_datasets(scope, name):
        # Record a dataset DID, skipping duplicates.
        filedid = {'scope': scope, 'name': name}
        if filedid not in datasets:
            datasets.append(filedid)

    def _fetch_datasets_for_meta(meta):
        """Internal function to fetch datasets and recurse into files."""
        if meta['did_type'] != 'DATASET':
            # Non-dataset DIDs are expanded recursively; each FILE's parent
            # dataset is collected.
            dids = client.scope_list(scope=meta['scope'], name=meta['name'], recursive=True)
            for did in dids:
                if did['type'] == 'FILE':
                    _append_to_datasets(did['parent']['scope'], did['parent']['name'])
        else:
            _append_to_datasets(meta['scope'], meta['name'])

    def _append_result(dsn, replica):
        # Store one replica row for dataset dsn, keyed by RSE name.
        if dsn not in result:
            result[dsn] = {}
        result[dsn][replica['rse']] = [replica['rse'], replica['available_length'], replica['length']]

    if cli_config == 'rich':
        spinner.update(status='Fetching dataset replicas')
        spinner.start()

    if len(args.dids) == 1:
        # Single DID: one metadata call is enough.
        scope, name = get_scope(args.dids[0], client)
        dmeta = client.get_metadata(scope, name)
        _fetch_datasets_for_meta(meta=dmeta)
    else:
        # Multiple DIDs: resolve scopes lazily and fetch metadata in bulk.
        extractdids = (get_scope(did, client) for did in args.dids)
        splitdids = [{'scope': scope, 'name': name} for scope, name in extractdids]
        for dmeta in client.get_metadata_bulk(dids=splitdids):
            _fetch_datasets_for_meta(meta=dmeta)

    if args.deep or len(datasets) < 2:
        # Deep lookups are only available per dataset; also used when there
        # is at most one dataset to query.
        for did in datasets:
            dsn = f"{did['scope']}:{did['name']}"
            for rep in client.list_dataset_replicas(scope=did['scope'], name=did['name'], deep=args.deep):
                _append_result(dsn=dsn, replica=rep)
    else:
        # Shallow lookup over several datasets uses the bulk endpoint.
        for rep in client.list_dataset_replicas_bulk(dids=datasets):
            dsn = f"{rep['scope']}:{rep['name']}"
            _append_result(dsn=dsn, replica=rep)

    if args.csv:
        # CSV output: rse,found,total — one line per replica row.
        for dsn in result:
            for rse in list(result[dsn].values()):
                print(rse[0], rse[1], rse[2], sep=',')

        if cli_config == 'rich':
            spinner.stop()
    else:
        output = []
        for i, dsn in enumerate(result):
            if cli_config == 'rich':
                # Heading rules: always from the second dataset on; for the
                # first dataset only when several datasets are shown.
                if i > 0:
                    output.append(Text(f'\nDATASET: {dsn}', style=CLITheme.TEXT_HIGHLIGHT))
                elif len(result) > 1:
                    output.append(Text(f'DATASET: {dsn}', style=CLITheme.TEXT_HIGHLIGHT))

                table = generate_table(list(result[dsn].values()), headers=['RSE', 'FOUND', 'TOTAL'], col_alignments=['left', 'right', 'right'])
                output.append(table)
            else:
                print(f'\nDATASET: {dsn}')
                print(tabulate(list(result[dsn].values()), tablefmt=tablefmt, headers=['RSE', 'FOUND', 'TOTAL']))

        if cli_config == 'rich':
            spinner.stop()
            print_output(*output, console=console, no_pager=args.no_pager)
    return SUCCESS
213
+
214
+
215
@exception_handler
def list_file_replicas(args, client, logger, console, spinner):
    """
    %(prog)s list [options] <field1=value1 field2=value2 ...>

    List file replicas

    Output depends on the flags: --metalink dumps the raw metalink body,
    --missing lists unavailable replicas, --link creates symlinks to local
    paths, --pfns prints bare PFNs, otherwise a full replica table is shown.
    """
    if args.missing:
        # Missing replicas can only be detected when all states are fetched.
        args.all_states = True

    protocols = None
    if args.protocols:
        protocols = args.protocols.split(',')

    table_data = []
    dids = []
    if args.missing and not args.rses:
        print('Cannot use --missing without specifying a RSE')
        return FAILURE
    if args.link and ':' not in args.link:
        print('The substitution parameter must equal --link="/pfn/dir:/dst/dir"')
        return FAILURE

    if cli_config == 'rich':
        spinner.update(status='Fetching file replicas')
        spinner.start()

    for did in args.dids:
        scope, name = get_scope(did, client)
        client.get_metadata(scope=scope, name=name)  # Break with Exception before streaming replicas if DID does not exist.
        dids.append({'scope': scope, 'name': name})

    replicas = client.list_replicas(dids, schemes=protocols,
                                    ignore_availability=True,
                                    all_states=args.all_states,
                                    rse_expression=args.rses,
                                    metalink=args.metalink,
                                    client_location=detect_client_location(),
                                    sort=args.sort, domain=args.domain,
                                    resolve_archives=not args.no_resolve_archives)
    # Expand the RSE expression into the list of concrete RSE names.
    rses = [rse["rse"] for rse in client.list_rses(rse_expression=args.rses)]

    if args.metalink:
        print(replicas[:-1])  # Last character is newline, no need to print that.
        return SUCCESS

    if args.missing:
        # Report every (replica, RSE) pair whose state is not AVAILABLE.
        for replica, rse in itertools.product(replicas, rses):
            if 'states' in replica and rse in replica['states'] and replica['states'].get(rse) != 'AVAILABLE':
                if cli_config == 'rich':
                    replica_state = f"[{CLITheme.REPLICA_STATE.get(ReplicaState[replica['states'].get(rse)].value, 'default')}]{ReplicaState[replica['states'].get(rse)].value}[/]"
                    table_data.append([replica['scope'], replica['name'], '({0}) {1}'.format(replica_state, rse)])
                else:
                    table_data.append([replica['scope'], replica['name'], "({0}) {1}".format(ReplicaState[replica['states'].get(rse)].value, rse)])
        if cli_config == 'rich':
            table = generate_table(table_data, headers=['SCOPE', 'NAME', '(STATE) RSE'], col_alignments=['left', 'left', 'left'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE', 'NAME', '(STATE) RSE']))

    elif args.link:
        # Create symlinks named after the file, substituting the PFN prefix
        # pfn_dir with the local destination dst_dir.
        pfn_dir, dst_dir = args.link.split(':')
        if args.rses:
            for replica, rse in itertools.product(replicas, rses):
                if replica['rses'].get(rse):
                    for pfn in replica['rses'][rse]:
                        os.symlink(dst_dir + pfn.rsplit(pfn_dir)[-1], replica['name'])
        else:
            for replica in replicas:
                for rse in replica['rses']:
                    if replica['rses'][rse]:
                        for pfn in replica['rses'][rse]:
                            os.symlink(dst_dir + pfn.rsplit(pfn_dir)[-1], replica['name'])
    elif args.pfns:
        # Bare PFN listing, optionally limited to the requested RSEs.
        if args.rses:
            for replica in replicas:
                for pfn in replica['pfns']:
                    rse = replica['pfns'][pfn]['rse']
                    if replica['rses'].get(rse):
                        if cli_config == 'rich':
                            table_data.append([pfn])
                        else:
                            print(pfn)
        else:
            for replica in replicas:
                for pfn in replica['pfns']:
                    rse = replica['pfns'][pfn]['rse']
                    if replica['rses'][rse]:
                        if cli_config == 'rich':
                            table_data.append([pfn])
                        else:
                            print(pfn)
        if cli_config == 'rich':
            table = generate_table(table_data, headers=['PFN'], col_alignments=['left'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
    else:
        # Default mode: full table with size, checksum and one row per PFN.
        if args.all_states:
            header = ['SCOPE', 'NAME', 'FILESIZE', 'ADLER32', '(STATE) RSE: REPLICA']
        else:
            header = ['SCOPE', 'NAME', 'FILESIZE', 'ADLER32', 'RSE: REPLICA']
        for replica in replicas:
            if 'bytes' in replica:
                for pfn in replica['pfns']:
                    rse = replica['pfns'][pfn]['rse']
                    if args.all_states:
                        if cli_config == 'rich':
                            replica_state = f"[{CLITheme.REPLICA_STATE.get(ReplicaState[replica['states'][rse]].value, 'default')}]{ReplicaState[replica['states'][rse]].value}[/]"
                            # Less does not display hyperlinks well if the table is very wide.
                            if args.no_pager:
                                rse_string = f'({replica_state}) {rse}: [u bright_blue link={pfn}]{pfn}[/]'
                            else:
                                rse_string = f'({replica_state}) {rse}: [u bright_blue]{pfn}[/]'
                        else:
                            rse_string = '({2}) {0}: {1}'.format(rse, pfn, ReplicaState[replica['states'][rse]].value)
                    else:
                        if cli_config == 'rich':
                            # Less does not display hyperlinks well if the table is very wide.
                            if args.no_pager:
                                rse_string = f'{rse}: [u bright_blue link={pfn}]{pfn}[/]'
                            else:
                                rse_string = f'{rse}: [u bright_blue]{pfn}[/]'
                        else:
                            rse_string = '{0}: {1}'.format(rse, pfn)
                    if args.rses:
                        # Only keep rows whose RSE matches the expression.
                        for selected_rse in rses:
                            if rse == selected_rse:
                                table_data.append([replica['scope'], replica['name'], sizefmt(replica['bytes'], args.human), replica['adler32'], rse_string])
                    else:
                        table_data.append([replica['scope'], replica['name'], sizefmt(replica['bytes'], args.human), replica['adler32'], rse_string])

        if cli_config == 'rich':
            table = generate_table(table_data, headers=header, col_alignments=['left', 'left', 'right', 'left', 'left'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print(tabulate(table_data, tablefmt=tablefmt, headers=header, disable_numparse=True))
    return SUCCESS
354
+
355
+
356
@exception_handler
def add_dataset(args, client, logger, console, spinner):
    """
    %(prog)s add-dataset [options] <dsn>

    Add a dataset identifier.
    """
    # Split the DID argument into scope and name.
    scope, name = get_scope(args.did, client)
    # Create the dataset with the requested monotonic status and lifetime.
    client.add_dataset(
        scope=scope,
        name=name,
        statuses={'monotonic': args.monotonic},
        lifetime=args.lifetime,
    )
    print('Added %s:%s' % (scope, name))
    return SUCCESS
368
+
369
+
370
@exception_handler
def add_container(args, client, logger, console, spinner):
    """
    %(prog)s add-container [options] <dsn>

    Add a container identifier.
    """
    # Split the DID argument into scope and name.
    scope, name = get_scope(args.did, client)
    # Create the container with the requested monotonic status and lifetime.
    client.add_container(
        scope=scope,
        name=name,
        statuses={'monotonic': args.monotonic},
        lifetime=args.lifetime,
    )
    print('Added %s:%s' % (scope, name))
    return SUCCESS
382
+
383
+
384
@exception_handler
def attach(args, client, logger, console, spinner):
    """
    %(prog)s attach [options] <field1=value1 field2=value2 ...>

    Attach a data identifier.

    Attaches the DIDs given on the command line (or read from a file when
    --fromfile is used) to the target DID. Lists larger than the server
    limit are chunked into multiple attach calls.
    """
    scope, name = get_scope(args.todid, client)
    dids = args.dids
    limit = 499  # maximum number of DIDs attached per server call

    if args.fromfile:
        if len(dids) > 1:
            logger.error("If --fromfile option is active, only one file is supported. The file should contain a list of dids, one per line.")
            return FAILURE
        try:
            # Context manager ensures the file is closed even on error
            # (the original leaked the file handle).
            with open(dids[0], 'r') as f:
                dids = [did.rstrip() for did in f.readlines()]
        except OSError:
            logger.error("Can't open file '" + dids[0] + "'.")
            return FAILURE

    # Resolve each DID string exactly once (the original called get_scope
    # twice per DID).
    resolved = []
    for did in dids:
        did_scope, did_name = get_scope(did, client)
        resolved.append({'scope': did_scope, 'name': did_name})
    dids = resolved

    if len(dids) <= limit:
        client.attach_dids(scope=scope, name=name, dids=dids)
    else:
        logger.warning("You are trying to attach too much DIDs. Therefore they will be chunked and attached in multiple commands.")
        missing_dids = []
        nchunks = int(math.ceil(float(len(dids)) / float(limit)))
        for i, chunk in enumerate(chunks(dids, limit)):
            logger.info("Try to attach chunk {0}/{1}".format(i, nchunks))
            try:
                client.attach_dids(scope=scope, name=name, dids=chunk)
            except Exception:
                # The chunk may have partially succeeded: keep only the DIDs
                # not yet part of the target's content and retry them below.
                content = [{'scope': did['scope'], 'name': did['name']} for did in client.list_content(scope=scope, name=name)]
                missing_dids += [did for did in chunk if did not in content]

        if missing_dids:
            for chunk in chunks(missing_dids, limit):
                client.attach_dids(scope=scope, name=name, dids=chunk)

    print('DIDs successfully attached to %s:%s' % (scope, name))
    return SUCCESS
427
+
428
+
429
@exception_handler
def detach(args, client, logger, console, spinner):
    """
    %(prog)s detach [options] <field1=value1 field2=value2 ...>

    Detach data identifier.
    """
    scope, name = get_scope(args.fromdid, client)
    # Resolve every child DID string into its scope/name pair.
    children = []
    for child in args.dids:
        child_scope, child_name = get_scope(child, client)
        children.append({'scope': child_scope, 'name': child_name})
    client.detach_dids(scope=scope, name=name, dids=children)
    print('DIDs successfully detached from %s:%s' % (scope, name))
    return SUCCESS
445
+
446
+
447
@exception_handler
def list_dids(args, client, logger, console, spinner):
    """
    %(prog)s list-dids scope[:*|:name] [--filter 'value' | --recursive]

    List the data identifiers for a given scope.
    """

    filters = {}
    type_ = 'collection'
    table_data = []

    try:
        scope, name = get_scope(args.did[0], client)
        if name == '':
            name = '*'
    except InvalidObject:
        # The argument was a bare scope without a name part.
        scope = args.did[0]
        name = '*'

    if scope not in client.list_scopes():
        logger.error('Scope not found.')
        return FAILURE

    if args.recursive and '*' in name:
        logger.error('Option recursive cannot be used with wildcards.')
        return FAILURE
    else:
        # NOTE(review): `filters` is still the empty dict here (it is only
        # populated by the parse below), so this check can never trigger —
        # confirm whether it was meant to run after the filter is parsed.
        if filters:
            if ('name' in filters) and (name != '*'):
                logger.error('Must have a wildcard in did name if filtering by name.')
                return FAILURE

    try:
        # Parse '--filter' into a filter dict and an optional DID type.
        filters, type_ = parse_did_filter_from_string_fe(args.filter, name)
    except InvalidType as error:
        logger.error(error)
        return FAILURE
    except DuplicateCriteriaInDIDFilter as error:
        logger.error(error)
        return FAILURE
    except DIDFilterSyntaxError as error:
        logger.error(error)
        return FAILURE
    except ValueError as error:
        logger.error(error)
        return FAILURE
    except Exception as e:
        logger.error(e)
        return FAILURE

    if cli_config == 'rich':
        spinner.update(status='Fetching DIDs')
        spinner.start()

    for did in client.list_dids(scope, filters=filters, did_type=type_, long=True, recursive=args.recursive):
        if cli_config == 'rich':
            table_data.append([f"{did['scope']}:{did['name']}", Text(did['did_type'], style=CLITheme.DID_TYPE.get(did['did_type'], 'default'))])
        else:
            table_data.append([f"{did['scope']}:{did['name']}", did['did_type']])

    if cli_config == 'rich':
        if args.short:
            # Short form drops the DID-type column.
            table = generate_table([[did] for did, _ in table_data], headers=['SCOPE:NAME'], col_alignments=['left'])
        else:
            table = generate_table(table_data, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        if args.short:
            for did, _ in table_data:
                print(did)
        else:
            print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
    return SUCCESS
522
+
523
+
524
@exception_handler
def list_dids_extended(args, client, logger, console, spinner):
    """
    %(prog)s list-dids-extended scope[:*|:name] [--filter 'key=value' | --recursive]

    List the data identifiers for a given scope (DEPRECATED).
    """
    # Deprecated stub: always fails and points the user at list_dids.
    logger.error("This command has been deprecated. Please use list_dids instead.")
    return FAILURE
533
+
534
+
535
@exception_handler
def list_scopes(args, client, logger, console, spinner):
    """
    %(prog)s list-scopes <scope>

    List scopes.
    """
    # For the moment..

    rich_mode = cli_config == 'rich'
    if rich_mode:
        spinner.update(status='Fetching scopes')
        spinner.start()

    scopes = client.list_scopes()
    if rich_mode:
        # Rich output: one sorted scope per table row.
        rows = [[s] for s in sorted(scopes)]
        table = generate_table(rows, headers=['SCOPE'], col_alignments=['left'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        # Plain output keeps the server-provided ordering.
        for s in scopes:
            print(s)
    return SUCCESS
557
+
558
+
559
@exception_handler
def list_files(args, client, logger, console, spinner):
    """
    %(prog)s list-files [options] <field1=value1 field2=value2 ...>

    List data identifier contents.

    Three output modes: CSV lines (--csv), a POOL file catalog XML mapped
    to a local path (--LOCALPATH), or the default table with totals.
    """

    if cli_config == 'rich':
        spinner.update(status='Fetching files')
        spinner.start()

    if args.csv:
        # CSV mode: scope:name,guid,adler32,size,events — one line per file.
        for did in args.dids:
            scope, name = get_scope(did, client)
            for f in client.list_files(scope=scope, name=name):
                guid = f['guid']
                if guid:
                    # Re-insert the dashes of the canonical GUID form.
                    guid = f'{guid[0:8]}-{guid[8:12]}-{guid[12:16]}-{guid[16:20]}-{guid[20:32]}'
                else:
                    guid = '(None)'
                print('{}:{}'.format(f['scope'], f['name']), guid, f['adler32'], sizefmt(f['bytes'], args.human), f['events'], sep=',')
        if cli_config == 'rich':
            spinner.stop()
        return SUCCESS
    elif args.LOCALPATH:
        # POOL file catalog (XML) output mapping each file to LOCALPATH.
        # NOTE(review): the exact whitespace inside the XML templates was
        # lost in extraction — confirm against the released file.
        full_str = ''
        if cli_config == 'rich':
            header = '''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<!DOCTYPE POOLFILECATALOG SYSTEM "InMemory">
<POOLFILECATALOG>'''
            full_str = header
        else:
            print('''<?xml version="1.0" encoding="UTF-8" standalone="no" ?>
<!DOCTYPE POOLFILECATALOG SYSTEM "InMemory">
<POOLFILECATALOG>''')

        file_str = ''' <File ID="%s">
  <physical>
   <pfn filetype="ROOT_All" name="%s/%s"/>
  </physical>
  <logical>
   <lfn name="%s"/>
  </logical>
 </File>'''

        for did in args.dids:
            scope, name = get_scope(did, client)
            for f in client.list_files(scope=scope, name=name):
                guid = f['guid']
                if guid:
                    guid = f'{guid[0:8]}-{guid[8:12]}-{guid[12:16]}-{guid[16:20]}-{guid[20:32]}'
                else:
                    guid = '(None)'

                if cli_config == 'rich':
                    full_str += '\n' + file_str % (guid, args.LOCALPATH, f['name'], f['name'])
                else:
                    print(file_str % (guid, args.LOCALPATH, f['name'], f['name']))

        if cli_config == 'rich':
            spinner.stop()
            print_output(full_str + '\n</POOLFILECATALOG>', console=console, no_pager=True)
        else:
            print('</POOLFILECATALOG>')
        return SUCCESS
    else:
        # Default mode: table of files plus a totals summary.
        table_data = []
        for did in args.dids:
            # NOTE(review): the totals are re-initialised for every DID, so
            # the summary printed below reflects only the last DID when
            # several are given — confirm whether that is intended.
            totfiles = 0
            totsize = 0
            totevents = 0
            scope, name = get_scope(did, client)
            for file in client.list_files(scope=scope, name=name):
                totfiles += 1
                totsize += int(file['bytes'])
                if file['events']:
                    totevents += int(file.get('events', 0))
                guid = file['guid']
                if guid:
                    guid = f'{guid[0:8]}-{guid[8:12]}-{guid[12:16]}-{guid[16:20]}-{guid[20:32]}'
                else:
                    guid = '(None)'
                table_data.append([f"{file['scope']}:{file['name']}", guid, f"ad:{file['adler32']}", sizefmt(file['bytes'], args.human), file['events']])

        if cli_config == 'rich':
            table = generate_table(table_data, headers=['SCOPE:NAME', 'GUID', 'ADLER32', 'FILESIZE', 'EVENTS'], col_alignments=['left', 'left', 'left', 'right', 'right'])
            summary_data = [['Total files', str(totfiles)], ['Total size', sizefmt(totsize, args.human)]]
            if totevents:
                summary_data.append(['Total events', str(totevents)])
            summary_table = generate_table(summary_data, col_alignments=['left', 'left'], row_styles=['none'])
            spinner.stop()
            print_output(table, summary_table, console=console, no_pager=args.no_pager)
        else:
            print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', 'GUID', 'ADLER32', 'FILESIZE', 'EVENTS'], disable_numparse=True))
            print('Total files : %s' % totfiles)
            print('Total size : %s' % sizefmt(totsize, args.human))
            if totevents:
                print('Total events : %s' % totevents)
        return SUCCESS
659
+
660
+
661
@exception_handler
def list_content(args, client, logger, console, spinner):
    """
    %(prog)s list-content [options] <field1=value1 field2=value2 ...>

    List data identifier contents.
    """
    rich_mode = cli_config == 'rich'
    rows = []
    if rich_mode:
        spinner.update(status='Fetching dataset contents')
        spinner.start()

    # One row per child DID: 'scope:name' plus its (upper-cased) type.
    for did in args.dids:
        scope, name = get_scope(did, client)
        for entry in client.list_content(scope=scope, name=name):
            label = f"{entry['scope']}:{entry['name']}"
            did_type = entry['type'].upper()
            if rich_mode:
                rows.append([label, Text(did_type, style=CLITheme.DID_TYPE.get(did_type, 'default'))])
            else:
                rows.append([label, did_type])

    if rich_mode:
        if args.short:
            # Short form drops the DID-type column.
            table = generate_table([[label] for label, _ in rows], headers=['SCOPE:NAME'], col_alignments=['left'])
        else:
            table = generate_table(rows, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    elif args.short:
        for label, _ in rows:
            print(label)
    else:
        print(tabulate(rows, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
    return SUCCESS
696
+
697
+
698
@exception_handler
def list_content_history(args, client, logger, console, spinner):
    """
    %(prog)s list-content-history [options] <field1=value1 field2=value2 ...>

    List data identifier contents.
    """
    rich_mode = cli_config == 'rich'
    rows = []
    if rich_mode:
        spinner.update(status='Fetching content history')
        spinner.start()

    # One row per historical child DID: 'scope:name' plus its type.
    for did in args.dids:
        scope, name = get_scope(did, client)
        for entry in client.list_content_history(scope=scope, name=name):
            label = f"{entry['scope']}:{entry['name']}"
            did_type = entry['type'].upper()
            if rich_mode:
                rows.append([label, Text(did_type, style=CLITheme.DID_TYPE.get(did_type, 'default'))])
            else:
                rows.append([label, did_type])

    if rich_mode:
        table = generate_table(rows, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        print(tabulate(rows, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
    return SUCCESS
726
+
727
+
728
@exception_handler
def list_parent_dids(args, client, logger, console, spinner):
    """
    %(prog)s list-parent-dids

    List parent data identifier.

    Exactly one of --pfns, --guids or a DID must be given; otherwise an
    error message is printed and FAILURE is returned.
    """

    if cli_config == 'rich':
        spinner.update(status='Fetching parent DIDs')
        spinner.start()

    if args.pfns:
        # Map each PFN to the dataset DIDs that carry a rule on the file.
        dict_datasets = {}
        output = []
        for res in client.get_did_from_pfns(args.pfns):
            for key in res:
                if key not in dict_datasets:
                    dict_datasets[key] = []
                for rule in client.list_associated_rules_for_file(res[key]['scope'], res[key]['name']):
                    if f"{rule['scope']}:{rule['name']}" not in dict_datasets[key]:
                        dict_datasets[key].append(f"{rule['scope']}:{rule['name']}")

        for pfn in dict_datasets:
            if cli_config == 'rich':
                parent_tree = Tree('')
                for parent in dict_datasets[pfn]:
                    parent_tree.add(parent)
                table = generate_table([['PFN', pfn], ['Parents', parent_tree]], col_alignments=['left', 'left'], row_styles=['none'])
                output.append(table)
            else:
                print('PFN: ', pfn)
                print('Parents: ', ','.join(dict_datasets[pfn]))

        if cli_config == 'rich':
            spinner.stop()
            print_output(*output, console=console, no_pager=args.no_pager)
    elif args.guids:
        output = []
        guids = []
        for input_ in args.guids:
            try:
                uuid.UUID(input_)
            except ValueError:
                print(f'Ignoring invalid GUID: {input_}')
                continue
            # BUGFIX: valid GUIDs were validated but never collected, so the
            # lookup loop below always iterated over an empty list.
            guids.append(input_)
        dict_datasets = {}
        for guid in guids:
            for did in client.get_dataset_by_guid(guid):
                if guid not in dict_datasets:
                    dict_datasets[guid] = []
                for rule in client.list_associated_rules_for_file(did['scope'], did['name']):
                    if f"{rule['scope']}:{rule['name']}" not in dict_datasets[guid]:
                        dict_datasets[guid].append(f"{rule['scope']}:{rule['name']}")

        for guid in dict_datasets:
            if cli_config == 'rich':
                parent_tree = Tree('')
                for parent in dict_datasets[guid]:
                    parent_tree.add(parent)
                table = generate_table([['GUID', guid], ['Parents', parent_tree]], col_alignments=['left', 'left'], row_styles=['none'])
                output.append(table)
            else:
                print('GUID: ', guid)
                print('Parents : ', ','.join(dict_datasets[guid]))

        if cli_config == 'rich':
            spinner.stop()
            print_output(*output, console=console, no_pager=args.no_pager)
    elif args.did:
        table_data = []
        scope, name = get_scope(args.did, client)
        for dataset in client.list_parent_dids(scope=scope, name=name):
            if cli_config == 'rich':
                table_data.append([f"{dataset['scope']}:{dataset['name']}", Text(dataset['type'], style=CLITheme.DID_TYPE.get(dataset['type'], 'default'))])
            else:
                table_data.append([f"{dataset['scope']}:{dataset['name']}", dataset['type']])

        if cli_config == 'rich':
            table = generate_table(table_data, headers=['SCOPE:NAME', '[DID TYPE]'], col_alignments=['left', 'left'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print(tabulate(table_data, tablefmt=tablefmt, headers=['SCOPE:NAME', '[DID TYPE]']))
    else:
        print('At least one option has to be given. Use -h to list the options.')
        return FAILURE
    return SUCCESS
816
+
817
+
818
@exception_handler
def close(args, client, logger, console, spinner):
    """
    %(prog)s close [options] <field1=value1 field2=value2 ...>

    Close a dataset or container.
    """

    # Mark every requested DID as closed, reporting each one as we go.
    for identifier in args.dids:
        did_scope, did_name = get_scope(identifier, client)
        client.set_status(scope=did_scope, name=did_name, open=False)
        print(f'{did_scope}:{did_name} has been closed.')
    return SUCCESS
831
+
832
+
833
@exception_handler
def reopen(args, client, logger, console, spinner):
    """
    %(prog)s reopen [options] <field1=value1 field2=value2 ...>

    Reopen a dataset or container (only for privileged users).
    """

    # Flip every requested DID back to the open state, reporting each one.
    for identifier in args.dids:
        did_scope, did_name = get_scope(identifier, client)
        client.set_status(scope=did_scope, name=did_name, open=True)
        print(f'{did_scope}:{did_name} has been reopened.')
    return SUCCESS
846
+
847
+
848
@exception_handler
def stat(args, client, logger, console, spinner):
    """
    %(prog)s stat [options] <field1=value1 field2=value2 ...>

    List attributes and statuses about data identifiers..
    """

    rich_mode = cli_config == 'rich'
    if rich_mode:
        spinner.update(status='Fetching DID stats')
        spinner.start()
        keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE}

    output = []
    for index, did in enumerate(args.dids):
        scope, name = get_scope(did, client)
        info = client.get_did(scope=scope, name=name, dynamic_depth='DATASET')
        if rich_mode:
            # Prefix each DID with a highlighted header when several are shown.
            if index > 0:
                output.append(Text(f'\nDID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
            elif len(args.dids) > 1:
                output.append(Text(f'DID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
            rows = [(key, Text(str(value), style=keyword_styles.get(str(value), 'default'))) for key, value in sorted(info.items())]
            output.append(generate_table(rows, row_styles=['none'], col_alignments=['left', 'left']))
        else:
            if index > 0:
                print('------')
            rows = [(key + ':', str(value)) for key, value in sorted(info.items())]
            print(tabulate(rows, tablefmt='plain', disable_numparse=True))

    if rich_mode:
        spinner.stop()
        print_output(*output, console=console, no_pager=args.no_pager)
    return SUCCESS
883
+
884
+
885
def erase(args, client, logger, console, spinner):
    """
    %(prog)s erase [options] <field1=value1 field2=value2 ...>

    Delete data identifier.
    """

    for did in args.dids:
        # Wildcards are not supported here; skip such DIDs outright.
        if '*' in did:
            logger.warning("This command doesn't support wildcards! Skipping DID: %s", did)
            continue
        try:
            scope, name = get_scope(did, client)
        except RucioException as error:
            logger.warning('DID is in wrong format: %s', did)
            logger.debug('Error: %s', error)
            continue

        if args.undo:
            # Undo clears the 'lifetime' metadata set by a previous erase.
            try:
                client.set_metadata(scope=scope, name=name, key='lifetime', value=None)
                logger.info('Erase undo for DID: {0}:{1}'.format(scope, name))
            except Exception:
                logger.warning('Cannot undo erase operation on DID. DID not existent or grace period of 24 hours already expired.')
                logger.warning(' DID: {0}:{1}'.format(scope, name))
        else:
            try:
                # set lifetime to expire in 24 hours (value is in seconds).
                client.set_metadata(scope=scope, name=name, key='lifetime', value=86400)
                logger.info('CAUTION! erase operation is irreversible after 24 hours. To cancel this operation you can run the following command:')
                print("rucio erase --undo {0}:{1}".format(scope, name))
            except RucioException as error:
                logger.warning('Failed to erase DID: %s', did)
                logger.debug('Error: %s', error)
    return SUCCESS
920
+
921
+
922
@exception_handler
def upload(args, client, logger, console, spinner):
    """
    rucio upload [scope:datasetname] [folder/] [files1 file2 file3]
    %(prog)s upload [options] <field1=value1 field2=value2 ...>

    Upload files into Rucio
    """
    # --lifetime and --expiration-date are mutually exclusive; an explicit
    # expiration date is converted into a relative lifetime in seconds.
    if args.lifetime and args.expiration_date:
        logger.error("--lifetime and --expiration-date cannot be specified at the same time.")
        return FAILURE
    elif args.expiration_date:
        expiration_date = datetime.strptime(args.expiration_date, "%Y-%m-%d-%H:%M:%S")
        if expiration_date < datetime.utcnow():
            logger.error("The specified expiration date should be in the future!")
            return FAILURE
        args.lifetime = (expiration_date - datetime.utcnow()).total_seconds()

    # The first 'scope:name' positional argument names the target dataset;
    # any further DID-shaped arguments are ignored with a warning.
    dsscope = None
    dsname = None
    for arg in args.args:
        did = arg.split(':')
        if not dsscope and len(did) == 2:
            dsscope = did[0]
            dsname = did[1]
        elif len(did) == 2:
            logger.warning('Ignoring input {} because dataset DID is already set {}:{}'.format(arg, dsscope, dsname))

    # Every positional argument without a ':' is treated as a local path
    # and becomes one upload item carrying the shared option set.
    items = []
    for arg in args.args:
        if arg.count(':') > 0:
            continue
        if args.pfn:
            if args.impl:
                logger.warning('Ignoring --impl option because --pfn option given')
                args.impl = None
        items.append({'path': arg,
                      'rse': args.rse,
                      'did_scope': args.scope,
                      'did_name': args.name,
                      'impl': args.impl,
                      'dataset_scope': dsscope,
                      'dataset_name': dsname,
                      'force_scheme': args.protocol,
                      'pfn': args.pfn,
                      'no_register': args.no_register,
                      'lifetime': args.lifetime,
                      'register_after_upload': args.register_after_upload,
                      'transfer_timeout': args.transfer_timeout,
                      'guid': args.guid,
                      'recursive': args.recursive})

    if len(items) < 1:
        raise InputValidationError('No files could be extracted from the given arguments')

    # A user-supplied GUID or LFN applies to exactly one file; reject
    # multi-file uploads in those cases.
    if len(items) > 1 and args.guid:
        logger.error("A single GUID was specified on the command line, but there are multiple files to upload.")
        logger.error("If GUID auto-detection is not used, only one file may be uploaded at a time")
        raise InputValidationError('Invalid input argument composition')
    if len(items) > 1 and args.name:
        logger.error("A single LFN was specified on the command line, but there are multiple files to upload.")
        logger.error("If LFN auto-detection is not used, only one file may be uploaded at a time")
        raise InputValidationError('Invalid input argument composition')

    if args.recursive and args.pfn:
        logger.error("It is not possible to create the folder structure into collections with a non-deterministic way.")
        logger.error("If PFN is specified, you cannot use --recursive")
        raise InputValidationError('Invalid input argument composition')

    # Function-scope import of the upload client, as in the rest of the file.
    from rucio.client.uploadclient import UploadClient
    upload_client = UploadClient(client, logger=logger)
    summary_file_path = 'rucio_upload.json' if args.summary else None
    upload_client.upload(items, summary_file_path)
    return SUCCESS
996
+
997
+
998
@exception_handler
def download(args, client, logger, console, spinner):
    """
    %(prog)s download [options] <field1=value1 field2=value2 ...>

    Download files from Rucio using new threaded model and RSE expression support
    """
    # Input validation
    if not args.dids and not args.filter and not args.metalink_file:
        logger.error('At least one did is mandatory')
        return FAILURE
    elif not args.dids and args.filter and not args.scope:
        logger.error('The argument scope is mandatory')
        return FAILURE

    if args.filter and args.metalink_file:
        logger.error('Arguments filter and metalink cannot be used together.')
        return FAILURE

    if args.dids and args.metalink_file:
        logger.error('Arguments dids and metalink cannot be used together.')
        return FAILURE

    if args.ignore_checksum and args.check_local_with_filesize_only:
        logger.error('Arguments ignore-checksum and check-local-with-filesize-only cannot be used together.')
        return FAILURE

    # Collect the optional trace fields forwarded to the download client.
    trace_pattern = {}

    if args.trace_appid:
        trace_pattern['appid'] = args.trace_appid
    if args.trace_dataset:
        trace_pattern['dataset'] = args.trace_dataset
    if args.trace_datasetscope:
        trace_pattern['datasetScope'] = args.trace_datasetscope
    if args.trace_eventtype:
        trace_pattern['eventType'] = args.trace_eventtype
    if args.trace_pq:
        trace_pattern['pq'] = args.trace_pq
    if args.trace_taskid:
        trace_pattern['taskid'] = args.trace_taskid
    if args.trace_usrdn:
        trace_pattern['usrdn'] = args.trace_usrdn
    deactivate_file_download_exceptions = args.deactivate_file_download_exceptions if args.deactivate_file_download_exceptions is not None else False

    # Function-scope import of the download client, as in the rest of the file.
    from rucio.client.downloadclient import DownloadClient
    download_client = DownloadClient(client=client, logger=logger, check_admin=args.allow_tape)

    # Options shared by every download item built below.
    result = None
    item_defaults = {}
    item_defaults['rse'] = args.rses
    item_defaults['base_dir'] = args.dir
    item_defaults['no_subdir'] = args.no_subdir
    item_defaults['transfer_timeout'] = args.transfer_timeout
    item_defaults['no_resolve_archives'] = args.no_resolve_archives
    item_defaults['ignore_checksum'] = args.ignore_checksum
    item_defaults['check_local_with_filesize_only'] = args.check_local_with_filesize_only
    archive_did = args.archive_did
    if archive_did:
        logger.warning("Archives are treated transparently. --archive-did option is being obsoleted.")  # TODO
    # Get filters
    filters = {}
    type_ = 'all'
    if args.filter:
        try:
            filters, type_ = parse_did_filter_from_string(args.filter)
            if args.scope:
                filters['scope'] = args.scope
        except InvalidType as error:
            logger.error(error)
            return FAILURE
        except ValueError as error:
            logger.error(error)
            return FAILURE
        except Exception as error:
            logger.error(error)
            logger.error("Invalid Filter. Filter must be 'key=value', 'key>=value', 'key>value', 'key<=value', 'key<value'")
            return FAILURE
    item_defaults['filters'] = filters

    if not args.pfn:
        # Regular (non-PFN) download: one item per DID, or a single
        # filter-driven item when no DIDs were given.
        item_defaults['impl'] = args.impl
        item_defaults['force_scheme'] = args.protocol
        item_defaults['nrandom'] = args.nrandom
        item_defaults['transfer_speed_timeout'] = args.transfer_speed_timeout \
            if args.transfer_speed_timeout is not None \
            else config_get_float('download', 'transfer_speed_timeout', False, 500)
        items = []
        if args.dids:
            for did in args.dids:
                item = {'did': did}
                item.update(item_defaults)
                items.append(item)
        else:
            items.append(item_defaults)

        if args.aria:
            result = download_client.download_aria2c(items, trace_pattern, deactivate_file_download_exceptions=deactivate_file_download_exceptions, sort=args.sort)
        elif args.metalink_file:
            result = download_client.download_from_metalink_file(items[0], args.metalink_file, deactivate_file_download_exceptions=deactivate_file_download_exceptions)
            if args.sort:
                logger.warning('Ignoring --replica-selection option because --metalink option given')
        else:
            result = download_client.download_dids(items, args.ndownloader, trace_pattern, deactivate_file_download_exceptions=deactivate_file_download_exceptions, sort=args.sort)
    else:
        # PFN download: incompatible options are ignored with a warning and
        # only the first DID is considered.
        if args.aria:
            logger.warning('Ignoring --aria option because --pfn option given')
        if args.impl:
            logger.warning('Ignoring --impl option because --pfn option given')
        if args.protocol:
            logger.warning('Ignoring --protocol option because --pfn option given')
        if args.transfer_speed_timeout:
            logger.warning("Download with --pfn doesn't support --transfer-speed-timeout")
        num_dids = len(args.dids)
        did_str = args.dids[0]
        if num_dids > 1:
            logger.warning('Download with --pfn option only supports one DID but {} DIDs were given. Considering only first DID: {}'.format(num_dids, did_str))
            logger.debug(args.dids)
        item_defaults['pfn'] = args.pfn
        item_defaults['did'] = did_str
        if args.rses is None:
            # Without an explicit RSE, resolve one by locating the PFN among
            # the DID's replicas.
            logger.warning("No RSE was given, selecting one.")

            replicas = client.list_replicas(
                [{"scope": did_str.split(':')[0], "name": did_str.split(':')[-1]}],
                schemes=args.protocol,
                ignore_availability=False,
                client_location=detect_client_location(),
                resolve_archives=not args.no_resolve_archives
            )

            download_rse = _get_rse_for_pfn(replicas, args.pfn)
            if download_rse is None:
                logger.error("Could not find RSE for pfn %s", args.pfn)
                return FAILURE
            else:
                item_defaults['rse'] = download_rse

        result = download_client.download_pfns([item_defaults], 1, trace_pattern, deactivate_file_download_exceptions=deactivate_file_download_exceptions)

    if not result:
        raise RucioException('Download API failed')

    # Aggregate per-DID counters (total, downloaded, already local) from the
    # per-file client states returned by the download client.
    summary = {}
    for item in result:
        for did, did_stats in item.get('input_dids', {}).items():
            did_summary = summary.setdefault(did, {'length': did_stats.get('length'), 'DONE': 0, 'ALREADY_DONE': 0, '_total': 0})
            did_summary['_total'] += 1
            state = item['clientState'].upper()
            if state in did_summary:
                did_summary[state] += 1

    print('----------------------------------')
    print('Download summary')
    if not len(summary):
        print('-' * 40)
        print('No DID matching the pattern')

    for summary_key, did_summary in summary.items():
        print('-' * 40)
        print('DID %s' % summary_key)
        length = did_summary['length']
        ds_total = did_summary['_total']
        downloaded_files = did_summary['DONE']
        local_files = did_summary['ALREADY_DONE']
        not_downloaded_files = ds_total - downloaded_files - local_files

        if length:
            print('{0:40} {1:6d}'.format('Total files (DID): ', length))
            print('{0:40} {1:6d}'.format('Total files (filtered): ', ds_total))
        else:
            print('{0:40} {1:6d}'.format('Total files: ', ds_total))
        print('{0:40} {1:6d}'.format('Downloaded files: ', downloaded_files))
        print('{0:40} {1:6d}'.format('Files already found locally: ', local_files))
        print('{0:40} {1:6d}'.format('Files that cannot be downloaded: ', not_downloaded_files))

    return SUCCESS
1176
+
1177
+
1178
+ def _get_rse_for_pfn(replicas, pfn) -> Optional[str]:
1179
+ # Check each rse in the replica list for the pfn. If no pfn is found, returns None.
1180
+ # If it is found, stop the generator and return the item.
1181
+ for replica in replicas:
1182
+ try:
1183
+ download_rse = next(
1184
+ rse for rse in replica['rses']
1185
+ if pfn in replica['rses'][rse]
1186
+ )
1187
+ except StopIteration:
1188
+ continue
1189
+ else:
1190
+ return download_rse
1191
+
1192
+
1193
@exception_handler
def get_metadata(args, client, logger, console, spinner):
    """
    %(prog)s get_metadata [options] <field1=value1 field2=value2 ...>

    Get data identifier metadata
    """

    # Fall back to the configured default metadata plugin when none is given.
    plugin = args.plugin if args.plugin else config_get('client', 'metadata_default_plugin', default='DID_COLUMN')

    rich_mode = cli_config == 'rich'
    if rich_mode:
        spinner.update(status='Fetching metadata')
        spinner.start()
        keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE, **CLITheme.AVAILABILITY}

    output = []
    for index, did in enumerate(args.dids):
        scope, name = get_scope(did, client)
        meta = client.get_metadata(scope=scope, name=name, plugin=plugin)
        if rich_mode:
            # Prefix each DID with a highlighted header when several are shown.
            if index > 0:
                output.append(Text(f'\nDID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
            elif len(args.dids) > 1:
                output.append(Text(f'DID: {did}', style=CLITheme.TEXT_HIGHLIGHT))
            rows = [(key, Text(str(value), style=keyword_styles.get(str(value), 'default'))) for key, value in sorted(meta.items())]
            output.append(generate_table(rows, col_alignments=['left', 'left'], row_styles=['none']))
        else:
            if index > 0:
                print('------')
            rows = [(key + ':', str(value)) for key, value in sorted(meta.items())]
            print(tabulate(rows, tablefmt='plain', disable_numparse=True))

    if rich_mode:
        spinner.stop()
        print_output(*output, console=console, no_pager=args.no_pager)
    return SUCCESS
1233
+
1234
+
1235
@exception_handler
def set_metadata(args, client, logger, console, spinner):
    """
    %(prog)s set_metadata [options] <field1=value1 field2=value2 ...>

    Set data identifier metadata
    """

    # 'lifetime' is special-cased: the string 'None' clears it, anything
    # else is interpreted as a number of seconds.
    if args.key == 'lifetime':
        value = None if args.value.lower() == 'none' else float(args.value)
    else:
        value = args.value
    scope, name = get_scope(args.did, client)
    client.set_metadata(scope=scope, name=name, key=args.key, value=value)
    return SUCCESS
1249
+
1250
+
1251
@exception_handler
def delete_metadata(args, client, logger, console, spinner):
    """
    %(prog)s delete_metadata [options] <field1=value1 field2=value2 ...>

    Delete data identifier metadata
    """
    # NOTE: the usage line above previously read 'set_metadata' (copy-paste
    # from the sibling command), which produced misleading CLI help text.

    # Resolve the DID into scope/name and drop the requested metadata key.
    scope, name = get_scope(args.did, client)
    client.delete_metadata(scope=scope, name=name, key=args.key)
    return SUCCESS
1262
+
1263
+
1264
@exception_handler
def add_rule(args, client, logger, console, spinner):
    """
    %(prog)s add-rule <did> <copies> <rse-expression> [options]

    Add a rule to a did.
    """

    dids = []
    rule_ids = []
    for did in args.dids:
        scope, name = get_scope(did, client)
        dids.append({'scope': scope, 'name': name})

    # All rule parameters are identical for the bulk call and the per-DID
    # retry below, so build them once instead of duplicating the call site.
    rule_kwargs = {
        'copies': args.copies,
        'rse_expression': args.rse_expression,
        'weight': args.weight,
        'lifetime': args.lifetime,
        'grouping': args.grouping,
        'account': args.rule_account,
        'locked': args.locked,
        'source_replica_expression': args.source_replica_expression,
        'notify': args.notify,
        'activity': args.activity,
        'comment': args.comment,
        'ask_approval': args.ask_approval,
        'asynchronous': args.asynchronous,
        'delay_injection': args.delay_injection,
    }
    try:
        rule_ids = client.add_replication_rule(dids=dids, **rule_kwargs)
    except DuplicateRule:
        if args.ignore_duplicate:
            # Retry one DID at a time, skipping DIDs that already have the rule.
            for did in dids:
                try:
                    rule_ids.extend(client.add_replication_rule(dids=[did], **rule_kwargs))
                except DuplicateRule:
                    print('Duplicate rule for %s:%s found; Skipping.' % (did['scope'], did['name']))
        else:
            # Bare re-raise preserves the original traceback.
            raise

    for rule in rule_ids:
        print(rule)
    return SUCCESS
1321
+
1322
+
1323
@exception_handler
def delete_rule(args, client, logger, console, spinner):
    """
    %(prog)s delete-rule [options] <ruleid>

    Delete a rule.
    """

    try:
        # Test if the rule_id is a real rule_id
        uuid.UUID(args.rule_id)
    except ValueError:
        # Not a rule id: interpret the argument as a DID and delete the
        # matching rules on the given RSE expression.
        if not args.rses:
            logger.error('A RSE expression must be specified if you do not provide a rule_id but a DID')
            return FAILURE
        scope, name = get_scope(args.rule_id, client)
        account = client.account if args.rule_account is None else args.rule_account
        deleted_any = False
        for rule in client.list_did_rules(scope=scope, name=name):
            # --delete-all bypasses the rule-owner check.
            owner_ok = True if args.delete_all else rule['account'] == account
            if rule['rse_expression'] == args.rses and owner_ok:
                client.delete_replication_rule(rule_id=rule['id'], purge_replicas=args.purge_replicas)
                deleted_any = True
        if not deleted_any:
            logger.error('No replication rule was deleted from the DID')
            return FAILURE
    else:
        client.delete_replication_rule(rule_id=args.rule_id, purge_replicas=args.purge_replicas)
    return SUCCESS
1359
+
1360
+
1361
@exception_handler
def update_rule(args, client, logger, console, spinner):
    """
    %(prog)s update-rule [options] <ruleid>

    Update a rule.
    """

    # Translate each provided CLI option into an entry of the options dict
    # understood by update_replication_rule.
    options = {}
    if args.lifetime:
        options['lifetime'] = None if args.lifetime.lower() == "none" else int(args.lifetime)
    if args.locked:
        locked_flag = args.locked.title()
        if locked_flag == "True":
            options['locked'] = True
        elif locked_flag == "False":
            options['locked'] = False
        else:
            logger.error('Locked must be True or False')
            return FAILURE
    if args.comment:
        options['comment'] = args.comment
    if args.rule_account:
        options['account'] = args.rule_account
    if args.state_stuck:
        options['state'] = 'STUCK'
    if args.state_suspended:
        options['state'] = 'SUSPENDED'
    if args.rule_activity:
        options['activity'] = args.rule_activity
    if args.source_replica_expression:
        expression = args.source_replica_expression
        options['source_replica_expression'] = None if expression.lower() == 'none' else expression
    if args.cancel_requests:
        # Cancelling requests only makes sense together with a state change.
        if 'state' not in options:
            logger.error('--stuck or --suspend must be specified when running --cancel-requests')
            return FAILURE
        options['cancel_requests'] = True
    if args.priority:
        options['priority'] = int(args.priority)
    if args.child_rule_id:
        options['child_rule_id'] = None if args.child_rule_id.lower() == 'none' else args.child_rule_id
    if args.boost_rule:
        options['boost_rule'] = args.boost_rule
    client.update_replication_rule(rule_id=args.rule_id, options=options)
    print('Updated Rule')
    return SUCCESS
1409
+
1410
+
1411
@exception_handler
def move_rule(args, client, logger, console, spinner):
    """
    %(prog)s move-rule [options] <ruleid> <rse_expression>

    Update a rule.
    """

    # Collect optional overrides for the moved rule.
    override = {}
    if args.activity:
        override['activity'] = args.activity
    if args.source_replica_expression:
        expression = args.source_replica_expression
        override['source_replica_expression'] = None if expression.lower() == "none" else expression

    new_rule_id = client.move_replication_rule(rule_id=args.rule_id,
                                               rse_expression=args.rse_expression,
                                               override=override)
    print(new_rule_id)
    return SUCCESS
1429
+
1430
+
1431
@exception_handler
def info_rule(args, client, logger, console, spinner):
    """
    %(prog)s rule-info [options] <ruleid>

    Retrieve information about a rule.
    """

    if cli_config == 'rich':
        spinner.update(status='Fetching rule info')
        spinner.start()

    if args.examine:
        # Single call shared by both output modes (previously the plain
        # branch issued a second, redundant examine_replication_rule call).
        analysis = client.examine_replication_rule(rule_id=args.rule_id)
        if cli_config == 'rich':
            output = []
            keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE, **CLITheme.RULE_STATE}
            rule_status = " ".join([f'[{keyword_styles.get(word, "default")}]{word}[/]' for word in analysis['rule_error'].split()])
            output.append(f'Status of the replication rule: {rule_status}')
            if analysis['transfers']:
                output.append('[b]STUCK Requests:[/]')
                for transfer in analysis['transfers']:
                    output.append(Padding.indent(Text(f"{transfer['scope']}:{transfer['name']}", style=CLITheme.SUBHEADER_HIGHLIGHT), 2))
                    # BUGFIX: the 'Last error' row previously displayed
                    # transfer['last_source']; it now shows last_error and a
                    # separate 'Last source' row, matching the plain output.
                    table_data = [['RSE:', str(transfer['rse'])],
                                  ['Attempts:', str(transfer['attempts'])],
                                  ['Last retry:', str(transfer['last_time'])],
                                  ['Last error:', str(transfer['last_error'])],
                                  ['Last source:', str(transfer['last_source'])],
                                  ['Available sources:', ', '.join([source[0] for source in transfer['sources'] if source[1]])],
                                  ['Blocklisted sources:', ', '.join([source[0] for source in transfer['sources'] if not source[1]])]]
                    table = generate_table(table_data, row_styles=['none'], col_alignments=['left', 'left'])
                    output.append(Padding.indent(table, 2))

            spinner.stop()
            print_output(*output, console=console, no_pager=args.no_pager)
        else:
            print('Status of the replication rule: %s' % analysis['rule_error'])
            if analysis['transfers']:
                print('STUCK Requests:')
                for transfer in analysis['transfers']:
                    print(' %s:%s' % (transfer['scope'], transfer['name']))
                    print(' RSE: %s' % str(transfer['rse']))
                    print(' Attempts: %s' % str(transfer['attempts']))
                    print(' Last Retry: %s' % str(transfer['last_time']))
                    print(' Last error: %s' % str(transfer['last_error']))
                    print(' Last source: %s' % str(transfer['last_source']))
                    print(' Available sources: %s' % ', '.join([source[0] for source in transfer['sources'] if source[1]]))
                    print(' Blocklisted sources: %s' % ', '.join([source[0] for source in transfer['sources'] if not source[1]]))
    else:
        rule = client.get_replication_rule(rule_id=args.rule_id)
        if cli_config == 'rich':
            keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.DID_TYPE, **CLITheme.RULE_STATE}
            table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for k, v in sorted(rule.items())]
            table = generate_table(table_data, col_alignments=['left', 'left'], row_styles=['none'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print("Id: %s" % rule['id'])
            print("Account: %s" % rule['account'])
            print("Scope: %s" % rule['scope'])
            print("Name: %s" % rule['name'])
            print("RSE Expression: %s" % rule['rse_expression'])
            print("Copies: %s" % rule['copies'])
            print("State: %s" % rule['state'])
            print("Locks OK/REPLICATING/STUCK: %s/%s/%s" % (rule['locks_ok_cnt'], rule['locks_replicating_cnt'], rule['locks_stuck_cnt']))
            print("Grouping: %s" % rule['grouping'])
            print("Expires at: %s" % rule['expires_at'])
            print("Locked: %s" % rule['locked'])
            print("Weight: %s" % rule['weight'])
            print("Created at: %s" % rule['created_at'])
            print("Updated at: %s" % rule['updated_at'])
            print("Error: %s" % rule['error'])
            print("Subscription Id: %s" % rule['subscription_id'])
            print("Source replica expression: %s" % rule['source_replica_expression'])
            print("Activity: %s" % rule['activity'])
            print("Comment: %s" % rule['comments'])
            print("Ignore Quota: %s" % rule['ignore_account_limit'])
            print("Ignore Availability: %s" % rule['ignore_availability'])
            print("Purge replicas: %s" % rule['purge_replicas'])
            print("Notification: %s" % rule['notification'])
            print("End of life: %s" % rule['eol_at'])
            print("Child Rule Id: %s" % rule['child_rule_id'])
    return SUCCESS
1514
+
1515
+
1516
@exception_handler
def list_rules(args, client, logger, console, spinner):
    """
    %(prog)s list-rules ...

    List rules.
    """

    if cli_config == 'rich':
        spinner.update(status='Fetching rules')
        spinner.start()

    # Resolve the rule source: exactly one selector option is honoured, in
    # this priority order (rule id, file, traverse, did, account, subscription).
    if args.rule_id:
        rules = [client.get_replication_rule(args.rule_id)]
    elif args.file:
        scope, name = get_scope(args.file, client)
        rules = client.list_associated_rules_for_file(scope=scope, name=name)
    elif args.traverse:
        # Follow the dataset locks and collect the (de-duplicated) rules
        # that created them.
        scope, name = get_scope(args.did, client)
        locks = client.get_dataset_locks(scope=scope, name=name)
        rules = []
        for rule_id in list(set([lock['rule_id'] for lock in locks])):
            rules.append(client.get_replication_rule(rule_id))
    elif args.did:
        scope, name = get_scope(args.did, client)
        meta = client.get_metadata(scope=scope, name=name)
        rules = client.list_did_rules(scope=scope, name=name)
        try:
            # Probe the generator: if it yields at least one rule, re-create
            # it so the full listing (including the probed entry) is shown.
            next(rules)
            rules = client.list_did_rules(scope=scope, name=name)
        except StopIteration:
            rules = []
            # No rules on the DID itself: look for other rules on its
            # content (for containers) or its parents (for datasets).
            if meta['did_type'] == 'CONTAINER':
                for dsn in client.list_content(scope, name):
                    rules.extend(client.list_did_rules(scope=dsn['scope'], name=dsn['name']))
                if rules:
                    print('No rules found, listing rules for content')
            if meta['did_type'] == 'DATASET':
                for container in client.list_parent_dids(scope, name):
                    rules.extend(client.list_did_rules(scope=container['scope'], name=container['name']))
                if rules:
                    print('No rules found, listing rules for parents')
    elif args.rule_account:
        rules = client.list_account_rules(account=args.rule_account)
    elif args.subscription:
        account = args.subscription[0]
        name = args.subscription[1]
        rules = client.list_subscription_rules(account=account, name=name)
    else:
        print('At least one option has to be given. Use -h to list the options.')
        return FAILURE

    if args.csv:
        # Machine-readable output: one comma-separated line per rule.
        for rule in rules:
            print(rule['id'],
                  rule['account'],
                  f"{rule['scope']}:{rule['name']}",
                  f"{rule['state']}[{rule['locks_ok_cnt']}/{rule['locks_replicating_cnt']}/{rule['locks_stuck_cnt']}]",
                  rule['rse_expression'],
                  rule['copies'],
                  sizefmt(rule['bytes'], args.human) if rule['bytes'] is not None else 'N/A',
                  rule['expires_at'],
                  rule['created_at'],
                  sep=',')

        if cli_config == 'rich':
            spinner.stop()
    else:
        table_data = []
        for rule in rules:
            if cli_config == 'rich':
                # Rich output colourises the rule state via CLITheme markup.
                table_data.append([rule['id'],
                                   rule['account'],
                                   f"{rule['scope']}:{rule['name']}",
                                   f"[{CLITheme.RULE_STATE.get(rule['state'], 'default')}]{rule['state']}[/][{rule['locks_ok_cnt']}/{rule['locks_replicating_cnt']}/{rule['locks_stuck_cnt']}]",
                                   rule['rse_expression'],
                                   rule['copies'],
                                   sizefmt(rule['bytes'], args.human) if rule['bytes'] is not None else 'N/A',
                                   rule['expires_at'],
                                   rule['created_at']])
            else:
                table_data.append([rule['id'],
                                   rule['account'],
                                   f"{rule['scope']}:{rule['name']}",
                                   f"{rule['state']}[{rule['locks_ok_cnt']}/{rule['locks_replicating_cnt']}/{rule['locks_stuck_cnt']}]",
                                   rule['rse_expression'],
                                   rule['copies'],
                                   sizefmt(rule['bytes'], args.human) if rule['bytes'] is not None else 'N/A',
                                   rule['expires_at'],
                                   rule['created_at']])

        if cli_config == 'rich':
            table = generate_table(table_data, headers=['ID', 'ACCOUNT', 'SCOPE:NAME', 'STATE[OK/REPL/STUCK]', 'RSE EXPRESSION', 'COPIES', 'SIZE', 'EXPIRES (UTC)', 'CREATED (UTC)'],
                                   col_alignments=['left', 'left', 'left', 'right', 'left', 'right', 'right', 'left', 'left'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print(tabulate(table_data, tablefmt='simple', headers=['ID', 'ACCOUNT', 'SCOPE:NAME', 'STATE[OK/REPL/STUCK]', 'RSE_EXPRESSION', 'COPIES', 'SIZE', 'EXPIRES (UTC)', 'CREATED (UTC)'], disable_numparse=True))
    return SUCCESS
1615
+
1616
+
1617
@exception_handler
def list_rules_history(args, client, logger, console, spinner):
    """
    %(prog)s list-rules_history ...

    List replication rules history for a DID.
    """
    # Tracks rule ids currently considered "inserted": an id seen for the
    # first time is an insertion, seeing it again means deletion.
    rule_dict = []
    if cli_config == 'rich':
        spinner.update(status='Fetching rules history')
        spinner.start()

    scope, name = get_scope(args.did, client)
    table_data = []
    for rule in client.list_replication_rule_full_history(scope, name):
        if rule['rule_id'] not in rule_dict:
            rule_dict.append(rule['rule_id'])
            if cli_config == 'rich':
                table_data.append(['Insertion', rule['account'], rule['rse_expression'], rule['created_at']])
            else:
                print('-' * 40)
                print('Rule insertion')
                print('Account : %s' % rule['account'])
                print('RSE expression : %s' % (rule['rse_expression']))
                print('Time : %s' % (rule['created_at']))
        else:
            rule_dict.remove(rule['rule_id'])
            if cli_config == 'rich':
                table_data.append(['Deletion', rule['account'], rule['rse_expression'], rule['updated_at']])
            else:
                print('-' * 40)
                print('Rule deletion')
                print('Account : %s' % rule['account'])
                print('RSE expression : %s' % (rule['rse_expression']))
                print('Time : %s' % (rule['updated_at']))

    if cli_config == 'rich':
        # Most recent events first.
        table_data = sorted(table_data, key=lambda entry: entry[-1], reverse=True)
        table = generate_table(table_data, headers=['ACTION', 'ACCOUNT', 'RSE EXPRESSION', 'TIME'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    return SUCCESS
1659
+
1660
+
1661
@exception_handler
def list_rses(args, client, logger, console, spinner):
    """
    %(prog)s list-rses [options] <field1=value1 field2=value2 ...>

    List rses.

    """
    if cli_config == 'rich':
        spinner.update(status='Fetching RSEs')
        spinner.start()

    # args.rses is an optional RSE filter expression; None lists everything.
    rses = client.list_rses(args.rses)
    if args.csv:
        print(*(rse['rse'] for rse in rses), sep='\n')
    elif cli_config == 'rich':
        table = generate_table([[rse['rse']] for rse in sorted(rses, key=lambda elem: elem['rse'])], headers=['RSE'], col_alignments=['left'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        for rse in rses:
            print('%(rse)s' % rse)
    return SUCCESS
1684
+
1685
+
1686
@exception_handler
def list_suspicious_replicas(args, client, logger, console, spinner):
    """
    %(prog)s list-suspicious-replicas [options] <field1=value1 field2=value2 ...>

    List replicas marked as suspicious.

    """

    rse_expression = None
    younger_than = None
    nattempts = None
    if args.rse_expression:
        rse_expression = args.rse_expression
    if args.younger_than:
        younger_than = args.younger_than
    if args.nattempts:
        nattempts = args.nattempts

    if cli_config == 'rich':
        spinner.update(status='Fetching suspicious replicas')
        spinner.start()

    # Generator is a list with one entry, which itself is a list of lists.
    replicas_gen = client.list_suspicious_replicas(rse_expression, younger_than, nattempts)
    for i in replicas_gen:
        replicas = i
    table = []
    table_data = []
    for rep in replicas:
        table_data.append([rep['rse'], rep['scope'], rep['created_at'], rep['cnt'], rep['name']])

    if cli_config == 'rich':
        table = generate_table(table_data, headers=['RSE EXPRESSION', 'SCOPE', 'CREATED AT', 'N-ATTEMPTS', 'FILE NAME'], col_alignments=['left', 'left', 'left', 'right', 'left'])
        spinner.stop()
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        print(tabulate(table_data, headers=(['RSE Expression:', 'Scope:', 'Created at:', 'Nattempts:', 'File Name:'])))
    return SUCCESS
1725
+
1726
+
1727
@exception_handler
def list_rse_attributes(args, client, logger, console, spinner):
    """
    %(prog)s list-rse-attributes [options] <field1=value1 field2=value2 ...>

    List rses.

    """

    attributes = client.list_rse_attributes(rse=args.rse)
    if cli_config == 'rich':
        # Colourise well-known attribute values (booleans, RSE types).
        keyword_styles = {**CLITheme.BOOLEAN, **CLITheme.RSE_TYPE}
        table_data = [(k, Text(str(v), style=keyword_styles.get(str(v), 'default'))) for k, v in sorted(attributes.items())]  # columns have mixed datatypes
        table = generate_table(table_data, col_alignments=['left', 'left'], row_styles=['none'])
        print_output(table, console=console, no_pager=args.no_pager)
    else:
        table = [(k + ':', str(v)) for (k, v) in sorted(attributes.items())]  # columns have mixed datatypes
        print(tabulate(table, tablefmt='plain', disable_numparse=True))  # disabling number parsing
    return SUCCESS
1746
+
1747
+
1748
@exception_handler
def list_rse_usage(args, client, logger, console, spinner):
    """
    %(prog)s list-rse-usage [options] <rse>

    Show the space usage of a given rse

    """

    if cli_config == 'rich':
        spinner.update(status='Fetching RSE usage')
        spinner.start()

    all_usages = client.get_rse_usage(rse=args.rse, filters={'per_account': args.show_accounts})
    # Protocol-derived sources are redundant with 'storage'; keep the rest.
    select_usages = [u for u in all_usages if u['source'] not in ('srm', 'gsiftp', 'webdav')]

    if cli_config == 'rich':
        output = []
        table_data = []
        header = ['SOURCE', 'USED', 'FILES', 'FREE', 'TOTAL', 'UPDATED AT']
        header_account_data = ['ACCOUNT', 'USED', 'PERCENTAGE %']
        # Map usage-dict keys (e.g. 'updated_at') onto their table column index.
        key2id = {header[i].lower().replace(' ', '_'): i for i in range(len(header))}
        account_data = {}
    for usage in select_usages:
        if cli_config == 'rich':
            row = [''] * len(header)
        for elem in usage:
            # 'free'/'total' are only meaningful for the 'storage' source and
            # 'files' only for the 'rucio' source; skip them elsewhere.
            if elem in ['free', 'total'] and usage['source'] != 'storage' or elem == 'files' and usage['source'] != 'rucio':
                continue
            elif elem in ['used', 'free', 'total']:
                if cli_config == 'rich':
                    row[key2id[elem]] = sizefmt(usage[elem], args.human)
                else:
                    print(' {0}: {1}'.format(elem, sizefmt(usage[elem], args.human)))
            elif elem == 'account_usages':
                if cli_config == 'rich':
                    if usage[elem]:
                        for account in usage[elem]:
                            account_data[usage['source']].append([account['account'], sizefmt(account['used'], args.human), str(account['percentage'])])
                else:
                    account_usages_title = ' per account:'
                    if not usage[elem]:
                        # NOTE(review): the title with ' no usage' appended is
                        # built but not printed here — confirm against upstream.
                        account_usages_title += ' no usage'
                    else:
                        print(account_usages_title)
                        print(' ------')
                        col_width = max(len(str(entry[1])) for account in usage[elem] for entry in list(account.items())) + 16
                        for account in usage[elem]:
                            base_string = ' '
                            used_string = 'used: {0}'.format(sizefmt(account['used'], args.human))
                            account_string = 'account: {0}'.format(account['account'])
                            percentage_string = 'percentage: {0}'.format(account['percentage'])
                            print(base_string + account_string.ljust(col_width) + used_string.ljust(col_width) + percentage_string.ljust(col_width))
                        print(' ------')
            else:
                if cli_config == 'rich':
                    if elem in key2id:
                        row[key2id[elem]] = str(usage[elem])
                    if elem == 'source':
                        # Seed the per-account bucket for this source.
                        account_data[usage[elem]] = []
                else:
                    print(' {0}: {1}'.format(elem, usage[elem]))

        if cli_config == 'rich':
            table_data.append(row)

    if cli_config == 'rich':
        table = generate_table(table_data, headers=header, col_alignments=['left', 'right', 'right', 'right', 'right', 'left'])
        output.append(table)

        if args.show_accounts:
            output.append('\n[b]USAGE PER ACCOUNT:')
            for source in account_data:
                if len(account_data[source]) > 0:
                    output.append(Padding.indent(Text(f'source: {source}', style=CLITheme.SUBHEADER_HIGHLIGHT), 2))
                    account_table = generate_table(account_data[source], headers=header_account_data, col_alignments=['left', 'right', 'right'])
                    output.append(Padding.indent(account_table, 2))

        spinner.stop()
        print_output(*output, console=console, no_pager=args.no_pager)
    else:
        print('------')
    return SUCCESS
1832
+
1833
+
1834
@exception_handler
def list_account_limits(args, client, logger, console, spinner):
    """
    %(prog)s list [options] <field1=value1 field2=value2 ...>

    List account limits.

    """
    if cli_config == 'rich':
        spinner.update(status='Fetching account limits')
        spinner.start()

    # Local (per-RSE) limits: either a single RSE or all of them.
    if args.rse:
        limits = client.get_local_account_limit(account=args.limit_account, rse=args.rse)
    else:
        limits = client.get_local_account_limits(account=args.limit_account)

    table_data = []
    for limit in list(limits.items()):
        table_data.append([limit[0], sizefmt(limit[1], args.human)])
    table_data.sort()

    if cli_config == 'rich':
        table1 = generate_table(table_data, headers=['RSE', 'LIMIT'], col_alignments=['left', 'right'])
    else:
        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE', 'LIMIT']))

    # Global limits (RSE expressions); when --rse is given only show the
    # expressions that resolve to that RSE.
    table_data = []
    limits = client.get_global_account_limits(account=args.limit_account)
    for limit in list(limits.items()):
        if (args.rse and args.rse in limit[1]['resolved_rses']) or not args.rse:
            table_data.append([limit[0], sizefmt(limit[1]['limit'], args.human)])
    table_data.sort()

    if cli_config == 'rich':
        table2 = generate_table(table_data, headers=['RSE EXPRESSION', 'LIMIT'], col_alignments=['left', 'right'])
    else:
        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE EXPRESSION', 'LIMIT']))

    if cli_config == 'rich':
        spinner.stop()
        print_output(table1, table2, console=console, no_pager=args.no_pager)
    return SUCCESS
1877
+
1878
+
1879
@exception_handler
def list_account_usage(args, client, logger, console, spinner):
    """
    %(prog)s list [options] <field1=value1 field2=value2 ...>

    List account usage.

    """
    if cli_config == 'rich':
        spinner.update(status='Fetching account usage')
        spinner.start()

    # Local (per-RSE) usage.
    usage = client.get_local_account_usage(account=args.usage_account, rse=args.rse)
    table_data = []
    for item in usage:
        # Negative remaining quota is clamped to 0 for display.
        remaining = 0 if float(item['bytes_remaining']) < 0 else float(item['bytes_remaining'])
        table_data.append([item['rse'], sizefmt(item['bytes'], args.human), sizefmt(item['bytes_limit'], args.human), sizefmt(remaining, args.human)])
    table_data.sort()

    if cli_config == 'rich':
        table1 = generate_table(table_data, headers=['RSE', 'USAGE', 'LIMIT', 'QUOTA LEFT'], col_alignments=['left', 'right', 'right', 'right'])
    else:
        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE', 'USAGE', 'LIMIT', 'QUOTA LEFT']))

    # Global usage (RSE expressions), filtered by --rse when given.
    table_data = []
    usage = client.get_global_account_usage(account=args.usage_account)
    for item in usage:
        if (args.rse and args.rse in item['rse_expression']) or not args.rse:
            remaining = 0 if float(item['bytes_remaining']) < 0 else float(item['bytes_remaining'])
            table_data.append([item['rse_expression'], sizefmt(item['bytes'], args.human), sizefmt(item['bytes_limit'], args.human), sizefmt(remaining, args.human)])
    table_data.sort()

    if cli_config == 'rich':
        table2 = generate_table(table_data, headers=['RSE EXPRESSION', 'USAGE', 'LIMIT', 'QUOTA LEFT'], col_alignments=['left', 'right', 'right', 'right'])
    else:
        print(tabulate(table_data, tablefmt=tablefmt, headers=['RSE EXPRESSION', 'USAGE', 'LIMIT', 'QUOTA LEFT']))

    if cli_config == 'rich':
        spinner.stop()
        print_output(table1, table2, console=console, no_pager=args.no_pager)
    return SUCCESS
1920
+
1921
+
1922
@exception_handler
def list_datasets_rse(args, client, logger, console, spinner):
    """
    %(prog)s list [options] <field1=value1 field2=value2 ...>

    List the datasets in a site.

    """

    if cli_config == 'rich':
        spinner.update(status='Fetching datasets at RSE')
        spinner.start()

    if args.long:
        table_data = []
        for dsn in client.list_datasets_per_rse(args.rse):
            # Fix: a missing comma after the DID f-string concatenated it with
            # the file-count column, yielding 2 columns against 3 headers.
            table_data.append([f"{dsn['scope']}:{dsn['name']}",
                               f"{str(dsn['available_length'])}/{str(dsn['length'])}",
                               f"{str(dsn['available_bytes'])}/{str(dsn['bytes'])}"])

        if cli_config == 'rich':
            table_data.sort()
            table = generate_table(table_data, headers=['SCOPE:NAME', 'LOCAL FILES/TOTAL FILES', 'LOCAL BYTES/TOTAL BYTES'], col_alignments=['left', 'right', 'right'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print(tabulate(table_data, tablefmt=tablefmt, headers=['DID', 'LOCAL FILES/TOTAL FILES', 'LOCAL BYTES/TOTAL BYTES']))
    else:
        # Short form: unique, sorted list of DIDs only.
        dsns = list(set([f"{dsn['scope']}:{dsn['name']}" for dsn in client.list_datasets_per_rse(args.rse)]))
        dsns.sort()
        if cli_config == 'rich':
            table = generate_table([[dsn] for dsn in dsns], headers=['SCOPE:NAME'])
            spinner.stop()
            print_output(table, console=console, no_pager=args.no_pager)
        else:
            print("SCOPE:NAME")
            print('----------')
            for dsn in dsns:
                print(dsn)
    return SUCCESS
1962
+
1963
+
1964
@exception_handler
def add_lifetime_exception(args, client, logger, console, spinner):
    """
    %(prog)s add_lifetime_exception [options] <field1=value1 field2=value2 ...>

    Declare a lifetime model exception.

    """

    # --reason, --expiration and --inputfile are all mandatory.
    if not args.reason:
        logger.error('reason for the extension is mandatory')
        return FAILURE
    reason = args.reason
    if not args.expiration:
        logger.error('expiration is mandatory')
        return FAILURE
    try:
        expiration = datetime.strptime(args.expiration, "%Y-%m-%d")
    except Exception as err:
        logger.error(err)
        return FAILURE

    if not args.inputfile:
        logger.error('inputfile is mandatory')
        return FAILURE
    with open(args.inputfile) as infile:
        # Deduplicate the content of the input file and ignore empty lines.
        dids = set(did for line in infile if (did := line.strip()))

    dids_list = []
    containers = []
    datasets = []
    for did in dids:
        scope, name = get_scope(did, client)
        dids_list.append({'scope': scope, 'name': name})
    error_summary = {
        "total_dids": {"description": "Total DIDs", "count": len(dids_list)},
        "files_ignored": {"description": "DID not submitted because it is a file", "count": 0},
        "containers_resolved": {"description": "DID that are containers and were resolved", "count": 0},
        "not_in_lifetime_model": {"description": "DID not submitted because it is not part of the lifetime campaign", "count": 0},
        "successfully_submitted": {"description": "DID successfully submitted including the one from containers resolved", "count": 0},
    }
    chunk_limit = 500  # Server should be able to accept 1000
    # Iterate a copy: DIDs found by get_metadata_bulk are removed from
    # dids_list, so what remains afterwards are the non-existent ones.
    dids_list_copy = deepcopy(dids_list)
    for chunk in chunks(dids_list_copy, chunk_limit):
        for meta in client.get_metadata_bulk(chunk):
            scope, name = meta['scope'], meta['name']
            dids_list.remove({'scope': scope, 'name': name})
            if meta['did_type'] == 'FILE':
                logger.warning('%s:%s is a file. Will be ignored' % (scope, name))
                error_summary["files_ignored"]["count"] += 1
            elif meta['did_type'] == 'CONTAINER':
                logger.warning('%s:%s is a container. It needs to be resolved' % (scope, name))
                containers.append({'scope': scope, 'name': name})
                error_summary["containers_resolved"]["count"] += 1
            elif not meta['eol_at']:
                logger.warning('%s:%s is not affected by the lifetime model' % (scope, name))
                error_summary["not_in_lifetime_model"]["count"] += 1
            else:
                logger.info('%s:%s will be declared' % (scope, name))
                datasets.append({'scope': scope, 'name': name})
                error_summary["successfully_submitted"]["count"] += 1

    # Anything still in dids_list was not returned by the server.
    for did in dids_list:
        scope = did['scope']
        name = did['name']
        logger.warning('%s:%s does not exist' % (scope, name))

    if containers:
        logger.warning('One or more DIDs are containers. They will be resolved into a list of datasets to request exception. Full list below')
        for container in containers:
            logger.info('Resolving %s:%s into datasets :' % (container['scope'], container['name']))
            list_datasets = __resolve_containers_to_datasets(container['scope'], container['name'], client)
            for chunk in chunks(list_datasets, chunk_limit):
                for meta in client.get_metadata_bulk(chunk):
                    scope, name = meta['scope'], meta['name']
                    logger.debug('%s:%s' % (scope, name))
                    if not meta['eol_at']:
                        logger.warning('%s:%s is not affected by the lifetime model' % (scope, name))
                        error_summary["not_in_lifetime_model"]["count"] += 1
                    else:
                        logger.info('%s:%s will be declared' % (scope, name))
                        datasets.append({'scope': scope, 'name': name})
                        error_summary["successfully_submitted"]["count"] += 1
    if not datasets:
        logger.error('Nothing to submit')
        return SUCCESS
    try:
        client.add_exception(dids=datasets, account=client.account, pattern='', comments=reason, expires_at=expiration)
    except UnsupportedOperation as err:
        logger.error(err)
        return FAILURE
    except Exception:
        error_message = 'Failure to submit exception. Please retry.'
        if cli_config == 'rich':
            # Rich logger prints the traceback itself when at DEBUG level.
            if logger.level == DEBUG:
                logger.exception(error_message)
            else:
                logger.error(error_message)
        else:
            logger.error(error_message)
            logger.debug(traceback.format_exc())
        return FAILURE

    logger.info('Exception successfully submitted. Summary below:')
    for key, data in error_summary.items():
        print('{0:100} {1:6d}'.format(data["description"], data["count"]))
    return SUCCESS
2072
+
2073
+
2074
def test_server(args, client, logger, console, spinner):
    """
    %(prog)s test-rucio-server [options] <field1=value1 field2=value2 ...>
    Test the client against a server.
    """
    # Run the bundled TestRucioServer suite (fix: the original docstring
    # opened with four quote characters).
    suite = unittest.TestLoader().loadTestsFromTestCase(TestRucioServer)
    unittest.TextTestRunner(verbosity=2).run(suite)
    return SUCCESS
2082
+
2083
+
2084
def touch(args, client, logger, console, spinner):
    """
    %(prog)s touch [options] <did1 did2 ...>
    """

    # Update the access time of each DID on the given RSE.
    for did in args.dids:
        scope, name = get_scope(did, client)
        client.touch(scope, name, args.rse)
2092
+
2093
+
2094
def rse_completer(prefix, parsed_args, **kwargs):
    """
    Completes the argument with a list of RSEs
    """
    # Shell-completion hook (argcomplete-style signature): return all RSE
    # names known to the server.
    client = get_client(parsed_args, logger=None)
    return ["%(rse)s" % rse for rse in client.list_rses()]
2100
+
2101
+
2102
+ def get_parser():
2103
+ """
2104
+ Returns the argparse parser.
2105
+ """
2106
+ oparser = argparse.ArgumentParser(prog=os.path.basename(sys.argv[0]), add_help=True, exit_on_error=False)
2107
+ subparsers = oparser.add_subparsers()
2108
+
2109
+ # Main arguments
2110
+ oparser.add_argument('--version', action='version', version='%(prog)s ' + version.version_string())
2111
+ oparser.add_argument('--config', dest="config", help="The Rucio configuration file to use.")
2112
+ oparser.add_argument('--verbose', '-v', default=False, action='store_true', help="Print more verbose output.")
2113
+ oparser.add_argument('-H', '--host', dest="host", metavar="ADDRESS", help="The Rucio API host.")
2114
+ oparser.add_argument('--auth-host', dest="auth_host", metavar="ADDRESS", help="The Rucio Authentication host.")
2115
+ oparser.add_argument('-a', '--account', dest="issuer", help="Rucio account to use.")
2116
+ oparser.add_argument('-S', '--auth-strategy', dest="auth_strategy", default=None, help="Authentication strategy (userpass, x509...)")
2117
+ oparser.add_argument('-T', '--timeout', dest="timeout", type=float, default=None, help="Set all timeout values to seconds.")
2118
+ oparser.add_argument('--robot', '-R', dest="human", default=True, action='store_false', help="All output in bytes and without the units. This output format is preferred by parsers and scripts.")
2119
+ oparser.add_argument('--user-agent', '-U', dest="user_agent", default='rucio-clients', action='store', help="Rucio User Agent")
2120
+ oparser.add_argument('--vo', dest="vo", metavar="VO", default=None, help="VO to authenticate at. Only used in multi-VO mode.")
2121
+ oparser.add_argument("--no-pager", dest="no_pager", default=False, action='store_true', help=argparse.SUPPRESS)
2122
+
2123
+ # Options for the userpass or OIDC auth_strategy
2124
+ oparser.add_argument('-u', '--user', dest='username', default=None, help='username')
2125
+ oparser.add_argument('-pwd', '--password', dest='password', default=None, help='password')
2126
+ # Options for defining remaining OIDC parameters
2127
+ oparser.add_argument('--oidc-user', dest='oidc_username', default=None, help='OIDC username')
2128
+ oparser.add_argument('--oidc-password', dest='oidc_password', default=None, help='OIDC password')
2129
+ oparser.add_argument('--oidc-scope', dest='oidc_scope', default='openid profile', help='Defines which (OIDC) information user will share with Rucio. '
2130
+ + 'Rucio requires at least -sc="openid profile". To request refresh token for Rucio, scope must include "openid offline_access" and ' # NOQA: W503
2131
+ + 'there must be no active access token saved on the side of the currently used Rucio Client.') # NOQA: W503
2132
+ oparser.add_argument('--oidc-audience', dest='oidc_audience', default=None, help='Defines which audience are tokens requested for.')
2133
+ oparser.add_argument('--oidc-auto', dest='oidc_auto', default=False, action='store_true', help='If not specified, username and password credentials are not required and users will be given a URL '
2134
+ + 'to use in their browser. If specified, the users explicitly trust Rucio with their IdP credentials.') # NOQA: W503
2135
+ oparser.add_argument('--oidc-polling', dest='oidc_polling', default=False, action='store_true', help='If not specified, user will be asked to enter a code returned by the browser to the command line. '
2136
+ + 'If --polling is set, Rucio Client should get the token without any further interaction of the user. This option is active only if --auto is *not* specified.') # NOQA: W503
2137
+ oparser.add_argument('--oidc-refresh-lifetime', dest='oidc_refresh_lifetime', default=None, help='Max lifetime in hours for this an access token will be refreshed by asynchronous Rucio daemon. '
2138
+ + 'If not specified, refresh will be stopped after 4 days. This option is effective only if --oidc-scope includes offline_access scope for a refresh token to be granted to Rucio.') # NOQA: W503
2139
+ oparser.add_argument('--oidc-issuer', dest='oidc_issuer', default=None,
2140
+ help='Defines which Identity Provider is going to be used. The issuer string must correspond '
2141
+ + 'to the keys configured in the /etc/idpsecrets.json auth server configuration file.') # NOQA: W503
2142
+
2143
+ # Options for the x509 auth_strategy
2144
+ oparser.add_argument('--certificate', dest='certificate', default=None, help='Client certificate file for x509 Authentication.')
2145
+ oparser.add_argument('--client-key', dest='client_key', default=None, help='Client key for x509 Authentication.')
2146
+ oparser.add_argument('--ca-certificate', dest='ca_certificate', default=None, help='CA certificate to verify peer against (SSL).')
2147
+
2148
+ # Ping command
2149
+ ping_parser = subparsers.add_parser('ping', formatter_class=argparse.RawDescriptionHelpFormatter, help='Ping Rucio server.',
2150
+ epilog='Usage example\n'
2151
+ '"""""""""""""\n'
2152
+ '\n'
2153
+ 'To ping the server::\n'
2154
+ '\n'
2155
+ ' $ rucio ping\n'
2156
+ ' 1.14.8\n'
2157
+ '\n'
2158
+ 'The returned value is the version of Rucio installed on the server.'
2159
+ '\n')
2160
+ ping_parser.set_defaults(function=ping)
2161
+
2162
+ # The whoami command
2163
+ whoami_parser = subparsers.add_parser('whoami', help='Get information about account whose token is used.', formatter_class=argparse.RawDescriptionHelpFormatter,
2164
+ epilog='''Usage example
2165
+ """""""""""""
2166
+ ::
2167
+
2168
+ $ rucio whoami
2169
+ jdoe
2170
+
2171
+ The returned value is the account currently used.
2172
+ ''')
2173
+
2174
+ whoami_parser.set_defaults(function=whoami_account)
2175
+
2176
+ # The list-file-replicas command
2177
+ list_file_replicas_parser = subparsers.add_parser('list-file-replicas', help='List the replicas of a DID and its PFNs.', description='This method allows to list all the replicas of a given Data IDentifier (DID). \
2178
+ The only mandatory parameter is the DID which can be a container/dataset/files. By default all the files replicas in state available are returned.', formatter_class=argparse.RawDescriptionHelpFormatter,
2179
+ epilog='''Usage example
2180
+ ^^^^^^^^^^^^^
2181
+
2182
+ To list the file replicas for a given dataset::
2183
+
2184
+ $ rucio list-file-replicas user.jdoe:user.jdoe.test.data.1234.1
2185
+ +-----------+---------------------------------+------------+-----------+-----------------------------------------------------------------------------------+
2186
+ | SCOPE | NAME | FILESIZE | ADLER32 | RSE: REPLICA |
2187
+ |-----------+---------------------------------+------------+-----------+-----------------------------------------------------------------------------------|
2188
+ | user.jdoe | user.jdoe.test.data.1234.file.1 | 94.835 MB | 5d000974 | SITE1_DISK: srm://blahblih/path/to/file/user.jdoe/user.jdoe.test.data.1234.file.1 |
2189
+ | user.jdoe | user.jdoe.test.data.1234.file.1 | 94.835 MB | 5d000974 | SITE2_DISK: file://another/path/to/file/user.jdoe/user.jdoe.test.data.1234.file.1 |
2190
+ | user.jdoe | user.jdoe.test.data.1234.file.2 | 82.173 MB | 01e56f23 | SITE2_DISK: file://another/path/to/file/user.jdoe/user.jdoe.test.data.1234.file.2 |
2191
+ +-----------+---------------------------------+------------+-----------+-----------------------------------------------------------------------------------+
2192
+
2193
+ To list the missing replica of a dataset of a given RSE-expression::
2194
+
2195
+ $ rucio list-file-replicas --rses SITE1_DISK user.jdoe:user.jdoe.test.data.1234.1
2196
+ +-----------+---------------------------------+------------+-----------+-----------------------------------------------------------------------------------+
2197
+ | SCOPE | NAME | FILESIZE | ADLER32 | RSE: REPLICA |
2198
+ |-----------+---------------------------------+------------+-----------+-----------------------------------------------------------------------------------|
2199
+ | user.jdoe | user.jdoe.test.data.1234.file.1 | 94.835 MB | 5d000974 | SITE1_DISK: srm://blahblih/path/to/file/user.jdoe/user.jdoe.test.data.1234.file.1 |
2200
+ +-----------+---------------------------------+------------+-----------+-----------------------------------------------------------------------------------+
2201
+ ''')
2202
+ list_file_replicas_parser.set_defaults(function=list_file_replicas)
2203
+ list_file_replicas_parser.add_argument('--protocols', dest='protocols', action='store', help='List of comma separated protocols. (i.e. https, root, srm).', required=False)
2204
+ list_file_replicas_parser.add_argument('--all-states', dest='all_states', action='store_true', default=False, help='To select all replicas (including unavailable ones).\
2205
+ Also gets information about the current state of a DID in each RSE.\
2206
+ Legend: ' + ', '.join(["{0} = {1}".format(state.value, state.name) for state in ReplicaState]), required=False)
2207
+ list_file_replicas_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2208
+ list_file_replicas_parser.add_argument('--pfns', default=False, action='store_true', help='Show only the PFNs.', required=False)
2209
+ list_file_replicas_parser.add_argument('--domain', default=None, action='store', help='Force the networking domain. Available options: wan, lan, all.', required=False)
2210
+ list_file_replicas_parser.add_argument('--link', dest='link', default=None, action='store', help='Symlink PFNs with directory substitution.\
2211
+ For example: rucio list-file-replicas --rse RSE_TEST --link /eos/:/eos/ scope:datasetname', required=False)
2212
+ list_file_replicas_parser.add_argument('--missing', dest='missing', default=False, action='store_true', help='To list missing replicas at a RSE-Expression. Must be used with --rses option', required=False)
2213
+ list_file_replicas_parser.add_argument('--metalink', dest='metalink', default=False, action='store_true', help='Output available replicas as metalink.', required=False)
2214
+ list_file_replicas_parser.add_argument('--no-resolve-archives', dest='no_resolve_archives', default=False, action='store_true', help='Do not resolve archives which may contain the files.', required=False)
2215
+ list_file_replicas_parser.add_argument('--sort', dest='sort', default=None, action='store', help='Replica sort algorithm. Available options: geoip (default), random', required=False)
2216
+ list_file_replicas_parser.add_argument('--rses', dest='rses', default=None, action='store', help='The RSE filter expression. A comprehensive help about RSE expressions\
2217
+ can be found in ' + Color.BOLD + 'https://rucio.cern.ch/documentation/started/concepts/rse_expressions' + Color.END)
2218
+
2219
+ # The list-dataset-replicas command
2220
+ list_dataset_replicas_parser = subparsers.add_parser('list-dataset-replicas', help='List the dataset replicas.',
2221
+ formatter_class=argparse.RawDescriptionHelpFormatter,
2222
+ epilog='''Usage example
2223
+ """""""""""""
2224
+ ::
2225
+
2226
+ $ rucio list-dataset-replicas user.jdoe:user.jdoe.test.data.1234.1
2227
+
2228
+ DATASET: user.jdoe:user.jdoe.test.data.1234.1
2229
+ +------------+---------+---------+
2230
+ | RSE | FOUND | TOTAL |
2231
+ |------------+---------+---------|
2232
+ | SITE1_DISK | 1 | 2 |
2233
+ | SITE2_DISK | 2 | 2 |
2234
+ +------------+---------+---------+
2235
+ ''')
2236
+ list_dataset_replicas_parser.set_defaults(function=list_dataset_replicas)
2237
+ list_dataset_replicas_parser.add_argument(dest='dids', action='store', nargs='+', help='The name of the DID to search.')
2238
+ list_dataset_replicas_parser.add_argument('--deep', action='store_true', help='Make a deep check.')
2239
+ list_dataset_replicas_parser.add_argument('--csv', dest='csv', action='store_true', default=False, help='Comma Separated Value output.',)
2240
+
2241
+ # The add-dataset command
2242
+ add_dataset_parser = subparsers.add_parser('add-dataset', help='Add a dataset to Rucio Catalog.',
2243
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2244
+ """""""""""""
2245
+ ::
2246
+
2247
+ $ rucio add-dataset user.jdoe:user.jdoe.test.data.1234.1
2248
+ Added user.jdoe:user.jdoe.test.data.1234.1
2249
+
2250
+ ''')
2251
+
2252
+ add_dataset_parser.set_defaults(function=add_dataset)
2253
+ add_dataset_parser.add_argument('--monotonic', action='store_true', help='Monotonic status to True.')
2254
+ add_dataset_parser.add_argument(dest='did', action='store', help='The name of the dataset to add.')
2255
+ add_dataset_parser.add_argument('--lifetime', dest='lifetime', action='store', type=int, help='Lifetime in seconds.')
2256
+
2257
+ # The add-container command
2258
+ add_container_parser = subparsers.add_parser('add-container', help='Add a container to Rucio Catalog.',
2259
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2260
+ """""""""""""
2261
+ ::
2262
+
2263
+ $ rucio add-container user.jdoe:user.jdoe.test.cont.1234.1
2264
+ Added user.jdoe:user.jdoe.test.cont.1234.1
2265
+
2266
+ ''')
2267
+
2268
+ add_container_parser.set_defaults(function=add_container)
2269
+ add_container_parser.add_argument('--monotonic', action='store_true', help='Monotonic status to True.')
2270
+ add_container_parser.add_argument(dest='did', action='store', help='The name of the container to add.')
2271
+ add_container_parser.add_argument('--lifetime', dest='lifetime', action='store', type=int, help='Lifetime in seconds.')
2272
+
2273
+ # The attach command
2274
+ attach_parser = subparsers.add_parser('attach', help='Attach a list of DIDs to a parent DID.',
2275
+ description='Attach a list of Data IDentifiers (file, dataset or container) to an other Data IDentifier (dataset or container).',
2276
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2277
+ """""""""""""
2278
+ ::
2279
+
2280
+ $ rucio attach user.jdoe:user.jdoe.test.cont.1234.1 user.jdoe:user.jdoe.test.data.1234.1
2281
+ DIDs successfully attached to user.jdoe:user.jdoe.test.cont.1234.1
2282
+
2283
+ ''')
2284
+
2285
+ attach_parser.set_defaults(function=attach)
2286
+ attach_parser.add_argument(dest='todid', action='store', help='Destination Data IDentifier (either dataset or container).')
2287
+ attach_parser.add_argument('-f', '--from-file', dest='fromfile', action='store_true', default=False, help='Attach the DIDs contained in a file. The file should contain one did per line.')
2288
+ attach_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers (or a file containing one did per line, if -f is present).')
2289
+
2290
+ # The detach command
2291
+ detach_parser = subparsers.add_parser('detach', help='Detach a list of DIDs from a parent DID.',
2292
+ description='Detach a list of Data Identifiers (file, dataset or container) from an other Data Identifier (dataset or container).',
2293
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2294
+ """""""""""""
2295
+ ::
2296
+
2297
+ $ rucio detach user.jdoe:user.jdoe.test.cont.1234.1 user.jdoe:user.jdoe.test.data.1234.1
2298
+ DIDs successfully detached from user.jdoe:user.jdoe.test.cont.1234.1
2299
+
2300
+ ''')
2301
+
2302
+ detach_parser.set_defaults(function=detach)
2303
+ detach_parser.add_argument(dest='fromdid', action='store', help='Target Data IDentifier (must be a dataset or container).')
2304
+ detach_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2305
+
2306
+ # The list command
2307
+ ls_parser = subparsers.add_parser('ls', help='List the data identifiers matching some metadata (synonym for list-dids).', description='List the Data IDentifiers matching certain pattern. \
2308
+ Only the collections (i.e. dataset or container) are returned by default. With the filter option, you can specify a list of metadata that the Data IDentifier should match.',
2309
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2310
+ """""""""""""
2311
+ You can query the DIDs matching a certain pattern. It always requires to specify the scope in which you want to search::
2312
+
2313
+ $ rucio ls user.jdoe:*
2314
+ +-------------------------------------------+--------------+
2315
+ | SCOPE:NAME | [DID TYPE] |
2316
+ |-------------------------------------------+--------------|
2317
+ | user.jdoe:user.jdoe.test.container.1234.1 | CONTAINER |
2318
+ | user.jdoe:user.jdoe.test.container.1234.2 | CONTAINER |
2319
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2320
+ | user.jdoe:user.jdoe.test.dataset.1 | DATASET |
2321
+ | user.jdoe:user.jdoe.test.dataset.2 | DATASET |
2322
+ | user.jdoe:user.jdoe.test.data.1234.1 | DATASET |
2323
+ | user.jdoe:test.file.1 | FILE |
2324
+ | user.jdoe:test.file.2 | FILE |
2325
+ | user.jdoe:test.file.3 | FILE |
2326
+ +-------------------------------------------+--------------+
2327
+
2328
+ You can filter by key/value, e.g.::
2329
+
2330
+ $ rucio ls --filter type=CONTAINER
2331
+ +-------------------------------------------+--------------+
2332
+ | SCOPE:NAME | [DID TYPE] |
2333
+ |-------------------------------------------+--------------|
2334
+ | user.jdoe:user.jdoe.test.container.1234.1 | CONTAINER |
2335
+ | user.jdoe:user.jdoe.test.container.1234.2 | CONTAINER |
2336
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2337
+ +-------------------------------------------+--------------+
2338
+ ''')
2339
+
2340
+ ls_parser.set_defaults(function=list_dids)
2341
+ ls_parser.add_argument('-r', '--recursive', dest='recursive', action='store_true', default=False, help='List data identifiers recursively.')
2342
+ ls_parser.add_argument('--filter', dest='filter', action='store', help='Filter arguments in form `key=value,another_key=next_value`. Valid keys are name, type.')
2343
+ ls_parser.add_argument('--short', dest='short', action='store_true', help='Just dump the list of DIDs.')
2344
+ ls_parser.add_argument(dest='did', nargs=1, action='store', default=None, help='Data IDentifier pattern.')
2345
+
2346
+ list_parser = subparsers.add_parser('list-dids',
2347
+ help='List the data identifiers matching some metadata (synonym for ls).',
2348
+ description='''List the Data IDentifiers matching certain pattern.
2349
+ Only the collections (i.e. dataset or container) are returned by default.
2350
+ With the filter option, you can specify a list of metadata that the Data IDentifier should match.
2351
+ Please use the filter option `--filter type=all` to find all types of Data IDentifiers.''',
2352
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2353
+ """""""""""""
2354
+
2355
+ You can query the DIDs matching a certain pattern. It always requires to specify the scope in which you want to search::
2356
+
2357
+ $ rucio list-dids --filter 'type=all' user.jdoe:*
2358
+ +-------------------------------------------+--------------+
2359
+ | SCOPE:NAME | [DID TYPE] |
2360
+ |-------------------------------------------+--------------|
2361
+ | user.jdoe:user.jdoe.test.container.1234.1 | CONTAINER |
2362
+ | user.jdoe:user.jdoe.test.container.1234.2 | CONTAINER |
2363
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2364
+ | user.jdoe:user.jdoe.test.dataset.1 | DATASET |
2365
+ | user.jdoe:user.jdoe.test.dataset.2 | DATASET |
2366
+ | user.jdoe:user.jdoe.test.data.1234.1 | DATASET |
2367
+ | user.jdoe:test.file.1 | FILE |
2368
+ | user.jdoe:test.file.2 | FILE |
2369
+ | user.jdoe:test.file.3 | FILE |
2370
+ +-------------------------------------------+--------------+
2371
+
2372
+ You can filter by key/value, e.g.::
2373
+
2374
+ $ rucio list-dids --filter 'type=CONTAINER'
2375
+ +-------------------------------------------+--------------+
2376
+ | SCOPE:NAME | [DID TYPE] |
2377
+ |-------------------------------------------+--------------|
2378
+ | user.jdoe:user.jdoe.test.container.1234.1 | CONTAINER |
2379
+ | user.jdoe:user.jdoe.test.container.1234.2 | CONTAINER |
2380
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2381
+ +-------------------------------------------+--------------+''')
2382
+
2383
+ list_parser.set_defaults(function=list_dids)
2384
+ list_parser.add_argument('--recursive', dest='recursive', action='store_true', default=False, help='List data identifiers recursively.')
2385
+ list_parser.add_argument('--filter', dest='filter', action='store', help='Single or logically combined filtering expression(s) either in the form <key><operator><value> or <value1><operator1><key><operator2><value2> (compound inequality). Keys are equivalent to columns in the DID table. Operators must belong to the set of (<=, >=, ==, !=, >, <). The following conventions for combining expressions are used: ";" represents the logical OR operator; "," represents the logical AND operator.') # noqa: E501
2386
+ list_parser.add_argument('--short', dest='short', action='store_true', help='Just dump the list of DIDs.')
2387
+ list_parser.add_argument(dest='did', nargs=1, action='store', default=None, help='Data IDentifier pattern')
2388
+
2389
+ # The extended version of list_dids that goes through the plugin mechanism
2390
+ list_extended_parser = subparsers.add_parser('list-dids-extended',
2391
+ help='List the data identifiers matching some metadata (extended version to include metadata from various resources).',
2392
+ description='''List the Data IDentifiers matching certain pattern.
2393
+ Only the collections (i.e. dataset or container) are returned by default.
2394
+ With the filter option, you can specify a list of metadata that the Data IDentifier should match.
2395
+ Please use the filter option `--filter type=all` to find all types of Data IDentifiers.''',
2396
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2397
+ """""""""""""
2398
+
2399
+ You can query the DIDs matching a certain pattern. It always requires to specify the scope in which you want to search::
2400
+
2401
+ $ rucio list-dids --filter 'type=all' user.jdoe:*
2402
+ +-------------------------------------------+--------------+
2403
+ | SCOPE:NAME | [DID TYPE] |
2404
+ |-------------------------------------------+--------------|
2405
+ | user.jdoe:user.jdoe.test.container.1234.1 | CONTAINER |
2406
+ | user.jdoe:user.jdoe.test.container.1234.2 | CONTAINER |
2407
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2408
+ | user.jdoe:user.jdoe.test.dataset.1 | DATASET |
2409
+ | user.jdoe:user.jdoe.test.dataset.2 | DATASET |
2410
+ | user.jdoe:user.jdoe.test.data.1234.1 | DATASET |
2411
+ | user.jdoe:test.file.1 | FILE |
2412
+ | user.jdoe:test.file.2 | FILE |
2413
+ | user.jdoe:test.file.3 | FILE |
2414
+ +-------------------------------------------+--------------+
2415
+
2416
+ You can filter by key/value, e.g.::
2417
+
2418
+ $ rucio list-dids --filter 'type=CONTAINER'
2419
+ +-------------------------------------------+--------------+
2420
+ | SCOPE:NAME | [DID TYPE] |
2421
+ |-------------------------------------------+--------------|
2422
+ | user.jdoe:user.jdoe.test.container.1234.1 | CONTAINER |
2423
+ | user.jdoe:user.jdoe.test.container.1234.2 | CONTAINER |
2424
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2425
+ +-------------------------------------------+--------------+''')
2426
+
2427
+ list_extended_parser.set_defaults(function=list_dids_extended)
2428
+
2429
+ # The list parent-dids command
2430
+ list_parent_parser = subparsers.add_parser('list-parent-dids', help='List parent DIDs for a given DID', description='List all parents Data IDentifier that contains the target Data IDentifier.',
2431
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2432
+ """""""""""""
2433
+ ::
2434
+
2435
+ $ rucio list-parent-dids user.jdoe:user.jdoe.test.data.1234.1
2436
+ +--------------------------------------+--------------+
2437
+ | SCOPE:NAME | [DID TYPE] |
2438
+ |--------------------------------------+--------------|
2439
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2440
+ +--------------------------------------+--------------+
2441
+
2442
+ ''')
2443
+ list_parent_parser.set_defaults(function=list_parent_dids)
2444
+ list_parent_parser.add_argument(dest='did', action='store', nargs='?', default=None, help='Data identifier.')
2445
+ list_parent_parser.add_argument('--pfn', dest='pfns', action='store', nargs='+', help='List parent dids for these pfns.')
2446
+ list_parent_parser.add_argument('--guid', dest='guids', action='store', nargs='+', help='List parent dids for these guids.')
2447
+
2448
+ # argparse 2.7 does not allow aliases for commands, thus the list-parent-datasets is a copy&paste from list-parent-dids
2449
+ list_parent_datasets_parser = subparsers.add_parser('list-parent-datasets', help='List parent DIDs for a given DID', description='List all parents Data IDentifier that contains the target Data IDentifier.',
2450
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2451
+ """""""""""""
2452
+ ::
2453
+
2454
+ $ rucio list-parent-datasets user.jdoe:user.jdoe.test.data.1234.1
2455
+ +--------------------------------------+--------------+
2456
+ | SCOPE:NAME | [DID TYPE] |
2457
+ |--------------------------------------+--------------|
2458
+ | user.jdoe:user.jdoe.test.cont.1234.2 | CONTAINER |
2459
+ +--------------------------------------+--------------+
2460
+
2461
+ ''')
2462
+
2463
+ list_parent_datasets_parser.set_defaults(function=list_parent_dids)
2464
+ list_parent_datasets_parser.add_argument(dest='did', action='store', nargs='?', default=None, help='Data identifier.')
2465
+ list_parent_datasets_parser.add_argument('--pfn', dest='pfns', action='store', nargs='+', help='List parent dids for these pfns.')
2466
+ list_parent_datasets_parser.add_argument('--guid', dest='guids', action='store', nargs='+', help='List parent dids for these guids.')
2467
+
2468
+ # The list-scopes command
2469
+ scope_list_parser = subparsers.add_parser('list-scopes', help='List all available scopes.',
2470
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2471
+ """""""""""""
2472
+ ::
2473
+
2474
+ $ rucio list-scopes
2475
+ mc
2476
+ data
2477
+ user.jdoe
2478
+ user.janedoe
2479
+
2480
+ ''')
2481
+
2482
+ scope_list_parser.set_defaults(function=list_scopes)
2483
+
2484
+ # The close command
2485
+ close_parser = subparsers.add_parser('close', help='Close a dataset or container.')
2486
+ close_parser.set_defaults(function=close)
2487
+ close_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2488
+
2489
+ # The reopen command
2490
+ reopen_parser = subparsers.add_parser('reopen', help='Reopen a dataset or container (only for privileged users).')
2491
+ reopen_parser.set_defaults(function=reopen)
2492
+ reopen_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2493
+
2494
+ # The stat command
2495
+ stat_parser = subparsers.add_parser('stat', help='List attributes and statuses about data identifiers.')
2496
+ stat_parser.set_defaults(function=stat)
2497
+ stat_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2498
+
2499
+ # The erase command
2500
+ erase_parser = subparsers.add_parser('erase', help='Delete a data identifier.', description='This command sets the lifetime of the DID in order to expire in the next 24 hours.\
2501
+ After this time, the dataset is eligible for deletion. The deletion is not reversible after 24 hours grace time period expired.')
2502
+ erase_parser.set_defaults(function=erase)
2503
+ erase_parser.add_argument('--undo', dest='undo', action='store_true', default=False, help='Undo erase DIDs. Only works if has been less than 24 hours since erase operation.')
2504
+ erase_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2505
+
2506
+ # The list_files command
2507
+ list_files_parser = subparsers.add_parser('list-files', help='List DID contents', description='List all the files in a Data IDentifier. The DID can be a container, dataset or a file.\
2508
+ What is returned is a list of files in the DID with : <scope>:<name>\t<guid>\t<checksum>\t<filesize>')
2509
+ list_files_parser.set_defaults(function=list_files)
2510
+ list_files_parser.add_argument('--csv', dest='csv', action='store_true', default=False, help='Comma Separated Value output. This output format is preferred for easy parsing and scripting.')
2511
+ list_files_parser.add_argument('--pfc', dest='LOCALPATH', action='store', default=False, help='Outputs the list of files in the dataset with the LOCALPATH prepended as a PoolFileCatalog')
2512
+ list_files_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2513
+
2514
+ # The list_content command
2515
+ list_content_parser = subparsers.add_parser('list-content', help='List the content of a collection.')
2516
+ list_content_parser.set_defaults(function=list_content)
2517
+ list_content_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2518
+ list_content_parser.add_argument('--short', dest='short', action='store_true', help='Just dump the list of DIDs.')
2519
+
2520
+ # The list_content_history command
2521
+ list_content_history_parser = subparsers.add_parser('list-content-history', help='List the content history of a collection.')
2522
+ list_content_history_parser.set_defaults(function=list_content_history)
2523
+ list_content_history_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2524
+
2525
+ # The upload subparser
2526
+ upload_parser = subparsers.add_parser('upload', help='Upload method.')
2527
+ upload_parser.set_defaults(function=upload)
2528
+ upload_parser.add_argument('--rse', dest='rse', action='store', help='Rucio Storage Element (RSE) name.', required=True).completer = rse_completer
2529
+ upload_parser.add_argument('--lifetime', type=int, action='store', help='Lifetime of the rule in seconds.')
2530
+ upload_parser.add_argument('--expiration-date', action='store', help='The date when the rule expires in UTC, format: <year>-<month>-<day>-<hour>:<minute>:<second>. E.g. 2022-10-20-20:00:00')
2531
+ upload_parser.add_argument('--scope', dest='scope', action='store', help='Scope name.')
2532
+ upload_parser.add_argument('--impl', dest='impl', action='store', help='Transfer protocol implementation to use (e.g: xrootd, gfal.NoRename, webdav, ssh.Rsync, rclone).')
2533
+ # The --no-register option is hidden. This is pilot ONLY. Users should not use this. Will lead to unregistered data on storage!
2534
+ upload_parser.add_argument('--no-register', dest='no_register', action='store_true', default=False, help=argparse.SUPPRESS)
2535
+ upload_parser.add_argument('--register-after-upload', dest='register_after_upload', action='store_true', default=False, help='Register the file only after successful upload.')
2536
+ upload_parser.add_argument('--summary', dest='summary', action='store_true', default=False, help='Create rucio_upload.json summary file')
2537
+ upload_parser.add_argument('--guid', dest='guid', action='store', help='Manually specify the GUID for the file.')
2538
+ upload_parser.add_argument('--protocol', action='store', help='Force the protocol to use')
2539
+ upload_parser.add_argument('--pfn', dest='pfn', action='store', help='Specify the exact PFN for the upload.')
2540
+ upload_parser.add_argument('--name', dest='name', action='store', help='Specify the exact LFN for the upload.')
2541
+ upload_parser.add_argument('--transfer-timeout', dest='transfer_timeout', type=float, action='store', default=config_get_float('upload', 'transfer_timeout', False, 360), help='Transfer timeout (in seconds).')
2542
+ upload_parser.add_argument(dest='args', action='store', nargs='+', help='files and datasets.')
2543
+ upload_parser.add_argument('--recursive', dest='recursive', action='store_true', default=False, help='Convert recursively the folder structure into collections')
2544
+
2545
+ # The download and get subparser
2546
+ get_parser = subparsers.add_parser('get', help='Download method (synonym for download)')
2547
+ download_parser = subparsers.add_parser('download', help='Download method (synonym for get)')
2548
+ for selected_parser in [get_parser, download_parser]:
2549
+ selected_parser.set_defaults(function=download)
2550
+ selected_parser.add_argument('--dir', dest='dir', default='.', action='store', help='The directory to store the downloaded file.')
2551
+ selected_parser.add_argument(dest='dids', nargs='*', action='store', help='List of space separated data identifiers.')
2552
+ selected_parser.add_argument('--allow-tape', action='store_true', default=False, help="Also consider tape endpoints as source of the download.")
2553
+ selected_parser.add_argument('--rses', action='store', help='RSE Expression to specify allowed sources')
2554
+ selected_parser.add_argument('--impl', dest='impl', action='store', help='Transfer protocol implementation to use (e.g: xrootd, gfal.NoRename, webdav, ssh.Rsync, rclone).')
2555
+ selected_parser.add_argument('--protocol', action='store', help='Force the protocol to use.')
2556
+ selected_parser.add_argument('--nrandom', type=int, action='store', help='Download N random files from the DID.')
2557
+ selected_parser.add_argument('--ndownloader', type=int, default=3, action='store', help='Choose the number of parallel processes for download.')
2558
+ selected_parser.add_argument('--no-subdir', action='store_true', default=False, help="Don't create a subdirectory for the scope of the files.")
2559
+ selected_parser.add_argument('--pfn', dest='pfn', action='store', help="Specify the exact PFN for the download.")
2560
+ selected_parser.add_argument('--archive-did', action='store', dest='archive_did', help="Download from archive is transparent. This option is obsolete.")
2561
+ selected_parser.add_argument('--no-resolve-archives', action='store_true', default=False, help="If set archives will not be considered for download.")
2562
+ selected_parser.add_argument('--ignore-checksum', action='store_true', default=False, help="Don't validate checksum for downloaded files.")
2563
+ selected_parser.add_argument('--check-local-with-filesize-only', action='store_true', default=False, help="Don't use checksum verification for already downloaded files, use filesize instead.")
2564
+ selected_parser.add_argument('--transfer-timeout', dest='transfer_timeout', type=float, action='store', default=config_get_float('download', 'transfer_timeout', False, None), help='Transfer timeout (in seconds). Default: computed dynamically from --transfer-speed-timeout. If set to any value >= 0, --transfer-speed-timeout is ignored.') # NOQA: E501
2565
+ selected_parser.add_argument('--transfer-speed-timeout', dest='transfer_speed_timeout', type=float, action='store', default=None, help='Minimum allowed average transfer speed (in KBps). Default: 500. Used to dynamically compute the timeout if --transfer-timeout not set. Is not supported for --pfn.') # NOQA: E501
2566
+ selected_parser.add_argument('--aria', action='store_true', default=False, help="Use aria2c utility if possible. (EXPERIMENTAL)")
2567
+ selected_parser.add_argument('--trace_appid', '--trace-appid', new_option_string='--trace-appid', dest='trace_appid', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_APPID', None), help=argparse.SUPPRESS)
2568
+ selected_parser.add_argument('--trace_dataset', '--trace-dataset', new_option_string='--trace-dataset', dest='trace_dataset', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_DATASET', None), help=argparse.SUPPRESS)
2569
+ selected_parser.add_argument('--trace_datasetscope', '--trace-datasetscope', new_option_string='--trace-datasetscope', dest='trace_datasetscope', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_DATASETSCOPE', None), help=argparse.SUPPRESS) # NOQA: E501
2570
+ selected_parser.add_argument('--trace_eventtype', '--trace-eventtype', new_option_string='--trace-eventtype', dest='trace_eventtype', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_EVENTTYPE', None), help=argparse.SUPPRESS) # NOQA: E501
2571
+ selected_parser.add_argument('--trace_pq', '--trace-pq', new_option_string='--trace-pq', dest='trace_pq', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_PQ', None), help=argparse.SUPPRESS)
2572
+ selected_parser.add_argument('--trace_taskid', '--trace-taskid', new_option_string='--trace-taskid', dest='trace_taskid', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_TASKID', None), help=argparse.SUPPRESS)
2573
+ selected_parser.add_argument('--trace_usrdn', '--trace-usrdn', new_option_string='--trace-usrdn', dest='trace_usrdn', action=StoreAndDeprecateWarningAction, default=os.environ.get('RUCIO_TRACE_USRDN', None), help=argparse.SUPPRESS)
2574
+ selected_parser.add_argument('--filter', dest='filter', action='store', help='Filter files by key-value pairs like guid=2e2232aafac8324db452070304f8d745.')
2575
+ selected_parser.add_argument('--scope', dest='scope', action='store', help='Scope if you are using the filter option and no full DID.')
2576
+ selected_parser.add_argument('--metalink', dest='metalink_file', action='store', help='Path to a metalink file.')
2577
+ selected_parser.add_argument('--deactivate-file-download-exceptions', dest='deactivate_file_download_exceptions', action='store_true', help='Does not raise NoFilesDownloaded, NotAllFilesDownloaded or incorrect number of output queue files Exception.') # NOQA: E501
2578
+ selected_parser.add_argument('--replica-selection', dest='sort', action='store', help='Select the best replica using a replica sorting algorithm provided by replica sorter (e.g., random, geoip).')
2579
+
2580
+ # The get-metadata subparser
2581
+ get_metadata_parser = subparsers.add_parser('get-metadata', help='Get metadata for DIDs.')
2582
+ get_metadata_parser.set_defaults(function=get_metadata)
2583
+ get_metadata_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2584
+ get_metadata_parser.add_argument('--plugin', dest='plugin', action='store', help='Filter down to metadata from specific metadata plugin', required=False)
2585
+
2586
+ # The set-metadata subparser
2587
+ set_metadata_parser = subparsers.add_parser('set-metadata', help='set-metadata method')
2588
+ set_metadata_parser.set_defaults(function=set_metadata)
2589
+ set_metadata_parser.add_argument('--did', dest='did', action='store', help='Data identifier whose metadata will be set', required=True)
2590
+ set_metadata_parser.add_argument('--key', dest='key', action='store', help='Attribute key', required=True)
2591
+ set_metadata_parser.add_argument('--value', dest='value', action='store', help='Attribute value', required=True)
2592
+
2593
+ # delete-did-meta subparser
2594
+ delete_metadata_parser = subparsers.add_parser('delete-metadata', help='delete metadata')
2595
+ delete_metadata_parser.set_defaults(function=delete_metadata)
2596
+ delete_metadata_parser.add_argument('--did', dest='did', action='store', help='Data identifier to delete', required=True)
2597
+ delete_metadata_parser.add_argument('--key', dest='key', action='store', help='Attribute key', required=True)
2598
+
2599
+ # The list-rse-usage subparser
2600
+ list_rse_usage_parser = subparsers.add_parser('list-rse-usage', help='Shows the total/free/used space for a given RSE. This values can differ for different RSE source.')
2601
+ list_rse_usage_parser.set_defaults(function=list_rse_usage)
2602
+ list_rse_usage_parser.add_argument(dest='rse', action='store', help='Rucio Storage Element (RSE) name.').completer = rse_completer
2603
+ list_rse_usage_parser.add_argument('--history', dest='history', default=False, action='store', help='List RSE usage history. [Unimplemented]')
2604
+ list_rse_usage_parser.add_argument('--show-accounts', dest='show_accounts', action='store_true', default=False, help='List accounts usages of RSE')
2605
+
2606
+ # The list-account-usage subparser
2607
+ list_account_usage_parser = subparsers.add_parser('list-account-usage', help='Shows the space used, the quota limit and the quota left for an account for every RSE where the user have quota.')
2608
+ list_account_usage_parser.set_defaults(function=list_account_usage)
2609
+ list_account_usage_parser.add_argument(dest='usage_account', action='store', help='Account name.')
2610
+ list_account_usage_parser.add_argument('--rse', action='store', help='Show usage for only for this RSE.')
2611
+
2612
+ # The list-account-limits subparser
2613
+ list_account_limits_parser = subparsers.add_parser('list-account-limits', help='List quota limits for an account in every RSEs.')
2614
+ list_account_limits_parser.set_defaults(function=list_account_limits)
2615
+ list_account_limits_parser.add_argument('limit_account', action='store', help='The account name.')
2616
+ list_account_limits_parser.add_argument('--rse', dest='rse', action='store', help='If this option is given, the results are restricted to only this RSE.').completer = rse_completer
2617
+
2618
+ # Add replication rule subparser
2619
+ add_rule_parser = subparsers.add_parser('add-rule', help='Add replication rule.')
2620
+ add_rule_parser.set_defaults(function=add_rule)
2621
+ add_rule_parser.add_argument(dest='dids', action='store', nargs='+', help='DID(s) to apply the rule to')
2622
+ add_rule_parser.add_argument(dest='copies', action='store', type=int, help='Number of copies')
2623
+ add_rule_parser.add_argument(dest='rse_expression', action='store', help='RSE Expression')
2624
+ add_rule_parser.add_argument('--weight', dest='weight', action='store', help='RSE Weight')
2625
+ add_rule_parser.add_argument('--lifetime', dest='lifetime', action='store', type=int, help='Rule lifetime (in seconds)')
2626
+ add_rule_parser.add_argument('--grouping', dest='grouping', action='store', choices=['DATASET', 'ALL', 'NONE'], help='Rule grouping')
2627
+ add_rule_parser.add_argument('--locked', dest='locked', action='store_true', help='Rule locking')
2628
+ add_rule_parser.add_argument('--source-replica-expression', dest='source_replica_expression', action='store', help='RSE Expression for RSEs to be considered for source replicas')
2629
+ add_rule_parser.add_argument('--notify', dest='notify', action='store', help='Notification strategy : Y (Yes), N (No), C (Close)')
2630
+ add_rule_parser.add_argument('--activity', dest='activity', action='store', help='Activity to be used (e.g. User, Data Consolidation)')
2631
+ add_rule_parser.add_argument('--comment', dest='comment', action='store', help='Comment about the replication rule')
2632
+ add_rule_parser.add_argument('--ask-approval', dest='ask_approval', action='store_true', help='Ask for rule approval')
2633
+ add_rule_parser.add_argument('--asynchronous', dest='asynchronous', action='store_true', help='Create rule asynchronously')
2634
+ add_rule_parser.add_argument('--delay-injection', dest='delay_injection', action='store', type=int, help='Delay (in seconds) to wait before starting applying the rule. This option implies --asynchronous.')
2635
+ add_rule_parser.add_argument('--account', dest='rule_account', action='store', help='The account owning the rule')
2636
+ add_rule_parser.add_argument('--skip-duplicates', dest='ignore_duplicate', action='store_true', help='Skip duplicate rules')
2637
+
2638
+ # Delete replication rule subparser
2639
+ delete_rule_parser = subparsers.add_parser('delete-rule', help='Delete replication rule.')
2640
+ delete_rule_parser.set_defaults(function=delete_rule)
2641
+ delete_rule_parser.add_argument(dest='rule_id', action='store', help='Rule id or DID. If DID, the RSE expression is mandatory.')
2642
+ delete_rule_parser.add_argument('--purge-replicas', dest='purge_replicas', action='store_true', help='Purge rule replicas')
2643
+ delete_rule_parser.add_argument('--all', dest='delete_all', action='store_true', default=False, help='Delete all the rules, even the ones that are not owned by the account')
2644
+ delete_rule_parser.add_argument('--rses', dest='rses', action='store', help='The RSE expression. Must be specified if a DID is provided.')
2645
+ delete_rule_parser.add_argument('--account', dest='rule_account', action='store', help='The account of the rule that must be deleted')
2646
+
2647
+ # Info replication rule subparser
2648
+ info_rule_parser = subparsers.add_parser('rule-info', help='Retrieve information about a rule.')
2649
+ info_rule_parser.set_defaults(function=info_rule)
2650
+ info_rule_parser.add_argument(dest='rule_id', action='store', help='The rule ID')
2651
+ info_rule_parser.add_argument('--examine', dest='examine', action='store_true', help='Detailed analysis of transfer errors')
2652
+
2653
+ # The list_rules command
2654
+ list_rules_parser = subparsers.add_parser('list-rules', help='List replication rules.', formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2655
+ """""""""""""
2656
+
2657
+ You can list the rules for a particular DID::
2658
+
2659
+ $ rucio list-rules user.jdoe:user.jdoe.test.container.1234.1
2660
+ ID ACCOUNT SCOPE:NAME STATE[OK/REPL/STUCK] RSE_EXPRESSION COPIES EXPIRES (UTC)
2661
+ -------------------------------- --------- ----------------------------------------- ---------------------- ------------------ -------- -------------------
2662
+ a12e5664555a4f12b3cc6991db5accf9 jdoe user.jdoe:user.jdoe.test.container.1234.1 OK[3/0/0] tier=1&disk=1 1 2018-02-09 03:57:46
2663
+ b0fcde2acbdb489b874c3c4537595adc janedoe user.jdoe:user.jdoe.test.container.1234.1 REPLICATING[4/1/1] tier=1&tape=1 2
2664
+ 4a6bd85c13384bd6836fbc06e8b316d7 mc user.jdoe:user.jdoe.test.container.1234.1 OK[3/0/0] tier=1&tape=1 2
2665
+
2666
+ You can filter by account::
2667
+
2668
+ $ rucio list-rules --account jdoe
2669
+ ID ACCOUNT SCOPE:NAME STATE[OK/REPL/STUCK] RSE_EXPRESSION COPIES EXPIRES (UTC)
2670
+ -------------------------------- --------- ----------------------------------------- ---------------------- ------------------ -------- -------------------
2671
+ a12e5664555a4f12b3cc6991db5accf9 jdoe user.jdoe:user.jdoe.test.container.1234.1 OK[3/0/0] tier=1&disk=1 1 2018-02-09 03:57:46
2672
+ 08537b2176843d92e05317938a89d148 jdoe user.jdoe:user.jdoe.test.data.1234.1 OK[2/0/0] SITE2_DISK 1
2673
+
2674
+ ''')
2675
+
2676
+ list_rules_parser.set_defaults(function=list_rules)
2677
+ list_rules_parser.add_argument(dest='did', action='store', nargs='?', default=None, help='List by did')
2678
+ list_rules_parser.add_argument('--id', dest='rule_id', action='store', help='List by rule id')
2679
+ list_rules_parser.add_argument('--traverse', dest='traverse', action='store_true', help='Traverse the did tree and search for rules affecting this did')
2680
+ list_rules_parser.add_argument('--csv', dest='csv', action='store_true', default=False, help='Comma Separated Value output')
2681
+ list_rules_parser.add_argument('--file', dest='file', action='store', help='List associated rules of an affected file')
2682
+ list_rules_parser.add_argument('--account', dest='rule_account', action='store', help='List by account')
2683
+ list_rules_parser.add_argument('--subscription', dest='subscription', action='store', help='List by account and subscription name', metavar=('ACCOUNT', 'SUBSCRIPTION'), nargs=2)
2684
+
2685
+ # The list_rules_history command
2686
+ list_rules_history_parser = subparsers.add_parser('list-rules-history', help='List replication rules history for a DID.')
2687
+ list_rules_history_parser.set_defaults(function=list_rules_history)
2688
+ list_rules_history_parser.add_argument(dest='did', action='store', help='The Data IDentifier.')
2689
+
2690
+ # The update_rule command
2691
+ update_rule_parser = subparsers.add_parser('update-rule', help='Update replication rule.')
2692
+ update_rule_parser.set_defaults(function=update_rule)
2693
+ update_rule_parser.add_argument(dest='rule_id', action='store', help='Rule id')
2694
+ update_rule_parser.add_argument('--lifetime', dest='lifetime', action='store', help='Lifetime in seconds.')
2695
+ update_rule_parser.add_argument('--locked', dest='locked', action='store', help='Locked (True/False).')
2696
+ update_rule_parser.add_argument('--account', dest='rule_account', action='store', help='Account to change.')
2697
+ update_rule_parser.add_argument('--stuck', dest='state_stuck', action='store_true', help='Set state to STUCK.')
2698
+ update_rule_parser.add_argument('--suspend', dest='state_suspended', action='store_true', help='Set state to SUSPENDED.')
2699
+ update_rule_parser.add_argument('--activity', dest='rule_activity', action='store', help='Activity of the rule.')
2700
+ update_rule_parser.add_argument('--source-replica-expression', dest='source_replica_expression', action='store', help='Source replica expression of the rule.')
2701
+ update_rule_parser.add_argument('--comment', dest='comment', action='store', help="Update comment for the rule")
2702
+ update_rule_parser.add_argument('--cancel-requests', dest='cancel_requests', action='store_true', help='Cancel requests when setting rules to stuck.')
2703
+ update_rule_parser.add_argument('--priority', dest='priority', action='store', help='Priority of the requests of the rule.')
2704
+ update_rule_parser.add_argument('--child-rule-id', dest='child_rule_id', action='store', help='Child rule id of the rule. Use "None" to remove an existing parent/child relationship.')
2705
+ update_rule_parser.add_argument('--boost-rule', dest='boost_rule', action='store_true', help='Quickens the transition of a rule from STUCK to REPLICATING.')
2706
+
2707
+ # The move_rule command
2708
+ move_rule_parser = subparsers.add_parser('move-rule', help='Move a replication rule to another RSE.')
2709
+ move_rule_parser.set_defaults(function=move_rule)
2710
+ move_rule_parser.add_argument(dest='rule_id', action='store', help='Rule id')
2711
+ move_rule_parser.add_argument(dest='rse_expression', action='store', help='RSE expression of new rule')
2712
+ move_rule_parser.add_argument('--activity', dest='activity', action='store', help='Update activity for moved rule.')
2713
+ move_rule_parser.add_argument('--source-replica-expression', dest='source_replica_expression', action='store', help='Update source-replica-expression for moved rule. Use "None" to remove the old value.')
2714
+
2715
+ # The list-rses command
2716
+ list_rses_parser = subparsers.add_parser('list-rses', help='Show the list of all the registered Rucio Storage Elements (RSEs).')
2717
+ list_rses_parser.set_defaults(function=list_rses)
2718
+ list_rses_parser.add_argument('--rses', dest='rses', action='store', help='The RSE filter expression. A comprehensive help about RSE expressions \
2719
+ can be found in ' + Color.BOLD + 'https://rucio.cern.ch/documentation/started/concepts/rse_expressions' + Color.END)
2720
+ list_rses_parser.add_argument("--csv", action='store_true', help='Output a list of RSEs as a csv')
2721
+
2722
+ # The list-suspicious-replicas command
2723
+ list_suspicious_replicas_parser = subparsers.add_parser('list-suspicious-replicas', help='Show the list of all replicas marked "suspicious".')
2724
+ list_suspicious_replicas_parser.set_defaults(function=list_suspicious_replicas)
2725
+ list_suspicious_replicas_parser.add_argument('--expression', dest='rse_expression', action='store', help='The RSE filter expression. A comprehensive help about RSE expressions \
2726
+ can be found in ' + Color.BOLD + 'https://rucio.cern.ch/documentation/started/concepts/rse_expressions' + Color.END)
2727
+ list_suspicious_replicas_parser.add_argument('--younger_than', '--younger-than', new_option_string='--younger-than', dest='younger_than', action=StoreAndDeprecateWarningAction, help='List files that have been marked suspicious since the date "younger_than", e.g. 2021-11-29T00:00:00.') # NOQA: E501
2728
+ list_suspicious_replicas_parser.add_argument('--nattempts', dest='nattempts', action='store', help='Minimum number of failed attempts to access a suspicious file.')
2729
+
2730
+ # The list-rses-attributes command
2731
+ list_rse_attributes_parser = subparsers.add_parser('list-rse-attributes', help='List the attributes of an RSE.', description='This command is useful to create RSE filter expressions.')
2732
+ list_rse_attributes_parser.set_defaults(function=list_rse_attributes)
2733
+ list_rse_attributes_parser.add_argument(dest='rse', action='store', help='The RSE name').completer = rse_completer
2734
+
2735
+ # The list-datasets-rse command
2736
+ list_datasets_rse_parser = subparsers.add_parser('list-datasets-rse', help='List all the datasets at a RSE', description='This method allows to list all the datasets on a given Rucio Storage Element.\
2737
+ ' + Color.BOLD + 'Warning: ' + Color.END + 'This command can take a long time depending on the number of datasets in the RSE.')
2738
+ list_datasets_rse_parser.set_defaults(function=list_datasets_rse)
2739
+ list_datasets_rse_parser.add_argument(dest='rse', action='store', default=None, help='The RSE name').completer = rse_completer
2740
+ list_datasets_rse_parser.add_argument('--long', dest='long', action='store_true', default=False, help='The long option')
2741
+
2742
+ # The test-server command
2743
+ test_server_parser = subparsers.add_parser('test-rucio-server', help='Test Server', description='Run a bunch of tests against the Rucio Servers.')
2744
+ test_server_parser.set_defaults(function=test_server)
2745
+
2746
+ # The get-metadata subparser
2747
+ touch_parser = subparsers.add_parser('touch', help='Touch one or more DIDs and set the last accessed date to the current date')
2748
+ touch_parser.set_defaults(function=touch)
2749
+ touch_parser.add_argument(dest='dids', nargs='+', action='store', help='List of space separated data identifiers.')
2750
+ touch_parser.add_argument('--rse', dest='rse', action='store', help="The RSE of the DIDs that are touched.").completer = rse_completer
2751
+
2752
+ # The add-lifetime-exception command
2753
+ add_lifetime_exception_parser = subparsers.add_parser('add-lifetime-exception', help='Add an exception to the lifetime model.',
2754
+ formatter_class=argparse.RawDescriptionHelpFormatter, epilog='''Usage example
2755
+ """""""""""""
2756
+ ::
2757
+
2758
+ $ rucio add-lifetime-exception --inputfile myfile.txt --reason "Needed for my analysis" --expiration 2015-10-30
2759
+
2760
+ ''')
2761
+
2762
+ add_lifetime_exception_parser.set_defaults(function=add_lifetime_exception)
2763
+ add_lifetime_exception_parser.add_argument('--inputfile', action='store', help='File where the list of datasets requested to be extended are located.', required=True)
2764
+ add_lifetime_exception_parser.add_argument('--reason', action='store', help='The reason for the extension.', required=True)
2765
+ add_lifetime_exception_parser.add_argument('--expiration', action='store', help='The expiration date format YYYY-MM-DD', required=True)
2766
+
2767
+ return oparser
2768
+
2769
+
2770
def main():
    """Entry point of the legacy ``rucio`` CLI.

    Wires up the pager, rich console and spinner, parses the command line,
    builds a client, dispatches to the sub-command handler stored in
    ``args.function`` and exits with that handler's return code.
    """
    page_output = get_pager()
    console = Console(theme=Theme(CLITheme.LOG_THEMES), soft_wrap=True)
    console.width = max(MIN_CONSOLE_WIDTH, console.width)
    spinner = Status('Initializing spinner', spinner=CLITheme.SPINNER, spinner_style=CLITheme.SPINNER_STYLE, console=console)

    cli_args = sys.argv[1:]
    # Export RUCIO_CONFIG before anything else touches the configuration,
    # so a `--config FILE` on the command line wins over the environment.
    for position, token in enumerate(cli_args):
        if token == '--config' and position + 1 < len(cli_args):
            os.environ['RUCIO_CONFIG'] = cli_args[position + 1]

    oparser = get_parser()
    if EXTRA_MODULES['argcomplete']:
        argcomplete.autocomplete(oparser)

    # No arguments at all: show usage and fail.
    if not cli_args:
        oparser.print_help()
        sys.exit(FAILURE)

    args = oparser.parse_args(cli_args)

    if cli_config == 'rich':
        # Make rich exception tracebacks the default renderer.
        install(console=console, word_wrap=True, width=min(console.width, MAX_TRACEBACK_WIDTH))
        logger = setup_rich_logger(module_name=__name__, logger_name='user', verbose=args.verbose, console=console)
    else:
        logger = setup_logger(module_name=__name__, logger_name='user', verbose=args.verbose)

    setup_gfal2_logger()
    signal.signal(signal.SIGINT, lambda sig, frame: signal_handler(sig, frame, logger))

    started_at = time.time()
    client = get_client(args, logger)
    result = args.function(args, client, logger, console, spinner)
    # Formatting is pure, so the message can be built once up front.
    elapsed_message = "Completed in %-0.4f sec." % (time.time() - started_at)

    if cli_config == 'rich':
        spinner.stop()
        if console.is_terminal and not args.no_pager:
            captured = console.end_capture()
            if captured == '' and args.verbose:
                print(elapsed_message)
            else:
                if args.verbose:
                    captured += elapsed_message
                # Ignore SIGINT while the pager owns the terminal.
                signal.signal(signal.SIGINT, signal.SIG_IGN)
                page_output(captured)
    else:
        if args.verbose:
            print(elapsed_message)
    sys.exit(result)
2822
+
2823
+
2824
# Run the CLI only when this module is executed as a script, not on import.
if __name__ == '__main__':
    main()