rucio-clients 32.8.6__py3-none-any.whl → 35.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rucio-clients might be problematic.

Files changed (92)
  1. rucio/__init__.py +0 -1
  2. rucio/alembicrevision.py +1 -2
  3. rucio/client/__init__.py +0 -1
  4. rucio/client/accountclient.py +45 -25
  5. rucio/client/accountlimitclient.py +37 -9
  6. rucio/client/baseclient.py +199 -154
  7. rucio/client/client.py +2 -3
  8. rucio/client/configclient.py +19 -6
  9. rucio/client/credentialclient.py +9 -4
  10. rucio/client/didclient.py +238 -63
  11. rucio/client/diracclient.py +13 -5
  12. rucio/client/downloadclient.py +162 -51
  13. rucio/client/exportclient.py +4 -4
  14. rucio/client/fileclient.py +3 -4
  15. rucio/client/importclient.py +4 -4
  16. rucio/client/lifetimeclient.py +21 -5
  17. rucio/client/lockclient.py +18 -8
  18. rucio/client/{metaclient.py → metaconventionsclient.py} +18 -15
  19. rucio/client/pingclient.py +0 -1
  20. rucio/client/replicaclient.py +15 -5
  21. rucio/client/requestclient.py +35 -19
  22. rucio/client/rseclient.py +133 -51
  23. rucio/client/ruleclient.py +29 -22
  24. rucio/client/scopeclient.py +8 -6
  25. rucio/client/subscriptionclient.py +47 -35
  26. rucio/client/touchclient.py +8 -4
  27. rucio/client/uploadclient.py +166 -82
  28. rucio/common/__init__.py +0 -1
  29. rucio/common/cache.py +4 -4
  30. rucio/common/config.py +52 -47
  31. rucio/common/constants.py +69 -2
  32. rucio/common/constraints.py +0 -1
  33. rucio/common/didtype.py +24 -22
  34. rucio/common/exception.py +281 -222
  35. rucio/common/extra.py +0 -1
  36. rucio/common/logging.py +54 -38
  37. rucio/common/pcache.py +122 -101
  38. rucio/common/plugins.py +153 -0
  39. rucio/common/policy.py +4 -4
  40. rucio/common/schema/__init__.py +17 -10
  41. rucio/common/schema/atlas.py +7 -5
  42. rucio/common/schema/belleii.py +7 -5
  43. rucio/common/schema/domatpc.py +7 -5
  44. rucio/common/schema/escape.py +7 -5
  45. rucio/common/schema/generic.py +8 -6
  46. rucio/common/schema/generic_multi_vo.py +7 -5
  47. rucio/common/schema/icecube.py +7 -5
  48. rucio/common/stomp_utils.py +0 -1
  49. rucio/common/stopwatch.py +0 -1
  50. rucio/common/test_rucio_server.py +2 -2
  51. rucio/common/types.py +262 -17
  52. rucio/common/utils.py +743 -451
  53. rucio/rse/__init__.py +3 -4
  54. rucio/rse/protocols/__init__.py +0 -1
  55. rucio/rse/protocols/bittorrent.py +184 -0
  56. rucio/rse/protocols/cache.py +1 -2
  57. rucio/rse/protocols/dummy.py +1 -2
  58. rucio/rse/protocols/gfal.py +12 -10
  59. rucio/rse/protocols/globus.py +7 -7
  60. rucio/rse/protocols/gsiftp.py +2 -3
  61. rucio/rse/protocols/http_cache.py +1 -2
  62. rucio/rse/protocols/mock.py +1 -2
  63. rucio/rse/protocols/ngarc.py +1 -2
  64. rucio/rse/protocols/posix.py +12 -13
  65. rucio/rse/protocols/protocol.py +116 -52
  66. rucio/rse/protocols/rclone.py +6 -7
  67. rucio/rse/protocols/rfio.py +4 -5
  68. rucio/rse/protocols/srm.py +9 -10
  69. rucio/rse/protocols/ssh.py +8 -9
  70. rucio/rse/protocols/storm.py +2 -3
  71. rucio/rse/protocols/webdav.py +17 -14
  72. rucio/rse/protocols/xrootd.py +23 -17
  73. rucio/rse/rsemanager.py +19 -7
  74. rucio/vcsversion.py +4 -4
  75. rucio/version.py +5 -13
  76. rucio_clients-35.8.0.data/data/requirements.client.txt +15 -0
  77. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/rucio_client/merge_rucio_configs.py +2 -5
  78. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/scripts/rucio +87 -85
  79. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/scripts/rucio-admin +45 -32
  80. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/METADATA +13 -13
  81. rucio_clients-35.8.0.dist-info/RECORD +88 -0
  82. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/WHEEL +1 -1
  83. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/licenses/AUTHORS.rst +3 -0
  84. rucio/common/schema/cms.py +0 -478
  85. rucio/common/schema/lsst.py +0 -423
  86. rucio_clients-32.8.6.data/data/requirements.txt +0 -55
  87. rucio_clients-32.8.6.dist-info/RECORD +0 -88
  88. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rse-accounts.cfg.template +0 -0
  89. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rucio.cfg.atlas.client.template +0 -0
  90. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rucio.cfg.template +0 -0
  91. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/licenses/LICENSE +0 -0
  92. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/top_level.txt +0 -0
rucio/client/diracclient.py

@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Copyright European Organization for Nuclear Research (CERN) since 2012
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,13 +13,16 @@
  # limitations under the License.

  from json import dumps
+ from typing import TYPE_CHECKING, Any, Literal, Optional

  from requests.status_codes import codes

- from rucio.client.baseclient import BaseClient
- from rucio.client.baseclient import choice
+ from rucio.client.baseclient import BaseClient, choice
  from rucio.common.utils import build_url

+ if TYPE_CHECKING:
+     from collections.abc import Iterable, Mapping
+

  class DiracClient(BaseClient):

@@ -28,7 +30,12 @@ class DiracClient(BaseClient):

      DIRAC_BASEURL = 'dirac'

-     def add_files(self, lfns, ignore_availability=False):
+     def add_files(
+             self,
+             lfns: "Iterable[Mapping[str, Any]]",
+             ignore_availability: bool = False,
+             parents_metadata: Optional["Mapping[str, Mapping[str, Any]]"] = None
+     ) -> Literal[True]:
          """
          Bulk add files :
          - Create the file and replica.
@@ -37,10 +44,11 @@ class DiracClient(BaseClient):

          :param lfns: List of lfn (dictionary {'lfn': <lfn>, 'rse': <rse>, 'bytes': <bytes>, 'adler32': <adler32>, 'guid': <guid>, 'pfn': <pfn>}
          :param ignore_availability: A boolean to ignore blocked sites.
+         :param parents_metadata: Metadata for selected hierarchy DIDs. (dictionary {'lpn': {key : value}}). Default=None
          """
          path = '/'.join([self.DIRAC_BASEURL, 'addfiles'])
          url = build_url(choice(self.list_hosts), path=path)
-         r = self._send_request(url, type_='POST', data=dumps({'lfns': lfns, 'ignore_availability': ignore_availability}))
+         r = self._send_request(url, type_='POST', data=dumps({'lfns': lfns, 'ignore_availability': ignore_availability, 'parents_metadata': parents_metadata}))
          if r.status_code == codes.created:
              return True
          else:
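The new parents_metadata argument is simply appended to the POST payload alongside lfns and ignore_availability. A minimal usage sketch, assuming a working Rucio client configuration and server; the LFN, RSE, checksum, GUID, PFN and metadata values below are placeholders:

    from rucio.client.diracclient import DiracClient

    client = DiracClient()
    lfns = [{
        'lfn': '/belle/data/example.root',    # placeholder logical file name
        'rse': 'SOME_RSE',                    # placeholder RSE name
        'bytes': 1024,
        'adler32': '0a1b2c3d',                # placeholder checksum
        'guid': '40a6e16eabc64b2ea1d8e5f3b369d9f6',
        'pfn': 'root://storage.example.org//belle/data/example.root',
    }]
    # Per-parent metadata keyed by logical path name, as described in the docstring
    client.add_files(
        lfns,
        ignore_availability=False,
        parents_metadata={'/belle/data': {'owner': 'analysis-group'}},
    )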
rucio/client/downloadclient.py

@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Copyright European Organization for Nuclear Research (CERN) since 2012
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -19,24 +18,31 @@ import itertools
  import logging
  import os
  import random
+ import secrets
  import shutil
  import signal
  import subprocess
  import time
- from queue import Queue, Empty, deque
+ from queue import Empty, Queue, deque
  from threading import Thread
+ from typing import TYPE_CHECKING, Any, Optional

  from rucio import version
  from rucio.client.client import Client
  from rucio.common.config import config_get
  from rucio.common.didtype import DID
- from rucio.common.exception import (InputValidationError, NoFilesDownloaded, NotAllFilesDownloaded, RucioException)
+ from rucio.common.exception import InputValidationError, NoFilesDownloaded, NotAllFilesDownloaded, RucioException
  from rucio.common.pcache import Pcache
- from rucio.common.utils import GLOBALLY_SUPPORTED_CHECKSUMS, CHECKSUM_ALGO_DICT, PREFERRED_CHECKSUM
- from rucio.common.utils import adler32, detect_client_location, generate_uuid, parse_replicas_from_string, \
-     send_trace, sizefmt, execute, parse_replicas_from_file, extract_scope
+ from rucio.common.utils import CHECKSUM_ALGO_DICT, GLOBALLY_SUPPORTED_CHECKSUMS, PREFERRED_CHECKSUM, adler32, detect_client_location, execute, extract_scope, generate_uuid, parse_replicas_from_file, parse_replicas_from_string, send_trace, sizefmt
  from rucio.rse import rsemanager as rsemgr

+ if TYPE_CHECKING:
+     from collections.abc import Iterable, Iterator
+     from xmlrpc.client import ServerProxy as RPCServerProxy
+
+     from rucio.common.constants import SORTING_ALGORITHMS_LITERAL
+     from rucio.common.types import LoggerFunction
+

  @enum.unique
  class FileDownloadState(str, enum.Enum):
@@ -55,7 +61,13 @@ class FileDownloadState(str, enum.Enum):

  class BaseExtractionTool:

-     def __init__(self, program_name, useability_check_args, extract_args, logger=logging.log):
+     def __init__(
+             self,
+             program_name: str,
+             useability_check_args: str,
+             extract_args: str,
+             logger: "LoggerFunction" = logging.log
+     ):
          """
          Initialises a extraction tool object

@@ -70,7 +82,7 @@ class BaseExtractionTool:
          self.logger = logger
          self.is_useable_result = None

-     def is_useable(self):
+     def is_useable(self) -> bool:
          """
          Checks if the extraction tool is installed and usable

@@ -90,7 +102,12 @@ class BaseExtractionTool:
              self.logger(logging.DEBUG, error)
          return self.is_usable_result

-     def try_extraction(self, archive_file_path, file_to_extract, dest_dir_path):
+     def try_extraction(
+             self,
+             archive_file_path: str,
+             file_to_extract: str,
+             dest_dir_path: str
+     ) -> bool:
          """
          Calls the extraction program to extract a file from an archive

@@ -120,7 +137,14 @@ class BaseExtractionTool:

  class DownloadClient:

-     def __init__(self, client=None, logger=None, tracing=True, check_admin=False, check_pcache=False):
+     def __init__(
+             self,
+             client: Optional[Client] = None,
+             logger: Optional["LoggerFunction"] = None,
+             tracing: bool = True,
+             check_admin: bool = False,
+             check_pcache: bool = False
+     ):
          """
          Initialises the basic settings for an DownloadClient object

@@ -129,13 +153,19 @@
          :param logger: Optional: logging.Logger object. If None, default logger will be used.
          """
          self.check_pcache = check_pcache
-         if not logger:
+         if logger is None:
              self.logger = logging.log
          else:
-             self.logger = logger.log
+             if hasattr(logger, "debug"):
+                 self.logger = logger.log
+             else:
+                 self.logger = logger
+
          self.tracing = tracing
+
          if not self.tracing:
-             logger(logging.DEBUG, 'Tracing is turned off.')
+             self.logger(logging.DEBUG, 'Tracing is turned off.')
+
          self.is_human_readable = True
          self.client = client if client else Client()
          # if token should be used, use only JWT tokens
@@ -153,7 +183,7 @@
                      break
          if self.is_admin:
              self.is_tape_excluded = False
-             logger(logging.DEBUG, 'Admin mode enabled')
+             self.logger(logging.DEBUG, 'Admin mode enabled')

          self.trace_tpl = {}
          self.trace_tpl['hostname'] = self.client_location['fqdn']
@@ -176,7 +206,14 @@
          self.extraction_tools.append(BaseExtractionTool('tar', '--version', extract_args, logger=self.logger))
          self.extract_scope_convention = config_get('common', 'extract_scope', False, None)

-     def download_pfns(self, items, num_threads=2, trace_custom_fields={}, traces_copy_out=None, deactivate_file_download_exceptions=False):
+     def download_pfns(
+             self,
+             items: list[dict[str, Any]],
+             num_threads: int = 2,
+             trace_custom_fields: Optional[dict[str, Any]] = None,
+             traces_copy_out: Optional[list[dict[str, Any]]] = None,
+             deactivate_file_download_exceptions: bool = False
+     ) -> list[dict[str, Any]]:
          """
          Download items with a given PFN. This function can only download files, no datasets.

@@ -204,6 +241,7 @@
          :raises NotAllFilesDownloaded: if not all files could be downloaded
          :raises RucioException: if something unexpected went wrong during the download
          """
+         trace_custom_fields = trace_custom_fields or {}
          logger = self.logger
          trace_custom_fields['uuid'] = generate_uuid()

@@ -252,8 +290,15 @@

          return self._check_output(output_items, deactivate_file_download_exceptions=deactivate_file_download_exceptions)

-     def download_dids(self, items, num_threads=2, trace_custom_fields={}, traces_copy_out=None,
-                       deactivate_file_download_exceptions=False, sort=None):
+     def download_dids(
+             self,
+             items: list[dict[str, Any]],
+             num_threads: int = 2,
+             trace_custom_fields: Optional[dict[str, Any]] = None,
+             traces_copy_out: Optional[list[dict[str, Any]]] = None,
+             deactivate_file_download_exceptions: bool = False,
+             sort: Optional["SORTING_ALGORITHMS_LITERAL"] = None
+     ) -> list[dict[str, Any]]:
          """
          Download items with given DIDs. This function can also download datasets and wildcarded DIDs.

@@ -267,7 +312,7 @@
              force_scheme - Optional: force a specific scheme to download this item. (Default: None)
              base_dir - Optional: base directory where the downloaded files will be stored. (Default: '.')
              no_subdir - Optional: If true, files are written directly into base_dir. (Default: False)
-             nrandom - Optional: if the DID addresses a dataset, nrandom files will be randomly choosen for download from the dataset
+             nrandom - Optional: if the DID addresses a dataset, nrandom files will be randomly chosen for download from the dataset
              ignore_checksum - Optional: If true, skips the checksum validation between the downloaded file and the rucio catalouge. (Default: False)
              transfer_timeout - Optional: Timeout time for the download protocols. (Default: None)
              transfer_speed_timeout - Optional: Minimum allowed transfer speed (in KBps). Ignored if transfer_timeout set. Otherwise, used to compute default timeout (Default: 500)
@@ -288,6 +333,7 @@
          :raises NotAllFilesDownloaded: if not all files could be downloaded
          :raises RucioException: if something unexpected went wrong during the download
          """
+         trace_custom_fields = trace_custom_fields or {}
          logger = self.logger
          trace_custom_fields['uuid'] = generate_uuid()

@@ -306,7 +352,15 @@

          return self._check_output(output_items, deactivate_file_download_exceptions=deactivate_file_download_exceptions)

-     def download_from_metalink_file(self, item, metalink_file_path, num_threads=2, trace_custom_fields={}, traces_copy_out=None, deactivate_file_download_exceptions=False):
+     def download_from_metalink_file(
+             self,
+             item: dict[str, Any],
+             metalink_file_path: str,
+             num_threads: int = 2,
+             trace_custom_fields: Optional[dict[str, Any]] = None,
+             traces_copy_out: Optional[list[dict[str, Any]]] = None,
+             deactivate_file_download_exceptions: bool = False
+     ) -> list[dict[str, Any]]:
          """
          Download items using a given metalink file.

@@ -329,6 +383,7 @@
          :raises NotAllFilesDownloaded: if not all files could be downloaded
          :raises RucioException: if something unexpected went wrong during the download
          """
+         trace_custom_fields = trace_custom_fields or {}
          logger = self.logger

          logger(logging.INFO, 'Getting sources from metalink file')
@@ -353,7 +408,13 @@

          return self._check_output(output_items, deactivate_file_download_exceptions=deactivate_file_download_exceptions)

-     def _download_multithreaded(self, input_items, num_threads, trace_custom_fields={}, traces_copy_out=None):
+     def _download_multithreaded(
+             self,
+             input_items: list[dict[str, Any]],
+             num_threads: int,
+             trace_custom_fields: Optional[dict[str, Any]] = None,
+             traces_copy_out: Optional[list[dict[str, Any]]] = None
+     ) -> list[dict[str, Any]]:
          """
          Starts an appropriate number of threads to download items from the input list.
          (This function is meant to be used as class internal only)
@@ -365,6 +426,7 @@

          :returns: list with output items as dictionaries
          """
+         trace_custom_fields = trace_custom_fields or {}
          logger = self.logger

          num_files = len(input_items)
@@ -408,7 +470,14 @@
                  thread.kill_received = True
          return list(output_queue.queue)

-     def _download_worker(self, input_queue, output_queue, trace_custom_fields, traces_copy_out, log_prefix):
+     def _download_worker(
+             self,
+             input_queue: Queue,
+             output_queue: Queue,
+             trace_custom_fields: dict[str, Any],
+             traces_copy_out: Optional[list[dict[str, Any]]],
+             log_prefix: str
+     ) -> None:
          """
          This function runs as long as there are items in the input queue,
          downloads them and stores the output in the output queue.
@@ -444,7 +513,7 @@
                  output_queue.put(item)

      @staticmethod
-     def _compute_actual_transfer_timeout(item):
+     def _compute_actual_transfer_timeout(item: dict[str, Any]) -> int:
          """
          Merge the two options related to timeout into the value which will be used for protocol download.
          :param item: dictionary that describes the item to download
@@ -456,11 +525,11 @@
          # establishing connections and download of small files
          transfer_speed_timeout_static_increment = 60

-         transfer_timeout = item.get('merged_options', {}).get('transfer_timeout')
+         transfer_timeout: Optional[int] = item.get('merged_options', {}).get('transfer_timeout')
          if transfer_timeout is not None:
              return transfer_timeout

-         transfer_speed_timeout = item.get('merged_options', {}).get('transfer_speed_timeout')
+         transfer_speed_timeout: Optional[int] = item.get('merged_options', {}).get('transfer_speed_timeout')
          bytes_ = item.get('bytes')
          if not bytes_ or transfer_speed_timeout is None:
              return default_transfer_timeout
@@ -473,7 +542,13 @@
          timeout = bytes_ // transfer_speed_timeout + transfer_speed_timeout_static_increment
          return timeout

-     def _download_item(self, item, trace, traces_copy_out, log_prefix=''):
+     def _download_item(
+             self,
+             item: dict[str, Any],
+             trace: dict[str, Any],
+             traces_copy_out: Optional[list[dict[str, Any]]],
+             log_prefix: str = ''
+     ) -> dict[str, Any]:
          """
          Downloads the given item and sends traces for success/failure.
          (This function is meant to be used as class internal only)
@@ -673,10 +748,10 @@

          # if the file was downloaded with success, it can be linked to pcache
          if pcache:
-             logger(logging.INFO, 'File %s is going to be registerred into pcache.' % dest_file_path)
+             logger(logging.INFO, 'File %s is going to be registered into pcache.' % dest_file_path)
              try:
                  pcache_state, hardlink_state = pcache.check_and_link(src=pfn, storage_root=storage_prefix, local_src=first_dest_file_path)
-                 logger(logging.INFO, 'File %s is now registerred into pcache.' % first_dest_file_path)
+                 logger(logging.INFO, 'File %s is now registered into pcache.' % first_dest_file_path)
              except Exception as e:
                  logger(logging.WARNING, 'Failed to load file to pcache: %s' % str(e))

@@ -732,7 +807,14 @@

          return item

-     def download_aria2c(self, items, trace_custom_fields={}, filters={}, deactivate_file_download_exceptions=False, sort=None):
+     def download_aria2c(
+             self,
+             items: list[dict[str, Any]],
+             trace_custom_fields: Optional[dict[str, Any]] = None,
+             filters: Optional[dict[str, Any]] = None,
+             deactivate_file_download_exceptions: bool = False,
+             sort: Optional["SORTING_ALGORITHMS_LITERAL"] = None
+     ) -> list[dict[str, Any]]:
          """
          Uses aria2c to download the items with given DIDs. This function can also download datasets and wildcarded DIDs.
          It only can download files that are available via https/davs.
@@ -743,7 +825,7 @@
              rse - Optional: rse name (e.g. 'CERN-PROD_DATADISK') or rse expression from where to download
              base_dir - Optional: base directory where the downloaded files will be stored. (Default: '.')
              no_subdir - Optional: If true, files are written directly into base_dir. (Default: False)
-             nrandom - Optional: if the DID addresses a dataset, nrandom files will be randomly choosen for download from the dataset
+             nrandom - Optional: if the DID addresses a dataset, nrandom files will be randomly chosen for download from the dataset
              ignore_checksum - Optional: If true, skips the checksum validation between the downloaded file and the rucio catalouge. (Default: False)
              check_local_with_filesize_only - Optional: If true, already downloaded files will not be validated by checksum.

@@ -762,10 +844,12 @@
          :raises NotAllFilesDownloaded: if not all files could be downloaded
          :raises RucioException: if something went wrong during the download (e.g. aria2c could not be started)
          """
+         trace_custom_fields = trace_custom_fields or {}
+         filters = filters or {}
          logger = self.logger
          trace_custom_fields['uuid'] = generate_uuid()

-         rpc_secret = '%x' % (random.getrandbits(64))
+         rpc_secret = '%x' % (secrets.randbits(64))
          rpc_auth = 'token:%s' % rpc_secret
          rpcproc, aria_rpc = self._start_aria2c_rpc(rpc_secret)

@@ -792,7 +876,7 @@

          return self._check_output(output_items, deactivate_file_download_exceptions=deactivate_file_download_exceptions)

-     def _start_aria2c_rpc(self, rpc_secret):
+     def _start_aria2c_rpc(self, rpc_secret: str) -> tuple[subprocess.Popen, "RPCServerProxy"]:
          """
          Starts aria2c in RPC mode as a subprocess. Also creates
          the RPC proxy instance.
@@ -800,7 +884,7 @@

          :param rpc_secret: the secret for the RPC proxy

-         :returns: a tupel with the process and the rpc proxy objects
+         :returns: a tuple with the process and the rpc proxy objects

          :raises RucioException: if the process or the proxy could not be created
          """
@@ -826,7 +910,7 @@

          # trying up to 3 random ports
          for attempt in range(3):
-             port = random.randint(1024, 65534)
+             port = random.randint(1024, 65534)  # noqa: S311
              logger(logging.DEBUG, 'Trying to start rpc server on port: %d' % port)
              try:
                  to_exec = cmd % (os.getpid(), rpc_secret, port)
@@ -862,7 +946,13 @@
              raise RucioException('Failed to initialise rpc proxy!', error)
          return (rpcproc, aria_rpc)

-     def _download_items_aria2c(self, items, aria_rpc, rpc_auth, trace_custom_fields={}):
+     def _download_items_aria2c(
+             self,
+             items: list[dict[str, Any]],
+             aria_rpc: Any,
+             rpc_auth: str,
+             trace_custom_fields: Optional[dict[str, Any]] = None
+     ) -> list[dict[str, Any]]:
          """
          Uses aria2c to download the given items. Aria2c needs to be started
          as RPC background process first and a RPC proxy is needed.
@@ -875,6 +965,7 @@

          :returns: a list of dictionaries with an entry for each file, containing the input options, the did, and the clientState
          """
+         trace_custom_fields = trace_custom_fields or {}
          logger = self.logger

          gid_to_item = {}  # maps an aria2c download id (gid) to the download item
@@ -960,7 +1051,7 @@
                  # workaround: only consider first dest file path for aria2c download
                  dest_file_path = next(iter(item['dest_file_paths']))

-                 # ensure we didnt miss the active state (e.g. a very fast download)
+                 # ensure we didn't miss the active state (e.g. a very fast download)
                  start_time = item.setdefault('transferStart', time.time())
                  end_time = item.setdefault('transferEnd', time.time())

@@ -1019,7 +1110,7 @@

          return items

-     def _resolve_one_item_dids(self, item):
+     def _resolve_one_item_dids(self, item: dict[str, Any]) -> "Iterator[dict[str, Any]]":
          """
          Resolve scopes or wildcard DIDs to lists of full did names:
          :param item: One input item
@@ -1052,7 +1143,11 @@
          if not any_did_resolved and '*' not in did_name:
              yield {'scope': scope, 'name': did_name}

-     def _resolve_and_merge_input_items(self, input_items, sort=None):
+     def _resolve_and_merge_input_items(
+             self,
+             input_items: list[dict[str, Any]],
+             sort: Optional["SORTING_ALGORITHMS_LITERAL"] = None
+     ) -> tuple[dict[str, Any], list[dict[str, Any]]]:
          """
          This function takes the input items given to download_dids etc.
          and resolves the sources.
@@ -1095,7 +1190,7 @@
              self.extraction_tools = [tool for tool in self.extraction_tools if tool.is_useable()]
              if len(self.extraction_tools) < 1:
                  logger(logging.WARNING, 'Archive resolution is enabled but no extraction tool is available. '
-                                         'Sources whose protocol doesnt support extraction wont be considered for download.')
+                                         'Sources whose protocol does not support extraction will not be considered for download.')

          # if excluding tapes, we need to list them first
          tape_rses = []
@@ -1112,7 +1207,7 @@
          for item in input_items:
              resolved_dids = list(self._resolve_one_item_dids(item))
              if not resolved_dids:
-                 logger(logging.WARNING, 'An item didnt have any DIDs after resolving the input: %s.' % item.get('did', item))
+                 logger(logging.WARNING, 'An item did not have any DIDs after resolving the input: %s.' % item.get('did', item))
              item['dids'] = resolved_dids
              for did in resolved_dids:
                  did_to_input_items.setdefault(DID(did), []).append(item)
@@ -1184,7 +1279,7 @@
                                                       resolve_parents=True,
                                                       nrandom=nrandom,
                                                       metalink=True)
-             file_items = parse_replicas_from_string(metalink_str)
+             file_items = parse_replicas_from_string(metalink_str)  # type: ignore
              for file in file_items:
                  if impl:
                      file['impl'] = impl
@@ -1229,7 +1324,7 @@

          return did_to_input_items, merged_items_with_sources

-     def _options_from_input_items(self, input_items):
+     def _options_from_input_items(self, input_items: "Iterable[dict[str, Any]]") -> dict[str, Any]:
          """
          Best-effort generation of download options from multiple input items which resolve to the same file DID.
          This is done to download each file DID only once, even if it is requested multiple times via overlapping
@@ -1270,7 +1365,11 @@
              options['transfer_speed_timeout'] = float(new_transfer_speed_timeout)
          return options

-     def _prepare_items_for_download(self, did_to_input_items, file_items):
+     def _prepare_items_for_download(
+             self,
+             did_to_input_items: dict[str, Any],
+             file_items: list[dict[str, Any]]
+     ) -> list[dict[str, Any]]:
          """
          Optimises the amount of files to download
          (This function is meant to be used as class internal only)
@@ -1344,7 +1443,7 @@
              file_item['dest_file_paths'] = list(dest_file_paths)
              file_item['temp_file_path'] = '%s.part' % file_item['dest_file_paths'][0]

-             # the file did str ist not an unique key for this dict because multiple calls of list_replicas
+             # the file did str is not an unique key for this dict because multiple calls of list_replicas
              # could result in the same DID multiple times. So we're using the id of the dictionary objects
              fiid = id(file_item)
              fiid_to_file_item[fiid] = file_item
@@ -1497,12 +1596,12 @@
              download_packs.append(file_item)
          return download_packs

-     def _split_did_str(self, did_str):
+     def _split_did_str(self, did_str: str) -> tuple[str, str]:
          """
          Splits a given DID string (e.g. 'scope1:name.file') into its scope and name part
          (This function is meant to be used as class internal only)

-         :param did_str: the DID string that will be splitted
+         :param did_str: the DID string that will be split

          :returns: the scope- and name part of the given DID

@@ -1530,7 +1629,12 @@

          return did_scope, did_name

-     def _prepare_dest_dir(self, base_dir, dest_dir_name, no_subdir):
+     def _prepare_dest_dir(
+             self,
+             base_dir: str,
+             dest_dir_name: str,
+             no_subdir: Optional[bool]
+     ) -> str:
          """
          Builds the final destination path for a file and creates the
          destination directory if it's not existent.
@@ -1540,7 +1644,7 @@
          :param dest_dir_name: name of the destination directory
          :param no_subdir: if no subdirectory should be created

-         :returns: the absolut path of the destination directory
+         :returns: the absolute path of the destination directory
          """
          # append dest_dir_name, if subdir should be used
          if dest_dir_name.startswith('/'):
@@ -1552,7 +1656,11 @@

          return dest_dir_path

-     def _check_output(self, output_items, deactivate_file_download_exceptions=False):
+     def _check_output(
+             self,
+             output_items: list[dict[str, Any]],
+             deactivate_file_download_exceptions: bool = False
+     ) -> list[dict[str, Any]]:
          """
          Checks if all files were successfully downloaded
          (This function is meant to be used as class internal only)
@@ -1582,7 +1690,7 @@

          return output_items

-     def _send_trace(self, trace):
+     def _send_trace(self, trace: dict[str, Any]) -> None:
          """
          Checks if sending trace is allowed and send the trace.

@@ -1591,7 +1699,7 @@
          if self.tracing:
              send_trace(trace, self.client.trace_host, self.client.user_agent)

-     def preferred_impl(self, sources):
+     def preferred_impl(self, sources: list[dict[str, Any]]) -> Optional[str]:
          """
          Finds the optimum protocol impl preferred by the client and
          supported by the remote RSE.
@@ -1655,7 +1763,10 @@
                  return supported_impl


- def _verify_checksum(item, path):
+ def _verify_checksum(
+         item: dict[str, Any],
+         path: str
+ ) -> tuple[bool, Optional[str], Optional[str]]:
      rucio_checksum = item.get(PREFERRED_CHECKSUM)
      local_checksum = None
      checksum_algo = CHECKSUM_ALGO_DICT.get(PREFERRED_CHECKSUM)
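Two behavioural points stand out in this diff: the constructor now accepts either a logging.Logger or a bare logging callable such as logging.log (it picks logger.log only when the object exposes a .debug attribute), and the mutable default trace_custom_fields={} arguments were replaced with None, with download_dids gaining a typed sort parameter. A minimal sketch, assuming a working Rucio client configuration; the DID, RSE, and directory below are placeholders, and 'geoip' is assumed to be one of the accepted sorting algorithms:

    import logging

    from rucio.client.downloadclient import DownloadClient

    # A logging.Logger instance or a plain callable like logging.log are both accepted now
    download_client = DownloadClient(logger=logging.getLogger('rucio-download'))

    items = [{
        'did': 'user.jdoe:test.dataset',   # placeholder DID
        'rse': 'SOME_RSE',                 # placeholder RSE expression
        'base_dir': '/tmp/downloads',
        'transfer_timeout': 360,
    }]
    # trace_custom_fields now defaults to None instead of a shared mutable dict
    results = download_client.download_dids(items, num_threads=4, sort='geoip')
    for entry in results:
        print(entry['did'], entry.get('clientState'))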
rucio/client/exportclient.py

@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Copyright European Organization for Nuclear Research (CERN) since 2012
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,11 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ from typing import Any
+
  from requests.status_codes import codes

- from rucio.client.baseclient import BaseClient
- from rucio.client.baseclient import choice
+ from rucio.client.baseclient import BaseClient, choice
  from rucio.common.utils import build_url, parse_response


@@ -25,7 +25,7 @@ class ExportClient(BaseClient):

      EXPORT_BASEURL = 'export'

-     def export_data(self, distance=True):
+     def export_data(self, distance: bool = True) -> dict[str, Any]:
          """
          Export RSE data (RSE, settings, attributes and distance).
          :param distance: To include the distance.
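The export call itself is unchanged; only the return is now annotated as a plain dict. A short sketch, assuming a configured client with sufficient permissions:

    from rucio.client.exportclient import ExportClient

    export_client = ExportClient()
    # Returns a dict with RSEs, settings, attributes and (optionally) distances
    data = export_client.export_data(distance=False)
    print(sorted(data.keys()))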
rucio/client/fileclient.py

@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Copyright European Organization for Nuclear Research (CERN) since 2012
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -14,12 +13,12 @@
  # limitations under the License.

  from json import loads
+ from typing import Any
  from urllib.parse import quote_plus

  from requests.status_codes import codes

- from rucio.client.baseclient import BaseClient
- from rucio.client.baseclient import choice
+ from rucio.client.baseclient import BaseClient, choice
  from rucio.common.utils import build_url


@@ -28,7 +27,7 @@ class FileClient(BaseClient):

      BASEURL = 'files'

-     def list_file_replicas(self, scope, lfn):
+     def list_file_replicas(self, scope: str, lfn: str) -> list[dict[str, Any]]:
          """
          List file replicas.

rucio/client/importclient.py

@@ -1,4 +1,3 @@
- # -*- coding: utf-8 -*-
  # Copyright European Organization for Nuclear Research (CERN) since 2012
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,10 +12,11 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.

+ from typing import Any
+
  from requests.status_codes import codes

- from rucio.client.baseclient import BaseClient
- from rucio.client.baseclient import choice
+ from rucio.client.baseclient import BaseClient, choice
  from rucio.common.utils import build_url, render_json


@@ -25,7 +25,7 @@ class ImportClient(BaseClient):

      IMPORT_BASEURL = 'import'

-     def import_data(self, data):
+     def import_data(self, data: dict[str, Any]) -> str:
          """
          Imports data into Rucio.
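The FileClient and ImportClient changes above are likewise annotation-only: list_file_replicas now declares a list of dicts as return, and import_data is typed as dict in, str out. A combined sketch, assuming a configured client with sufficient permissions; the scope, LFN, and import payload below are placeholders, with the payload shape only loosely modelled on what export_data produces:

    from rucio.client.fileclient import FileClient
    from rucio.client.importclient import ImportClient

    file_client = FileClient()
    # Placeholder scope and logical file name
    for replica in file_client.list_file_replicas('user.jdoe', 'test.file.1'):
        print(replica)

    import_client = ImportClient()
    # Placeholder payload; real imports use data previously exported from a Rucio server
    payload = {'rses': {'SOME_RSE': {'rse_type': 'DISK'}}}
    import_client.import_data(data=payload)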