rucio-clients 32.8.6__py3-none-any.whl → 35.8.0__py3-none-any.whl

This diff shows the contents of publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two package versions as they appear in their respective public registries.

Potentially problematic release: this version of rucio-clients might be problematic.
Files changed (92)
  1. rucio/__init__.py +0 -1
  2. rucio/alembicrevision.py +1 -2
  3. rucio/client/__init__.py +0 -1
  4. rucio/client/accountclient.py +45 -25
  5. rucio/client/accountlimitclient.py +37 -9
  6. rucio/client/baseclient.py +199 -154
  7. rucio/client/client.py +2 -3
  8. rucio/client/configclient.py +19 -6
  9. rucio/client/credentialclient.py +9 -4
  10. rucio/client/didclient.py +238 -63
  11. rucio/client/diracclient.py +13 -5
  12. rucio/client/downloadclient.py +162 -51
  13. rucio/client/exportclient.py +4 -4
  14. rucio/client/fileclient.py +3 -4
  15. rucio/client/importclient.py +4 -4
  16. rucio/client/lifetimeclient.py +21 -5
  17. rucio/client/lockclient.py +18 -8
  18. rucio/client/{metaclient.py → metaconventionsclient.py} +18 -15
  19. rucio/client/pingclient.py +0 -1
  20. rucio/client/replicaclient.py +15 -5
  21. rucio/client/requestclient.py +35 -19
  22. rucio/client/rseclient.py +133 -51
  23. rucio/client/ruleclient.py +29 -22
  24. rucio/client/scopeclient.py +8 -6
  25. rucio/client/subscriptionclient.py +47 -35
  26. rucio/client/touchclient.py +8 -4
  27. rucio/client/uploadclient.py +166 -82
  28. rucio/common/__init__.py +0 -1
  29. rucio/common/cache.py +4 -4
  30. rucio/common/config.py +52 -47
  31. rucio/common/constants.py +69 -2
  32. rucio/common/constraints.py +0 -1
  33. rucio/common/didtype.py +24 -22
  34. rucio/common/exception.py +281 -222
  35. rucio/common/extra.py +0 -1
  36. rucio/common/logging.py +54 -38
  37. rucio/common/pcache.py +122 -101
  38. rucio/common/plugins.py +153 -0
  39. rucio/common/policy.py +4 -4
  40. rucio/common/schema/__init__.py +17 -10
  41. rucio/common/schema/atlas.py +7 -5
  42. rucio/common/schema/belleii.py +7 -5
  43. rucio/common/schema/domatpc.py +7 -5
  44. rucio/common/schema/escape.py +7 -5
  45. rucio/common/schema/generic.py +8 -6
  46. rucio/common/schema/generic_multi_vo.py +7 -5
  47. rucio/common/schema/icecube.py +7 -5
  48. rucio/common/stomp_utils.py +0 -1
  49. rucio/common/stopwatch.py +0 -1
  50. rucio/common/test_rucio_server.py +2 -2
  51. rucio/common/types.py +262 -17
  52. rucio/common/utils.py +743 -451
  53. rucio/rse/__init__.py +3 -4
  54. rucio/rse/protocols/__init__.py +0 -1
  55. rucio/rse/protocols/bittorrent.py +184 -0
  56. rucio/rse/protocols/cache.py +1 -2
  57. rucio/rse/protocols/dummy.py +1 -2
  58. rucio/rse/protocols/gfal.py +12 -10
  59. rucio/rse/protocols/globus.py +7 -7
  60. rucio/rse/protocols/gsiftp.py +2 -3
  61. rucio/rse/protocols/http_cache.py +1 -2
  62. rucio/rse/protocols/mock.py +1 -2
  63. rucio/rse/protocols/ngarc.py +1 -2
  64. rucio/rse/protocols/posix.py +12 -13
  65. rucio/rse/protocols/protocol.py +116 -52
  66. rucio/rse/protocols/rclone.py +6 -7
  67. rucio/rse/protocols/rfio.py +4 -5
  68. rucio/rse/protocols/srm.py +9 -10
  69. rucio/rse/protocols/ssh.py +8 -9
  70. rucio/rse/protocols/storm.py +2 -3
  71. rucio/rse/protocols/webdav.py +17 -14
  72. rucio/rse/protocols/xrootd.py +23 -17
  73. rucio/rse/rsemanager.py +19 -7
  74. rucio/vcsversion.py +4 -4
  75. rucio/version.py +5 -13
  76. rucio_clients-35.8.0.data/data/requirements.client.txt +15 -0
  77. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/rucio_client/merge_rucio_configs.py +2 -5
  78. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/scripts/rucio +87 -85
  79. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/scripts/rucio-admin +45 -32
  80. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/METADATA +13 -13
  81. rucio_clients-35.8.0.dist-info/RECORD +88 -0
  82. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/WHEEL +1 -1
  83. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/licenses/AUTHORS.rst +3 -0
  84. rucio/common/schema/cms.py +0 -478
  85. rucio/common/schema/lsst.py +0 -423
  86. rucio_clients-32.8.6.data/data/requirements.txt +0 -55
  87. rucio_clients-32.8.6.dist-info/RECORD +0 -88
  88. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rse-accounts.cfg.template +0 -0
  89. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rucio.cfg.atlas.client.template +0 -0
  90. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rucio.cfg.template +0 -0
  91. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/licenses/LICENSE +0 -0
  92. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/top_level.txt +0 -0
rucio/client/uploadclient.py CHANGED
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright European Organization for Nuclear Research (CERN) since 2012
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -13,6 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.

+import base64
 import copy
 import json
 import logging
@@ -21,22 +21,45 @@ import os.path
 import random
 import socket
 import time
+from typing import TYPE_CHECKING, Any, Final, Optional, Union, cast

 from rucio import version
 from rucio.client.client import Client
-from rucio.common.config import config_get_int, config_get
-from rucio.common.exception import (RucioException, RSEWriteBlocked, DataIdentifierAlreadyExists, RSEOperationNotSupported,
-                                    DataIdentifierNotFound, NoFilesUploaded, NotAllFilesUploaded, FileReplicaAlreadyExists,
-                                    ResourceTemporaryUnavailable, ServiceUnavailable, InputValidationError, RSEChecksumUnavailable,
-                                    ScopeNotFound)
-from rucio.common.utils import (adler32, detect_client_location, execute, generate_uuid, make_valid_did, md5, send_trace,
-                                retry, GLOBALLY_SUPPORTED_CHECKSUMS)
+from rucio.common.config import config_get, config_get_bool, config_get_int
+from rucio.common.constants import RseAttr
+from rucio.common.exception import (
+    DataIdentifierAlreadyExists,
+    DataIdentifierNotFound,
+    FileReplicaAlreadyExists,
+    InputValidationError,
+    NoFilesUploaded,
+    NotAllFilesUploaded,
+    ResourceTemporaryUnavailable,
+    RSEChecksumUnavailable,
+    RSEOperationNotSupported,
+    RSEWriteBlocked,
+    RucioException,
+    ScopeNotFound,
+    ServiceUnavailable,
+)
+from rucio.common.utils import GLOBALLY_SUPPORTED_CHECKSUMS, adler32, bittorrent_v2_merkle_sha256, detect_client_location, execute, generate_uuid, make_valid_did, md5, retry, send_trace
 from rucio.rse import rsemanager as rsemgr

+if TYPE_CHECKING:
+    from collections.abc import Iterable, Mapping
+
+    from rucio.common.types import AttachDict, DatasetDict, DIDStringDict, FileToUploadDict, FileToUploadWithCollectedAndDatasetInfoDict, FileToUploadWithCollectedInfoDict, LoggerFunction, PathTypeAlias, RSESettingsDict, TraceBaseDict, TraceDict
+    from rucio.rse.protocols.protocol import RSEProtocol
+

 class UploadClient:

-    def __init__(self, _client=None, logger=None, tracing=True):
+    def __init__(
+            self,
+            _client: Optional[Client] = None,
+            logger: Optional["LoggerFunction"] = None,
+            tracing: bool = True
+    ):
         """
         Initialises the basic settings for an UploadClient object

@@ -48,26 +71,33 @@ class UploadClient:
         else:
             self.logger = logger.log

-        self.client = _client if _client else Client()
+        self.client: Final[Client] = _client if _client else Client()
         self.client_location = detect_client_location()
         # if token should be used, use only JWT tokens
-        self.auth_token = self.client.auth_token if len(self.client.auth_token.split(".")) == 3 else None
+        self.auth_token: Optional[str] = self.client.auth_token if len(self.client.auth_token.split(".")) == 3 else None
         self.tracing = tracing
         if not self.tracing:
             logger(logging.DEBUG, 'Tracing is turned off.')
-        self.default_file_scope = 'user.' + self.client.account
+        self.default_file_scope: Final[str] = 'user.' + self.client.account
         self.rses = {}
         self.rse_expressions = {}

-        self.trace = {}
-        self.trace['hostname'] = socket.getfqdn()
-        self.trace['account'] = self.client.account
-        if self.client.vo != 'def':
-            self.trace['vo'] = self.client.vo
-        self.trace['eventType'] = 'upload'
-        self.trace['eventVersion'] = version.RUCIO_VERSION[0]
-
-    def upload(self, items, summary_file_path=None, traces_copy_out=None, ignore_availability=False, activity=None):
+        self.trace: "TraceBaseDict" = {
+            'hostname': socket.getfqdn(),
+            'account': self.client.account,
+            'eventType': 'upload',
+            'eventVersion': version.RUCIO_VERSION[0],
+            'vo': self.client.vo if self.client.vo != 'def' else None
+        }
+
+    def upload(
+            self,
+            items: "Iterable[FileToUploadDict]",
+            summary_file_path: Optional[str] = None,
+            traces_copy_out: Optional[list["TraceBaseDict"]] = None,
+            ignore_availability: bool = False,
+            activity: Optional[str] = None
+    ) -> int:
         """
         :param items: List of dictionaries. Each dictionary describing a file to upload. Keys:
             path - path of the file that will be uploaded
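(For orientation between hunks: a minimal usage sketch of the annotated upload API above. The RSE name, scope, and local path are placeholder assumptions, not values taken from this diff.)

    from rucio.client.uploadclient import UploadClient

    # Hypothetical values for illustration only.
    items = [{
        'path': '/tmp/example.data',   # local file to upload
        'rse': 'MOCK_RSE',             # destination RSE expression (placeholder)
        'did_scope': 'user.jdoe',      # optional; defaults to 'user.<account>'
    }]

    upload_client = UploadClient()
    # Returns 0 when every file was uploaded; raises NoFilesUploaded or NotAllFilesUploaded otherwise.
    exit_code = upload_client.upload(items)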
@@ -99,7 +129,7 @@
         :raises NotAllFilesUploaded: if not all files were successfully uploaded
         """
         # helper to get rse from rse_expression:
-        def _pick_random_rse(rse_expression):
+        def _pick_random_rse(rse_expression: str) -> dict[str, Any]:
             rses = [r['rse'] for r in self.client.list_rses(rse_expression)]  # can raise InvalidRSEExpression
             random.shuffle(rses)
             return rses[0]
@@ -185,8 +215,8 @@
                 rse_attributes = self.client.list_rse_attributes(rse)
             except:
                 logger(logging.WARNING, 'Attributes of the RSE: %s not available.' % rse)
-            if (self.client_location and 'lan' in rse_settings['domain'] and 'site' in rse_attributes):
-                if self.client_location['site'] == rse_attributes['site']:
+            if (self.client_location and 'lan' in rse_settings['domain'] and RseAttr.SITE in rse_attributes):
+                if self.client_location['site'] == rse_attributes[RseAttr.SITE]:
                     domain = 'lan'
             logger(logging.DEBUG, '{} domain is used for the upload'.format(domain))

@@ -268,7 +298,7 @@
                                             sign_service=sign_service)
                     logger(logging.DEBUG, 'Upload done.')
                     success = True
-                    file['upload_result'] = {0: True, 1: None, 'success': True, 'pfn': pfn}  # needs to be removed
+                    file['upload_result'] = {0: True, 1: None, 'success': True, 'pfn': pfn}  # TODO: needs to be removed
                 except (ServiceUnavailable, ResourceTemporaryUnavailable, RSEOperationNotSupported, RucioException) as error:
                     logger(logging.WARNING, 'Upload attempt failed')
                     logger(logging.INFO, 'Exception: %s' % str(error), exc_info=True)
@@ -279,7 +309,7 @@
                 trace['clientState'] = 'DONE'
                 file['state'] = 'A'
                 logger(logging.INFO, 'Successfully uploaded file %s' % basename)
-                self._send_trace(trace)
+                self._send_trace(cast("TraceDict", trace))

                 if summary_file_path:
                     summary.append(copy.deepcopy(file))
@@ -300,7 +330,7 @@
                 # add file to dataset if needed
                 if dataset_did_str and not no_register:
                     try:
-                        self.client.attach_dids(file['dataset_scope'], file['dataset_name'], [file_did])
+                        self.client.attach_dids(file['dataset_scope'], file['dataset_name'], [file_did])  # type: ignore (`dataset_scope` and `dataset_name` always exist if `dataset_did_str`)
                     except Exception as error:
                         registration_succeeded = False
                         logger(logging.ERROR, 'Failed to attach file to the dataset')
@@ -312,7 +342,7 @@
             else:
                 trace['clientState'] = 'FAILED'
                 trace['stateReason'] = state_reason
-                self._send_trace(trace)
+                self._send_trace(cast('TraceDict', trace))
                 logger(logging.ERROR, 'Failed to upload file %s' % basename)

         if summary_file_path:
@@ -342,7 +372,23 @@
             raise NotAllFilesUploaded()
         return 0

-    def _register_file(self, file, registered_dataset_dids, ignore_availability=False, activity=None):
+    def _add_bittorrent_meta(self, file: "Mapping[str, Any]") -> None:
+        pieces_root, pieces_layers, piece_length = bittorrent_v2_merkle_sha256(os.path.join(file['dirname'], file['basename']))
+        bittorrent_meta = {
+            'bittorrent_pieces_root': base64.b64encode(pieces_root).decode(),
+            'bittorrent_pieces_layers': base64.b64encode(pieces_layers).decode(),
+            'bittorrent_piece_length': piece_length,
+        }
+        self.client.set_metadata_bulk(scope=file['did_scope'], name=file['did_name'], meta=bittorrent_meta)
+        self.logger(logging.INFO, f"Added bittorrent metadata to file DID {file['did_scope']}:{file['did_name']}")
+
+    def _register_file(
+            self,
+            file: "Mapping[str, Any]",
+            registered_dataset_dids: set[str],
+            ignore_availability: bool = False,
+            activity: Optional[str] = None
+    ) -> None:
         """
         Registers the given file in Rucio. Creates a dataset if
         needed. Registers the file DID and creates the replication
@@ -386,7 +432,6 @@
                 logger(logging.INFO, 'Successfully created dataset %s' % dataset_did_str)
             except DataIdentifierAlreadyExists:
                 logger(logging.INFO, 'Dataset %s already exists - no rule will be created' % dataset_did_str)
-
                 if file.get('lifetime') is not None:
                     raise InputValidationError('Dataset %s exists and lifetime %s given. Prohibited to modify parent dataset lifetime.' % (dataset_did_str,
                                                                                                file.get('lifetime')))
@@ -405,7 +450,6 @@

             if str(meta['adler32']).lstrip('0') != str(file['adler32']).lstrip('0'):
                 logger(logging.ERROR, 'Local checksum %s does not match remote checksum %s' % (file['adler32'], meta['adler32']))
-
                 raise DataIdentifierAlreadyExists

         # add file to rse if it is not registered yet
@@ -416,13 +460,15 @@
         except DataIdentifierNotFound:
             logger(logging.DEBUG, 'File DID does not exist')
             self.client.add_replicas(rse=rse, files=[replica_for_api])
+            if config_get_bool('client', 'register_bittorrent_meta', default=False):
+                self._add_bittorrent_meta(file=file)
             logger(logging.INFO, 'Successfully added replica in Rucio catalogue at %s' % rse)
             if not dataset_did_str:
                 # only need to add rules for files if no dataset is given
                 self.client.add_replication_rule([file_did], copies=1, rse_expression=rse, lifetime=file.get('lifetime'), ignore_availability=ignore_availability, activity=activity)
                 logger(logging.INFO, 'Successfully added replication rule at %s' % rse)

-    def _get_file_guid(self, file):
+    def _get_file_guid(self, file: "Mapping[str, Any]") -> str:
         """
         Get the guid of a file, trying different strategies
         (This function is meant to be used as class internal only)
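(Note between hunks: the bittorrent metadata registration added above is opt-in and is read via config_get_bool('client', 'register_bittorrent_meta', default=False). Assuming the usual INI-style rucio.cfg layout, enabling it would look roughly like the following sketch; the option is off by default.)

    [client]
    register_bittorrent_meta = True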
@@ -441,14 +487,18 @@
             try:
                 guid = output.splitlines()[-1].split()[0].replace('-', '').lower()
             except Exception:
-                raise RucioException('Error extracting GUID from ouput of pool_extractFileIdentifier')
+                raise RucioException('Error extracting GUID from output of pool_extractFileIdentifier')
         elif guid:
             guid = guid.replace('-', '')
         else:
             guid = generate_uuid()
         return guid

-    def _collect_file_info(self, filepath, item):
+    def _collect_file_info(
+            self,
+            filepath: "PathTypeAlias",
+            item: "FileToUploadDict"
+    ) -> "FileToUploadWithCollectedInfoDict":
         """
         Collects infos (e.g. size, checksums, etc.) about the file and
         returns them as a dictionary
@@ -460,6 +510,7 @@
         :returns: a dictionary containing all collected info and the input options
         """
         new_item = copy.deepcopy(item)
+        new_item = cast("FileToUploadWithCollectedInfoDict", new_item)
         new_item['path'] = filepath
         new_item['dirname'] = os.path.dirname(filepath)
         new_item['basename'] = os.path.basename(filepath)
@@ -476,7 +527,7 @@

         return new_item

-    def _collect_and_validate_file_info(self, items):
+    def _collect_and_validate_file_info(self, items: "Iterable[FileToUploadDict]") -> list["FileToUploadWithCollectedInfoDict"]:
         """
         Checks if there are any inconsistencies within the given input
         options and stores the output of _collect_file_info for every file
@@ -489,7 +540,7 @@
         :raises InputValidationError: if an input option has a wrong format
         """
         logger = self.logger
-        files = []
+        files: list["FileToUploadWithCollectedInfoDict"] = []
         for item in items:
             path = item.get('path')
             pfn = item.get('pfn')
@@ -502,8 +553,8 @@
                 continue
             if pfn:
                 item['force_scheme'] = pfn.split(':')[0]
-            if item.get('impl'):
-                impl = item.get('impl')
+            impl = item.get('impl')
+            if impl:
                 impl_split = impl.split('.')
                 if len(impl_split) == 1:
                     impl = 'rucio.rse.protocols.' + impl + '.Default'
@@ -520,7 +571,7 @@
             elif not len(fnames):
                 logger(logging.WARNING, 'Skipping %s because it has no files in it. Subdirectories are not supported.' % dname)
             elif os.path.isdir(path) and recursive:
-                files.extend(self._recursive(item))
+                files.extend(cast("list[FileToUploadWithCollectedInfoDict]", self._recursive(item)))
             elif os.path.isfile(path) and not recursive:
                 file = self._collect_file_info(path, item)
                 files.append(file)
@@ -534,7 +585,7 @@

         return files

-    def _convert_file_for_api(self, file):
+    def _convert_file_for_api(self, file: "Mapping[str, Any]") -> dict[str, Any]:
         """
         Creates a new dictionary that contains only the values
         that are needed for the upload with the correct keys
@@ -557,7 +608,20 @@
         replica['pfn'] = pfn
         return replica

-    def _upload_item(self, rse_settings, rse_attributes, lfn, source_dir=None, domain='wan', impl=None, force_pfn=None, force_scheme=None, transfer_timeout=None, delete_existing=False, sign_service=None):
+    def _upload_item(
+            self,
+            rse_settings: "RSESettingsDict",
+            rse_attributes: dict[str, Any],
+            lfn: dict[str, Union[str, int]],
+            source_dir: Optional[str] = None,
+            domain: str = 'wan',
+            impl: Optional[str] = None,
+            force_pfn: Optional[str] = None,
+            force_scheme: Optional[str] = None,
+            transfer_timeout: Optional[int] = None,
+            delete_existing: bool = False,
+            sign_service: Optional[str] = None
+    ) -> Optional[str]:
         """
         Uploads a file to the connected storage.

@@ -605,11 +669,12 @@
         # Auth. mostly for object stores
         if sign_service:
             protocol_read = self._create_protocol(rse_settings, 'read', domain=domain, impl=impl)
-            signed_read_pfn = self.client.get_signed_url(rse_settings['rse'], sign_service, 'read', pfn)  # NOQA pylint: disable=undefined-variable
-            pfn = self.client.get_signed_url(rse_settings['rse'], sign_service, 'write', pfn)  # NOQA pylint: disable=undefined-variable
+            if pfn is not None:
+                signed_read_pfn = self.client.get_signed_url(rse_settings['rse'], sign_service, 'read', pfn)
+                pfn = self.client.get_signed_url(rse_settings['rse'], sign_service, 'write', pfn)

         # Create a name of tmp file if renaming operation is supported
-        pfn_tmp = '%s.rucio.upload' % pfn if protocol_write.renaming else pfn
+        pfn_tmp = cast("str", '%s.rucio.upload' % pfn if protocol_write.renaming else pfn)
         signed_read_pfn_tmp = '%s.rucio.upload' % signed_read_pfn if protocol_write.renaming else signed_read_pfn

         # Either DID exists or not register_after_upload
@@ -657,7 +722,7 @@
                 raise RSEOperationNotSupported(str(error))

         # Is stat after that upload allowed?
-        skip_upload_stat = rse_attributes.get('skip_upload_stat', False)
+        skip_upload_stat = rse_attributes.get(RseAttr.SKIP_UPLOAD_STAT, False)
         self.logger(logging.DEBUG, 'skip_upload_stat=%s', skip_upload_stat)

         # Checksum verification, obsolete, see Gabriele changes.
@@ -693,7 +758,11 @@

         return pfn

-    def _retry_protocol_stat(self, protocol, pfn):
+    def _retry_protocol_stat(
+            self,
+            protocol: "RSEProtocol",
+            pfn: str
+    ) -> dict[str, Any]:
         """
         Try to stat file, on fail try again 1s, 2s, 4s, 8s, 16s, 32s later. Fail is all fail
         :param protocol: The protocol to use to reach this file
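(Note between hunks: the docstring above describes retrying the stat with exponentially growing waits of 1 s, 2 s, 4 s, and so on. A standalone sketch of that pattern, with a hypothetical attempt limit chosen for illustration:)

    import time

    def stat_with_backoff(protocol, pfn, max_attempts=6):
        """Illustrative only: retry protocol.stat() with exponential backoff."""
        for attempt in range(max_attempts):
            try:
                return protocol.stat(pfn)
            except Exception:
                time.sleep(2 ** attempt)  # 1s, 2s, 4s, 8s, 16s, 32s
        return protocol.stat(pfn)  # final attempt; let any error propagate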
@@ -721,9 +790,16 @@
                 time.sleep(2**attempt)
         return protocol.stat(pfn)

-    def _create_protocol(self, rse_settings, operation, impl=None, force_scheme=None, domain='wan'):
+    def _create_protocol(
+            self,
+            rse_settings: "RSESettingsDict",
+            operation: str,
+            impl: Optional[str] = None,
+            force_scheme: Optional[str] = None,
+            domain: str = 'wan'
+    ) -> "RSEProtocol":
         """
-        Protol construction.
+        Protocol construction.
         :param rse_settings: rse_settings
         :param operation: activity, e.g. read, write, delete etc.
         :param force_scheme: custom scheme
@@ -738,7 +814,7 @@
             raise error
         return protocol

-    def _send_trace(self, trace):
+    def _send_trace(self, trace: "TraceDict") -> None:
         """
         Checks if sending trace is allowed and send the trace.

@@ -747,7 +823,7 @@
         if self.tracing:
             send_trace(trace, self.client.trace_host, self.client.user_agent)

-    def _recursive(self, item):
+    def _recursive(self, item: "FileToUploadDict") -> list["FileToUploadWithCollectedAndDatasetInfoDict"]:
         """
         If the --recursive flag is set, it replicates the folder structure recursively into collections
         A folder only can have either other folders inside or files, but not both of them
@@ -757,38 +833,42 @@

         :param item: dictionary containing all descriptions of the files to upload
         """
-        files = []
-        datasets = []
-        containers = []
-        attach = []
-        scope = item.get('did_scope') if item.get('did_scope') is not None else self.default_file_scope
+        files: list["FileToUploadWithCollectedAndDatasetInfoDict"] = []
+        datasets: list["DatasetDict"] = []
+        containers: list["DIDStringDict"] = []
+        attach: "Iterable[AttachDict]" = []
+        scope = item.get('did_scope')
+        if scope is None:
+            scope = self.default_file_scope
         rse = item.get('rse')
         path = item.get('path')
-        if path[-1] == '/':
-            path = path[0:-1]
-        i = 0
-        path = os.path.abspath(path)
-        for root, dirs, fnames in os.walk(path):
-            if len(dirs) > 0 and len(fnames) > 0 and i == 0:
-                self.logger(logging.ERROR, 'A container can only have either collections or files, not both')
-                raise InputValidationError('Invalid input folder structure')
-            if len(fnames) > 0:
-                datasets.append({'scope': scope, 'name': root.split('/')[-1], 'rse': rse})
-                self.logger(logging.DEBUG, 'Appended dataset with DID %s:%s' % (scope, path))
-                for fname in fnames:
-                    file = self._collect_file_info(os.path.join(root, fname), item)
-                    file['dataset_scope'] = scope
-                    file['dataset_name'] = root.split('/')[-1]
-                    files.append(file)
-                    self.logger(logging.DEBUG, 'Appended file with DID %s:%s' % (scope, fname))
-            elif len(dirs) > 0:
-                containers.append({'scope': scope, 'name': root.split('/')[-1]})
-                self.logger(logging.DEBUG, 'Appended container with DID %s:%s' % (scope, path))
-                attach.extend([{'scope': scope, 'name': root.split('/')[-1], 'rse': rse, 'dids': {'scope': scope, 'name': dir_}} for dir_ in dirs])
-            elif len(dirs) == 0 and len(fnames) == 0:
-                self.logger(logging.WARNING, 'The folder %s is empty, skipping' % root)
-                continue
-            i += 1
+        if path and isinstance(path, str):
+            if path[-1] == '/':
+                path = path[0:-1]
+            i = 0
+            path = os.path.abspath(path)
+            for root, dirs, fnames in os.walk(path):
+                if len(dirs) > 0 and len(fnames) > 0 and i == 0:
+                    self.logger(logging.ERROR, 'A container can only have either collections or files, not both')
+                    raise InputValidationError('Invalid input folder structure')
+                if len(fnames) > 0:
+                    datasets.append({'scope': scope, 'name': root.split('/')[-1], 'rse': rse})
+                    self.logger(logging.DEBUG, 'Appended dataset with DID %s:%s' % (scope, path))
+                    for fname in fnames:
+                        file = self._collect_file_info(os.path.join(root, fname), item)
+                        file = cast("FileToUploadWithCollectedAndDatasetInfoDict", file)
+                        file['dataset_scope'] = scope
+                        file['dataset_name'] = root.split('/')[-1]
+                        files.append(file)
+                        self.logger(logging.DEBUG, 'Appended file with DID %s:%s' % (scope, fname))
+                elif len(dirs) > 0:
+                    containers.append({'scope': scope, 'name': root.split('/')[-1]})
+                    self.logger(logging.DEBUG, 'Appended container with DID %s:%s' % (scope, path))
+                    attach.extend([{'scope': scope, 'name': root.split('/')[-1], 'rse': rse, 'did': {'scope': scope, 'name': dir_}} for dir_ in dirs])
+                elif len(dirs) == 0 and len(fnames) == 0:
+                    self.logger(logging.WARNING, 'The folder %s is empty, skipping' % root)
+                    continue
+                i += 1
         # if everything went ok, replicate the folder structure in Rucio storage
         for dataset in datasets:
             try:
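(Worked example of the mapping implemented above; the scope 'user.jdoe' and the directory names are placeholders. A directory that contains only subdirectories becomes a container, each subdirectory that contains only files becomes a dataset with its files attached, and mixing files and subdirectories at the top level raises InputValidationError:)

    upload_dir/          -> container  user.jdoe:upload_dir
        run_1/           -> dataset    user.jdoe:run_1  (a.data, b.data attached)
            a.data
            b.data
        run_2/           -> dataset    user.jdoe:run_2  (c.data attached)
            c.data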
@@ -806,14 +886,18 @@ class UploadClient:
                 self.logger(logging.ERROR, 'It was not possible to create dataset with DID %s:%s' % (container['scope'], container['name']))
         for att in attach:
             try:
-                self.client.attach_dids(scope=att['scope'], name=att['name'], dids=[att['dids']])
+                self.client.attach_dids(scope=att['scope'], name=att['name'], dids=[att['did']])
                 self.logger(logging.INFO, 'DIDs attached to collection %s:%s' % (att['scope'], att['name']))
             except RucioException as error:
                 self.logger(logging.ERROR, error)
                 self.logger(logging.ERROR, 'It was not possible to attach to collection with DID %s:%s' % (att['scope'], att['name']))
         return files

-    def preferred_impl(self, rse_settings, domain):
+    def preferred_impl(
+            self,
+            rse_settings: "RSESettingsDict",
+            domain: str
+    ) -> Optional[str]:
         """
         Finds the optimum protocol impl preferred by the client and
         supported by the remote RSE.
rucio/common/__init__.py CHANGED
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright European Organization for Nuclear Research (CERN) since 2012
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
rucio/common/cache.py CHANGED
@@ -1,4 +1,3 @@
-# -*- coding: utf-8 -*-
 # Copyright European Organization for Nuclear Research (CERN) since 2012
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
@@ -12,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-from __future__ import absolute_import

 from typing import TYPE_CHECKING

@@ -25,6 +23,8 @@ if TYPE_CHECKING:
     from collections.abc import Callable
     from typing import Optional

+    from dogpile.cache.region import CacheRegion
+
 CACHE_URL = config_get('cache', 'url', False, '127.0.0.1:11211', check_config_table=False)

 ENABLE_CACHING = True
@@ -36,7 +36,7 @@ try:
     import pymemcache
     _mc_client = pymemcache.Client(CACHE_URL, connect_timeout=1, timeout=1)
     _mc_client.version()
-except IOError:
+except OSError:
     ENABLE_CACHING = False
 except ImportError:
     ENABLE_CACHING = False
@@ -49,7 +49,7 @@ def make_region_memcached(
         expiration_time: int,
         function_key_generator: "Optional[Callable]" = None,
         memcached_expire_time: "Optional[int]" = None
-):
+) -> "CacheRegion":
     """
     Make and configure a dogpile.cache.pymemcache region
     """