rucio-clients 32.8.6__py3-none-any.whl → 35.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of rucio-clients might be problematic.

Files changed (92)
  1. rucio/__init__.py +0 -1
  2. rucio/alembicrevision.py +1 -2
  3. rucio/client/__init__.py +0 -1
  4. rucio/client/accountclient.py +45 -25
  5. rucio/client/accountlimitclient.py +37 -9
  6. rucio/client/baseclient.py +199 -154
  7. rucio/client/client.py +2 -3
  8. rucio/client/configclient.py +19 -6
  9. rucio/client/credentialclient.py +9 -4
  10. rucio/client/didclient.py +238 -63
  11. rucio/client/diracclient.py +13 -5
  12. rucio/client/downloadclient.py +162 -51
  13. rucio/client/exportclient.py +4 -4
  14. rucio/client/fileclient.py +3 -4
  15. rucio/client/importclient.py +4 -4
  16. rucio/client/lifetimeclient.py +21 -5
  17. rucio/client/lockclient.py +18 -8
  18. rucio/client/{metaclient.py → metaconventionsclient.py} +18 -15
  19. rucio/client/pingclient.py +0 -1
  20. rucio/client/replicaclient.py +15 -5
  21. rucio/client/requestclient.py +35 -19
  22. rucio/client/rseclient.py +133 -51
  23. rucio/client/ruleclient.py +29 -22
  24. rucio/client/scopeclient.py +8 -6
  25. rucio/client/subscriptionclient.py +47 -35
  26. rucio/client/touchclient.py +8 -4
  27. rucio/client/uploadclient.py +166 -82
  28. rucio/common/__init__.py +0 -1
  29. rucio/common/cache.py +4 -4
  30. rucio/common/config.py +52 -47
  31. rucio/common/constants.py +69 -2
  32. rucio/common/constraints.py +0 -1
  33. rucio/common/didtype.py +24 -22
  34. rucio/common/exception.py +281 -222
  35. rucio/common/extra.py +0 -1
  36. rucio/common/logging.py +54 -38
  37. rucio/common/pcache.py +122 -101
  38. rucio/common/plugins.py +153 -0
  39. rucio/common/policy.py +4 -4
  40. rucio/common/schema/__init__.py +17 -10
  41. rucio/common/schema/atlas.py +7 -5
  42. rucio/common/schema/belleii.py +7 -5
  43. rucio/common/schema/domatpc.py +7 -5
  44. rucio/common/schema/escape.py +7 -5
  45. rucio/common/schema/generic.py +8 -6
  46. rucio/common/schema/generic_multi_vo.py +7 -5
  47. rucio/common/schema/icecube.py +7 -5
  48. rucio/common/stomp_utils.py +0 -1
  49. rucio/common/stopwatch.py +0 -1
  50. rucio/common/test_rucio_server.py +2 -2
  51. rucio/common/types.py +262 -17
  52. rucio/common/utils.py +743 -451
  53. rucio/rse/__init__.py +3 -4
  54. rucio/rse/protocols/__init__.py +0 -1
  55. rucio/rse/protocols/bittorrent.py +184 -0
  56. rucio/rse/protocols/cache.py +1 -2
  57. rucio/rse/protocols/dummy.py +1 -2
  58. rucio/rse/protocols/gfal.py +12 -10
  59. rucio/rse/protocols/globus.py +7 -7
  60. rucio/rse/protocols/gsiftp.py +2 -3
  61. rucio/rse/protocols/http_cache.py +1 -2
  62. rucio/rse/protocols/mock.py +1 -2
  63. rucio/rse/protocols/ngarc.py +1 -2
  64. rucio/rse/protocols/posix.py +12 -13
  65. rucio/rse/protocols/protocol.py +116 -52
  66. rucio/rse/protocols/rclone.py +6 -7
  67. rucio/rse/protocols/rfio.py +4 -5
  68. rucio/rse/protocols/srm.py +9 -10
  69. rucio/rse/protocols/ssh.py +8 -9
  70. rucio/rse/protocols/storm.py +2 -3
  71. rucio/rse/protocols/webdav.py +17 -14
  72. rucio/rse/protocols/xrootd.py +23 -17
  73. rucio/rse/rsemanager.py +19 -7
  74. rucio/vcsversion.py +4 -4
  75. rucio/version.py +5 -13
  76. rucio_clients-35.8.0.data/data/requirements.client.txt +15 -0
  77. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/rucio_client/merge_rucio_configs.py +2 -5
  78. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/scripts/rucio +87 -85
  79. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/scripts/rucio-admin +45 -32
  80. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/METADATA +13 -13
  81. rucio_clients-35.8.0.dist-info/RECORD +88 -0
  82. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/WHEEL +1 -1
  83. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/licenses/AUTHORS.rst +3 -0
  84. rucio/common/schema/cms.py +0 -478
  85. rucio/common/schema/lsst.py +0 -423
  86. rucio_clients-32.8.6.data/data/requirements.txt +0 -55
  87. rucio_clients-32.8.6.dist-info/RECORD +0 -88
  88. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rse-accounts.cfg.template +0 -0
  89. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rucio.cfg.atlas.client.template +0 -0
  90. {rucio_clients-32.8.6.data → rucio_clients-35.8.0.data}/data/etc/rucio.cfg.template +0 -0
  91. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/licenses/LICENSE +0 -0
  92. {rucio_clients-32.8.6.dist-info → rucio_clients-35.8.0.dist-info}/top_level.txt +0 -0
rucio/common/pcache.py CHANGED
@@ -1,5 +1,4 @@
  #!/usr/bin/env python
- # -*- coding: utf-8 -*-
  # Copyright European Organization for Nuclear Research (CERN) since 2012
  #
  # Licensed under the Apache License, Version 2.0 (the "License");
@@ -15,6 +14,7 @@
  # limitations under the License.

  import errno
+ import fcntl
  import getopt
  import os
  import re
@@ -23,10 +23,17 @@ import subprocess
  import sys
  import time
  from socket import gethostname
+ from typing import TYPE_CHECKING, Any, Optional
  from urllib.parse import urlencode
  from urllib.request import urlopen

- import fcntl
+ if TYPE_CHECKING:
+     from collections.abc import Iterable, Iterator
+     from types import FrameType
+     from urllib.parse import _QueryType
+
+     from _typeshed import StrOrBytesPath
+

  # The pCache Version
  pcacheversion = "4.2.3"
@@ -42,12 +49,12 @@ sessid = "%s.%s" % (int(time.time()), os.getpid())


  # Run a command with a timeout
- def run_cmd(args, timeout=0):
+ def run_cmd(args: "subprocess._CMD", timeout: int = 0) -> tuple[int, Optional[bytes]]:

      class Alarm(Exception):
          pass

-     def alarm_handler(signum, frame):
+     def alarm_handler(signum: int, frame: Optional["FrameType"]) -> None:
          raise Alarm

      # Execute the command as a subprocess
@@ -71,7 +78,7 @@ def run_cmd(args, timeout=0):
      # Collect the output when the command completes
      stdout = p.communicate()[0][:-1]

-     # Commmand completed in time, cancel the alarm
+     # Command completed in time, cancel the alarm
      if (timeout > 0):
          signal.alarm(0)

@@ -97,9 +104,9 @@ def run_cmd(args, timeout=0):
      return (p.returncode, stdout)


- def get_process_children(pid):
+ def get_process_children(pid: int) -> list[int]:

-     # Get a list of all pids assocaited with a given pid
+     # Get a list of all pids associated with a given pid
      p = subprocess.Popen(args='ps --no-headers -o pid --ppid %d' % pid,
                           shell=True,
                           stdout=subprocess.PIPE,
@@ -112,19 +119,19 @@ def get_process_children(pid):
      return [int(pr) for pr in stdout.split()]


- def unitize(x):
+ def unitize(x: int) -> str:

      suff = 'BKMGTPEZY'

      while ((x >= 1024) and suff):
-         x /= 1024.0
+         y = x / 1024.0
          suff = suff[1:]
-     return "%.4g%s" % (x, suff[0])
+     return "%.4g%s" % (y, suff[0])


  class Pcache:

-     def Usage(self):
+     def Usage(self) -> None:
          msg = """Usage: %s [flags] copy_prog [copy_flags] input output""" % self.progname
          sys.stderr.write("%s\n" % msg)  # py3, py2
          # print>>sys.stderr, " flags are: "
@@ -163,11 +170,11 @@ class Pcache:
          self.hysterisis = 0.75
          # self.hysterisis = 0.9
          self.clean = False
-         self.transfer_timeout = "600"
+         self.transfer_timeout_as_str: str = "600"
          self.max_retries = 3
          self.guid = None
-         self.accept_patterns = []
-         self.reject_patterns = []
+         self.accept_patterns: list[re.Pattern] = []
+         self.reject_patterns: list[re.Pattern] = []
          self.force = False
          self.flush = False
          self.verbose = False
@@ -187,7 +194,7 @@ class Pcache:
          self.deleted_guids = []
          self.version = pcacheversion

-     def parse_args(self, args):
+     def parse_args(self, args: list[str]) -> None:
          # handle pcache flags and leave the rest in self.args

          try:
@@ -241,11 +248,11 @@ class Pcache:
                  else:
                      self.hysterisis = float(arg)
              elif opt in ("-A", "--accept"):
-                 self.accept_patterns.append(arg)
+                 self.accept_patterns.append(re.compile(arg))
              elif opt in ("-R", "--reject"):
-                 self.reject_patterns.append(arg)
+                 self.reject_patterns.append(re.compile(arg))
              elif opt in ("-t", "--timeout"):
-                 self.transfer_timeout = arg
+                 self.transfer_timeout_as_str = arg
              elif opt in ("-f", "--force"):
                  self.force = True
              elif opt in ("-F", "--flush-cache"):
@@ -260,7 +267,7 @@ class Pcache:
              elif opt in ("-r", "--retry"):
                  self.max_retries = int(arg)
              elif opt in ("-V", "--version"):
-                 print((str(self.version)))
+                 print(str(self.version))
                  sys.exit(0)
              elif opt in ("-l", "--log"):
                  self.log_file = arg
@@ -289,8 +296,8 @@ class Pcache:
          self._convert_max_space()

          # Convert timeout to seconds
-         t = self.transfer_timeout
          mult = 1
+         t = self.transfer_timeout_as_str
          suff = t[-1]
          if suff in ('H', 'h'):
              mult = 3600
@@ -301,11 +308,7 @@ class Pcache:
          elif suff in ('S', 's'):
              mult = 1
              t = t[:-1]
-         self.transfer_timeout = mult * int(t)
-
-         # Pre-compile regexes
-         self.accept_patterns = list(map(re.compile, self.accept_patterns))
-         self.reject_patterns = list(map(re.compile, self.reject_patterns))
+         self.transfer_timeout: int = mult * int(t)

          # Set host and name
          if self.hostname is None:
@@ -316,7 +319,7 @@ class Pcache:
          # All done
          self.args = args

-     def _convert_max_space(self):
+     def _convert_max_space(self) -> None:
          '''
          Added by Rucio team. Converts max allowed space usage of pcache into units used by this tool.
          :input self.max_space: limit set by user
@@ -337,7 +340,7 @@ class Pcache:
          else:  # Numeric value w/o units (exception if invalid)
              self.bytes_max = float(self.max_space)

-     def clean_pcache(self, max_space=None):
+     def clean_pcache(self, max_space: Optional[str] = None) -> None:
          '''
          Added by Rucio team. Cleans pcache in case it is over limit.
          Used for tests of the pcache functionality. Can be called without other init.
@@ -362,8 +365,21 @@ class Pcache:
          # clean pcache
          self.maybe_start_cleaner_thread()

-     def check_and_link(self, src='', dst='', dst_prefix='', scratch_dir='/scratch/', storage_root=None, force=False,
-                        guid=None, log_file=None, version='', hostname=None, sitename=None, local_src=None):
+     def check_and_link(
+             self,
+             src: str = '',
+             dst: str = '',
+             dst_prefix: str = '',
+             scratch_dir: str = '/scratch/',
+             storage_root: Optional[str] = None,
+             force: bool = False,
+             guid: Optional[str] = None,
+             log_file: Optional[str] = None,
+             version: str = '',
+             hostname: Optional[str] = None,
+             sitename: Optional[str] = None,
+             local_src: Optional[str] = None
+     ):
          '''
          Added by Rucio team. Replacement for the main method.
          Checks whether a file is in pcache:
@@ -466,7 +482,7 @@ class Pcache:
          # Return if the file was cached, copied or an error (and its code)
          return (exit_status, copy_status)

-     def main(self, args):
+     def main(self, args: list[str]) -> tuple[int, Optional[int]]:

          # args
          self.cmdline = ' '.join(args)
@@ -531,7 +547,7 @@ class Pcache:

          # Execute original command, no further action
          if (not (self.dst.startswith(self.scratch_dir) and self.accept(self.src) and (not self.reject(self.src)))):
-             os.execvp(self.copy_util, self.args)
+             os.execvp(self.copy_util, self.args)  # noqa: S606
              os._exit(1)

          # XXXX todo: fast-path - try to acquire lock
@@ -581,7 +597,7 @@ class Pcache:
          # Return if the file was cached, copied or an error (and its code)
          return (exit_status, copy_status)

-     def finish(self, local_src=None):
+     def finish(self, local_src: Optional[str] = None) -> None:
          cache_file = self.pcache_dst_dir + "data"
          self.update_mru()
          if self.local_src:
@@ -591,7 +607,7 @@ class Pcache:
          if (self.make_hard_link(cache_file, self.dst)):
              self.fail(102)

-     def pcache_copy_in(self):
+     def pcache_copy_in(self) -> tuple[int, Optional[int]]:

          cache_file = self.pcache_dst_dir + "data"

@@ -658,33 +674,34 @@

          return (exit_status, copy_status)

-     def create_pcache_dst_dir(self):
+     def create_pcache_dst_dir(self) -> None:

          d = self.src
-         index = d.find(self.storage_root)
+         if self.storage_root is not None:
+             index = d.find(self.storage_root)

-         if (index >= 0):
-             d = d[index:]
-         else:
-             index = d.find("SFN=")
              if (index >= 0):
-                 d = d[index + 4:]
-
-         # self.log(INFO, '%s', self.storage_root)
-         # self.log(INFO, '%s', d)
-         # XXXX any more patterns to look for?
-         d = os.path.normpath(self.pcache_dir + "CACHE/" + d)
-         if (not d.endswith('/')):
-             d += '/'
-
-         self.pcache_dst_dir = d
-         status = self.mkdir_p(d)
-         if (status):
-             self.log(ERROR, "mkdir %s %s", d, status)
-             self.fail(103)
-
-     def get_disk_usage(self):
-         p = os.popen("df -P %s | tail -1" % self.pcache_dir, 'r')
+                 d = d[index:]
+             else:
+                 index = d.find("SFN=")
+                 if (index >= 0):
+                     d = d[index + 4:]
+
+         # self.log(INFO, '%s', self.storage_root)
+         # self.log(INFO, '%s', d)
+         # XXXX any more patterns to look for?
+         d = os.path.normpath(self.pcache_dir + "CACHE/" + d)
+         if (not d.endswith('/')):
+             d += '/'
+
+         self.pcache_dst_dir = d
+         status = self.mkdir_p(d)
+         if (status):
+             self.log(ERROR, "mkdir %s %s", d, status)
+             self.fail(103)
+
+     def get_disk_usage(self) -> int:
+         p = os.popen("df -P %s | tail -1" % self.pcache_dir, 'r')  # noqa: S605
          data = p.read()
          status = p.close()
          if status:
@@ -698,19 +715,23 @@ class Pcache:
          percent = int(percent[:-1])
          return percent

-     def over_limit(self, factor=1.0):
+     def over_limit(self, factor: float = 1.0) -> bool:
          if self.percent_max:
              return self.get_disk_usage() > factor * self.percent_max
          if self.bytes_max:
-             return self.get_cache_size() > factor * self.bytes_max
+             cache_size = self.get_cache_size()
+             if cache_size is not None:
+                 return cache_size > factor * self.bytes_max
          return False

-     def clean_cache(self):
+     def clean_cache(self) -> None:
          t0 = time.time()
+         cache_size = self.get_cache_size()

-         self.log(INFO, "starting cleanup, cache size=%s, usage=%s%%",
-                  unitize(self.get_cache_size()),
-                  self.get_disk_usage())
+         if cache_size is not None:
+             self.log(INFO, "starting cleanup, cache size=%s, usage=%s%%",
+                      unitize(cache_size),
+                      self.get_disk_usage())

          for link in self.list_by_mru():
              try:
@@ -748,7 +769,7 @@ class Pcache:
                   self.get_disk_usage(),
                   time.time() - t0)

-     def list_by_mru(self):
+     def list_by_mru(self) -> "Iterator[str]":
          mru_dir = self.pcache_dir + "MRU/"
          for root, dirs, files in os.walk(mru_dir):
              dirs.sort()
@@ -763,7 +784,7 @@ class Pcache:
                  path = os.path.join(root, file)
                  yield path

-     def flush_cache(self):
+     def flush_cache(self) -> None:
          # Delete everything in CACHE, MRU, and reset stats
          self.log(INFO, "flushing cache")
          if self.update_panda:
@@ -774,12 +795,12 @@ class Pcache:
              d = self.pcache_dir + d
              try:
                  os.rename(d, d + ts)
-                 os.system("rm -rf %s &" % (d + ts))
+                 os.system("rm -rf %s &" % (d + ts))  # noqa: S605
              except OSError as e:
                  if e.errno != errno.ENOENT:
                      self.log(ERROR, "%s: %s", d, e)

-     def do_transfer(self):
+     def do_transfer(self) -> tuple[int, Optional[int]]:

          # Cache file and transfer file locations
          cache_file = self.pcache_dst_dir + "data"
@@ -844,7 +865,7 @@ class Pcache:
          # Transfer completed, return the transfer command status
          return (0, None)

-     def maybe_start_cleaner_thread(self):
+     def maybe_start_cleaner_thread(self) -> None:
          if not self.over_limit():
              return
          # exit immediately if another cleaner is active
@@ -868,7 +889,7 @@ class Pcache:
              self.unlock_file(cleaner_lock)
              os._exit(0)

-     def make_hard_link(self, src, dst):
+     def make_hard_link(self, src: "StrOrBytesPath", dst: "StrOrBytesPath") -> Optional[int]:
          self.log(INFO, "linking %s to %s", src, dst)
          try:
              if os.path.exists(dst):
@@ -890,13 +911,13 @@ class Pcache:
              self.log(INFO, "cannot stat %s", dst)
          return ret

-     def reject(self, name):
+     def reject(self, name: str) -> bool:
          for pat in self.reject_patterns:
              if pat.search(name):
                  return True
          return False

-     def accept(self, name):
+     def accept(self, name: str) -> bool:
          if not self.accept_patterns:
              return True
          for pat in self.accept_patterns:
@@ -904,7 +925,7 @@ class Pcache:
                  return True
          return False

-     def get_stat(self, stats_dir, stat_name):
+     def get_stat(self, stats_dir: str, stat_name: str) -> int:
          filename = os.path.join(self.pcache_dir, stats_dir, stat_name)
          try:
              f = open(filename, 'r')
@@ -914,12 +935,12 @@ class Pcache:
              data = 0
          return data

-     def print_stats(self):
+     def print_stats(self) -> None:
          print(("Cache size: %s", unitize(self.get_stat("CACHE", "size"))))
          print(("Cache hits: %s", self.get_stat("stats", "cache_hits")))
          print(("Cache misses: %s", self.get_stat("stats", "cache_misses")))

-     def reset_stats(self):
+     def reset_stats(self) -> None:
          stats_dir = os.path.join(self.pcache_dir, "stats")
          try:
              for f in os.listdir(stats_dir):
@@ -932,7 +953,7 @@ class Pcache:
              # XXXX error handling
              pass

-     def update_stat_file(self, stats_dir, name, delta):  # internal
+     def update_stat_file(self, stats_dir: str, name: str, delta: int) -> None:  # internal
          stats_dir = os.path.join(self.pcache_dir, stats_dir)
          self.mkdir_p(stats_dir)
          self.lock_dir(stats_dir)
@@ -956,13 +977,13 @@ class Pcache:
          # XXX
          self.unlock_dir(stats_dir)

-     def update_stats(self, name, delta=1):
+     def update_stats(self, name: str, delta: int = 1) -> None:
          return self.update_stat_file("stats", name, delta)

-     def update_cache_size(self, bytes_):
+     def update_cache_size(self, bytes_: int) -> None:
          return self.update_stat_file("CACHE", "size", bytes_)

-     def get_cache_size(self):
+     def get_cache_size(self) -> Optional[int]:
          filename = os.path.join(self.pcache_dir, "CACHE", "size")
          size = 0

@@ -978,14 +999,14 @@ class Pcache:
              size = self.do_cache_inventory()

          # The size should never be negative, so lets cleanup and start over
-         if size < 0:
+         if size is not None and size < 0:
              self.log(WARN, "CACHE corruption found. Negative CACHE size: %d", size)
              self.flush_cache()
              size = 0

          return size

-     def do_cache_inventory(self):
+     def do_cache_inventory(self) -> Optional[int]:

          inventory_lock = os.path.join(self.pcache_dir, ".inventory")
          if self.lock_file(inventory_lock, blocking=False):
@@ -1018,7 +1039,7 @@ class Pcache:
          self.log(INFO, "inventory complete, cache size %s", size)
          return size

-     def daemonize(self):
+     def daemonize(self) -> None:
          if self.debug:
              return
          try:
@@ -1057,7 +1078,7 @@ class Pcache:
              pass

      # Panda server callback functions
-     def do_http_post(self, url, data):
+     def do_http_post(self, url: str, data: "_QueryType") -> None:
          # see http://www.faqs.org/faqs/unix-faq/faq/part3/section-13.html
          # for explanation of double-fork (is it overkill here?)
          pid = os.fork()
@@ -1074,7 +1095,7 @@ class Pcache:
          # This will retry for up to 1 hour, at 2 minute intervals
          while retry < 30:
              try:
-                 u = urlopen(url, data=urlencode(data))
+                 u = urlopen(url, data=urlencode(data))  # type: ignore
                  ret = u.read()
                  u.close()
                  self.log(INFO, "http post to %s, retry %s, data='%s', return='%s'",
@@ -1089,25 +1110,25 @@ class Pcache:
          # finished, don't keep the child thread around!
          os._exit(0)

-     def panda_flush_cache(self):
+     def panda_flush_cache(self) -> None:
          self.do_http_post(self.panda_url + "flushCacheDB",
                            data={"site": self.sitename,
                                  "node": self.hostname})

-     def panda_add_cache_files(self, guids):
+     def panda_add_cache_files(self, guids: "Iterable[str]") -> None:
          self.do_http_post(self.panda_url + "addFilesToCacheDB",
                            data={"site": self.sitename,
                                  "node": self.hostname,
                                  "guids": ','.join(guids)})

-     def panda_del_cache_files(self, guids):
+     def panda_del_cache_files(self, guids: "Iterable[str]") -> None:
          self.do_http_post(self.panda_url + "deleteFilesFromCacheDB",
                            data={"site": self.sitename,
                                  "node": self.hostname,
                                  "guids": ','.join(guids)})

      # Locking functions
-     def lock_dir(self, d, create=True, blocking=True):
+     def lock_dir(self, d: str, create: bool = True, blocking: bool = True) -> Optional[int]:
          lock_name = os.path.join(d, LOCK_NAME)
          lock_status = self.lock_file(lock_name, blocking)
          if (not lock_status):  # succeeded
@@ -1120,16 +1141,16 @@ class Pcache:
              lock_status = self.lock_file(lock_name, blocking)
          return lock_status

-     def unlock_dir(self, d):
+     def unlock_dir(self, d: str) -> Optional[Any]:
          return self.unlock_file(os.path.join(d, LOCK_NAME))

-     def lock_file(self, name, blocking=True):
+     def lock_file(self, name: str, blocking: bool = True) -> Optional[int]:
          if name in self.locks:
              self.log(DEBUG, "lock_file: %s already locked", name)
              return
          try:
              f = open(name, 'w')
-         except IOError as e:
+         except OSError as e:
              self.log(ERROR, "open: %s", e)
              return e.errno

@@ -1141,7 +1162,7 @@ class Pcache:
              try:
                  status = fcntl.lockf(f, flag)
                  break
-             except IOError as e:
+             except OSError as e:
                  if e.errno in (errno.EAGAIN, errno.EACCES) and not blocking:
                      f.close()
                      del self.locks[name]
@@ -1152,7 +1173,7 @@ class Pcache:
                      self.fail(106)
          return status

-     def unlock_file(self, name):
+     def unlock_file(self, name: str) -> Optional[Any]:
          f = self.locks.get(name)
          if not f:
              self.log(DEBUG, "unlock_file: %s not locked", name)
@@ -1169,7 +1190,7 @@ class Pcache:
          del self.locks[name]
          return status

-     def unlock_all(self):
+     def unlock_all(self) -> None:
          for filename, f in list(self.locks.items()):
              try:
                  f.close()
@@ -1178,7 +1199,7 @@ class Pcache:
                  pass

      # Cleanup functions
-     def delete_file_and_parents(self, name):
+     def delete_file_and_parents(self, name: str) -> None:
          try:
              os.unlink(name)
          except OSError as e:
@@ -1187,7 +1208,7 @@ class Pcache:
                  self.fail(107)
          self.delete_parents_recursive(name)

-     def delete_parents_recursive(self, name):  # internal
+     def delete_parents_recursive(self, name: str) -> None:  # internal
          try:
              dirname = os.path.dirname(name)
              if not os.listdir(dirname):
@@ -1196,7 +1217,7 @@ class Pcache:
          except OSError as e:
              self.log(DEBUG, "delete_parents_recursive: %s", e)

-     def update_mru(self):
+     def update_mru(self) -> None:
          now = time.time()
          link_to_mru = self.pcache_dst_dir + "mru"
          if os.path.exists(link_to_mru):
@@ -1251,13 +1272,13 @@ class Pcache:
              self.log(ERROR, "symlink: %s %s", e, link_from_mru)
              self.fail(109)

-     def cleanup_failed_transfer(self):
+     def cleanup_failed_transfer(self) -> None:
          try:
              os.unlink(self.pcache_dir + 'xfer')
          except:
              pass

-     def empty_dir(self, d):
+     def empty_dir(self, d: str) -> None:
          status = None
          bytes_deleted = 0
          for name in os.listdir(d):
@@ -1296,14 +1317,14 @@ class Pcache:
          self.delete_parents_recursive(d)
          return status

-     def chmod(self, path, mode):
+     def chmod(self, path: str, mode: int) -> None:
          try:
              os.chmod(path, mode)
          except OSError as e:
              if e.errno != errno.EPERM:  # Cannot chmod files we don't own!
                  self.log(ERROR, "chmod %s %s", path, e)

-     def mkdir_p(self, d, mode=0o777):
+     def mkdir_p(self, d: str, mode: int = 0o777) -> Optional[int]:
          # Thread-safe
          try:
              os.makedirs(d, mode)
@@ -1316,7 +1337,7 @@ class Pcache:
              sys.stderr.write("%s\n" % str(e))
              return e.errno

-     def log(self, level, msg, *args):
+     def log(self, level: str, msg: str, *args) -> None:

          # Disable all logging
          if (self.quiet):
@@ -1345,7 +1366,7 @@ class Pcache:
          sys.stderr.write(msg)
          sys.stderr.flush()

-     def fail(self, errcode=1):
+     def fail(self, errcode: int = 1) -> None:
          self.unlock_all()
          sys.exit(errcode)
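
For orientation, a minimal usage sketch of the retyped Pcache.check_and_link() entry point whose new keyword signature appears in the diff above. It assumes Pcache() can be constructed with its defaults; the paths and keyword values below are illustrative placeholders, not taken from the diff.

# Illustrative sketch only: drives the annotated check_and_link() shown above.
from rucio.common.pcache import Pcache

pcache = Pcache()
# src and dst are hypothetical; dst is placed under the default scratch_dir.
exit_status, copy_status = pcache.check_and_link(
    src="root://storage.example.org//store/data/file.root",
    dst="/scratch/job123/file.root",
    scratch_dir="/scratch/",
    force=False,
)
# Per the diff, the method returns whether the file was cached, copied, or errored (and its code).
print(exit_status, copy_status)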