PyCatFile 0.24.6__py3-none-any.whl → 0.25.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pycatfile.py CHANGED
@@ -14,7 +14,7 @@
  Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
  Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
- $FileInfo: pycatfile.py - Last Update: 11/3/2025 Ver. 0.24.6 RC 1 - Author: cooldude2k $
+ $FileInfo: pycatfile.py - Last Update: 11/3/2025 Ver. 0.25.0 RC 1 - Author: cooldude2k $
  '''
 
  from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -73,6 +73,17 @@ except ImportError:
  except ImportError:
      import json
 
+ testyaml = False
+ try:
+     import oyaml as yaml
+     testyaml = True
+ except ImportError:
+     try:
+         import yaml
+         testyaml = True
+     except ImportError:
+         testyaml = False
+
  try:
      import configparser
  except ImportError:
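The new block gives PyCatFile optional YAML support with an order-preserving preference: oyaml is tried first, plain PyYAML is the fallback, and testyaml records whether either import succeeded. A minimal sketch of how the flag is meant to gate use later on (yaml.safe_load is the real PyYAML API; parse_metadata is an illustrative name, not part of the package):

    def parse_metadata(raw_text, kind):
        # Only attempt YAML when one of the yaml modules actually imported.
        if kind == "yaml" and testyaml:
            return yaml.safe_load(raw_text) or {}
        return {}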
@@ -115,6 +126,16 @@ else:
      bytes_type = bytes
      text_type = str
 
+     # Text streams (as provided by Python)
+     PY_STDIN_TEXT = sys.stdin
+     PY_STDOUT_TEXT = sys.stdout
+     PY_STDERR_TEXT = sys.stderr
+
+     # Binary-friendly streams (use .buffer on Py3, fall back on Py2)
+     PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
+     PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
+     PY_STDERR_BUF = getattr(sys.stderr, "buffer", sys.stderr)
+
      # Text vs bytes tuples you can use with isinstance()
      TEXT_TYPES = (basestring,)  # "str or unicode" on Py2, "str" on Py3
      BINARY_TYPES = (bytes,) if not PY2 else (str,)  # bytes on Py3, str on Py2
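The PY_*_BUF aliases rely on getattr's default value: Python 3 text streams expose a .buffer attribute holding the underlying binary stream, while Python 2's sys.stdin/stdout already accept bytes, so the stream itself is the fallback. The same pattern in isolation:

    import sys

    stdout_buf = getattr(sys.stdout, "buffer", sys.stdout)
    stdout_buf.write(b"raw bytes, no implicit text encoding\n")
    stdout_buf.flush()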
@@ -211,12 +232,6 @@ if sys.version_info[0] == 2:
      except (NameError, AttributeError):
          pass
 
-     # CRC32 import
-     try:
-         from zlib import crc32
-     except ImportError:
-         from binascii import crc32
-
      # Define FileNotFoundError for Python 2
      try:
          FileNotFoundError
@@ -251,9 +266,7 @@ py7zr_support = False
  try:
      import py7zr
      py7zr_support = True
- except ImportError:
-     pass
- except OSError:
+ except (ImportError, OSError):
      pass
 
  # TAR file checking
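Folding the two handlers into except (ImportError, OSError): is behavior-preserving, since a single except clause accepts a tuple of exception types; the same consolidation is applied to every optional-dependency probe below. The probe pattern in isolation, assuming the same py7zr dependency:

    py7zr_support = False
    try:
        import py7zr  # optional; OSError covers broken native libraries
        py7zr_support = True
    except (ImportError, OSError):
        pass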
@@ -279,9 +292,7 @@ haveparamiko = False
  try:
      import paramiko
      haveparamiko = True
- except ImportError:
-     pass
- except OSError:
+ except (ImportError, OSError):
      pass
 
  # PySFTP support
@@ -289,9 +300,7 @@ havepysftp = False
  try:
      import pysftp
      havepysftp = True
- except ImportError:
-     pass
- except OSError:
+ except (ImportError, OSError):
      pass
 
  # Add the mechanize import check
@@ -299,9 +308,7 @@ havemechanize = False
  try:
      import mechanize
      havemechanize = True
- except ImportError:
-     pass
- except OSError:
+ except (ImportError, OSError):
      pass
 
  # Requests support
@@ -311,9 +318,7 @@ try:
      haverequests = True
      import urllib3
      logging.getLogger("urllib3").setLevel(logging.WARNING)
- except ImportError:
-     pass
- except OSError:
+ except (ImportError, OSError):
      pass
 
  # HTTPX support
@@ -323,9 +328,7 @@ try:
      havehttpx = True
      logging.getLogger("httpx").setLevel(logging.WARNING)
      logging.getLogger("httpcore").setLevel(logging.WARNING)
- except ImportError:
-     pass
- except OSError:
+ except (ImportError, OSError):
      pass
 
  # HTTP and URL parsing
@@ -416,9 +419,14 @@ __include_defaults__ = True
  __use_inmemfile__ = True
  __use_spoolfile__ = False
  __use_spooldir__ = tempfile.gettempdir()
- BYTES_PER_MiB = 1024 * 1024
- DEFAULT_SPOOL_MAX = 8 * BYTES_PER_MiB
+ BYTES_PER_KiB = 1024
+ BYTES_PER_MiB = 1024 * BYTES_PER_KiB
+ # Spool: not tiny, but won’t blow up RAM if many are in use
+ DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB  # 4 MiB per spooled temp file
  __spoolfile_size__ = DEFAULT_SPOOL_MAX
+ # Buffer: bigger than stdlib default (16 KiB), but still modest
+ DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # 256 KiB copy buffer
+ __filebuff_size__ = DEFAULT_BUFFER_MAX
  __program_name__ = "Py"+__file_format_default__
  __use_env_file__ = True
  __use_ini_file__ = True
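__spoolfile_size__ feeds tempfile.SpooledTemporaryFile(max_size=...): data stays in memory until a write pushes it past the threshold, then it rolls over to a real file on disk, so halving the default to 4 MiB bounds RAM when many spools are live at once. A small sketch with the stdlib API:

    import tempfile

    spool = tempfile.SpooledTemporaryFile(max_size=4 * 1024 * 1024)
    spool.write(b"x" * 1024)               # still an in-memory buffer
    spool.write(b"y" * (8 * 1024 * 1024))  # past max_size: rolls over to disk
    spool.seek(0)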
@@ -641,12 +649,12 @@ __project__ = __program_name__
  __program_alt_name__ = __program_name__
  __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
  __project_release_url__ = __project_url__+"/releases/latest"
- __version_info__ = (0, 24, 6, "RC 1", 1)
- __version_date_info__ = (2025, 11, 3, "RC 1", 1)
+ __version_info__ = (0, 25, 0, "RC 1", 1)
+ __version_date_info__ = (2025, 11, 5, "RC 1", 1)
  __version_date__ = str(__version_date_info__[0]) + "." + str(
      __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
  __revision__ = __version_info__[3]
- __revision_id__ = "$Id: d68fe3665e19a576beefb4e2c06db1bf32d5f94a $"
+ __revision_id__ = "$Id: 74aa70c1670492a2322dd50826f637a861fcab9e $"
  if(__version_info__[4] is not None):
      __version_date_plusrc__ = __version_date__ + \
          "-" + str(__version_date_info__[4])
@@ -797,7 +805,7 @@ geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prour
  if(platform.python_implementation() != ""):
      py_implementation = platform.python_implementation()
  if(platform.python_implementation() == ""):
-     py_implementation = "Python"
+     py_implementation = "CPython"
  geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
  )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
  geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -2385,7 +2393,7 @@ def GetTotalSize(file_list):
          try:
              total_size += os.path.getsize(item)
          except OSError as e:
-             sys.stderr.write("Error accessing file {}: {}\n".format(item, e))
+             PY_STDERR_TEXT.write("Error accessing file {}: {}\n".format(item, e))
      return total_size
 
 
@@ -2622,7 +2630,7 @@ class ZlibFile(object):
          scanned_leading = 0  # for tolerant header scan
 
          while True:
-             data = self.file.read(1 << 20)  # 1 MiB blocks
+             data = self.file.read(__filebuff_size__)  # __filebuff_size__ blocks (256 KiB default)
              if not data:
                  if d is not None:
                      self._spool.write(d.flush())
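Reading in fixed __filebuff_size__ chunks keeps decompression memory flat regardless of archive size. The same loop shape in isolation, assuming a plain zlib stream (zlib.decompressobj is the real stdlib API; the function is a sketch, not the class method above):

    import zlib

    def decompress_stream(src, dst, bufsize=256 * 1024):
        d = zlib.decompressobj()
        while True:
            data = src.read(bufsize)
            if not data:
                break
            dst.write(d.decompress(data))
        dst.write(d.flush())  # emit anything still buffered in the decompressor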
@@ -2780,7 +2788,7 @@ class ZlibFile(object):
 
          # Buffer and compress in chunks to limit memory
          self._write_buf += data
-         if len(self._write_buf) >= (1 << 20):  # 1 MiB threshold
+         if len(self._write_buf) >= (__filebuff_size__):  # flush at __filebuff_size__
              chunk = self._compressor.compress(bytes(self._write_buf))
              if chunk:
                  self.file.write(chunk)
@@ -2890,7 +2898,7 @@ class ZlibFile(object):
          """
          if not isinstance(data, (bytes, bytearray, memoryview)):
              raise TypeError("from_bytes() expects a bytes-like object")
-         bio = io.BytesIO(bytes(data) if not isinstance(data, bytes) else data)
+         bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
          return cls(fileobj=bio, mode=mode, **kw)
 
      # compatibility aliases for unwrapping utilities
@@ -2926,7 +2934,7 @@ def compress_bytes(payload, level=6, wbits=15, text=False, **kw):
        out = compress_bytes(b"hello")
        out = compress_bytes(u"hello\n", text=True, encoding="utf-8", newline="\n")
      """
-     bio = io.BytesIO()
+     bio = MkTempFile()
      mode = 'wt' if text else 'wb'
      f = ZlibFile(fileobj=bio, mode=mode, level=level, wbits=wbits, **kw)
      try:
@@ -3085,7 +3093,7 @@ class GzipFile(object):
 
          self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)
 
-         CHUNK = 1 << 20
+         CHUNK = __filebuff_size__
          pending = b""
          d = None
          absolute_offset = 0
@@ -3248,7 +3256,7 @@ class GzipFile(object):
 
          # Stage and compress in chunks
          self._write_buf += data
-         if len(self._write_buf) >= (1 << 20):  # 1 MiB threshold
+         if len(self._write_buf) >= (__filebuff_size__):  # flush at __filebuff_size__
              out = self._compressor.compress(bytes(self._write_buf))
              if out:
                  self.file.write(out)
@@ -3348,7 +3356,7 @@ class GzipFile(object):
          """
          if not isinstance(data, (bytes, bytearray, memoryview)):
              raise TypeError("from_bytes() expects a bytes-like object")
-         bio = io.BytesIO(bytes(data) if not isinstance(data, bytes) else data)
+         bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
          return cls(fileobj=bio, mode=mode, **kw)
 
      # compatibility aliases for unwrapping utilities
@@ -3390,7 +3398,7 @@ def gzip_compress_bytes(payload, level=6, text=False, **kw):
      - text=False: 'payload' must be bytes-like; written via GzipFile('wb')
        You can pass newline/encoding/errors to control text encoding.
      """
-     bio = io.BytesIO()
+     bio = MkTempFile()
      mode = 'wt' if text else 'wb'
      gf = GzipFile(fileobj=bio, mode=mode, level=level, **kw)
      try:
@@ -3622,280 +3630,6 @@ def crc_generic(msg, width, poly, init, xorout, refin, refout):
          crc = _reflect(crc, width)
      return (crc ^ xorout) & mask
 
- # =========================
- # Named CRCs
- # =========================
- # CRC-16/ANSI (ARC/MODBUS family with init=0xFFFF by default)
- def crc16_ansi(msg, initial_value=0xFFFF):
-     return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)
-
- def crc16_ibm(msg, initial_value=0xFFFF):
-     return crc16_ansi(msg, initial_value)
-
- def crc16(msg):
-     return crc16_ansi(msg, 0xFFFF)
-
- def crc16_ccitt(msg, initial_value=0xFFFF):
-     # CCITT-FALSE
-     return crc_generic(msg, 16, 0x1021, initial_value & 0xFFFF, 0x0000, False, False)
-
- def crc16_x25(msg):
-     return crc_generic(msg, 16, 0x1021, 0xFFFF, 0xFFFF, True, True)
-
- def crc16_kermit(msg):
-     return crc_generic(msg, 16, 0x1021, 0x0000, 0x0000, True, True)
-
- def crc64_ecma(msg, initial_value=0x0000000000000000):
-     return crc_generic(msg, 64, 0x42F0E1EBA9EA3693,
-                        initial_value & 0xFFFFFFFFFFFFFFFF,
-                        0x0000000000000000, False, False)
-
- def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
-     return crc_generic(msg, 64, 0x000000000000001B,
-                        initial_value & 0xFFFFFFFFFFFFFFFF,
-                        0xFFFFFFFFFFFFFFFF, True, True)
-
- # =========================
- # Incremental CRC context
- # =========================
- CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
- _CRC_SPECS = {
-     "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-     "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-     "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-     "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-     "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-     "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
- }
-
- class CRCContext(object):
-     __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-     def __init__(self, spec):
-         self.spec = spec
-         self.table = _build_table(spec.width, spec.poly, spec.refin)
-         self.mask = (1 << spec.width) - 1
-         self.shift = spec.width - 8
-         self.crc = spec.init & self.mask
-
-     def update(self, data):
-         if not isinstance(data, (bytes, bytearray, memoryview)):
-             data = bytes(bytearray(data))
-         buf = _mv_tobytes(memoryview(data))
-         if self.spec.refin:
-             c = self.crc
-             tbl = self.table
-             for b in buf:
-                 if not isinstance(b, int):  # Py2
-                     b = ord(b)
-                 c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-             self.crc = c & self.mask
-         else:
-             c = self.crc
-             tbl = self.table
-             sh = self.shift
-             msk = self.mask
-             for b in buf:
-                 if not isinstance(b, int):
-                     b = ord(b)
-                 c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-             self.crc = c & msk
-         return self
-
-     def digest_int(self):
-         c = self.crc
-         if self.spec.refout ^ self.spec.refin:
-             c = _reflect(c, self.spec.width)
-         return (c ^ self.spec.xorout) & self.mask
-
-     def hexdigest(self):
-         width_hex = (self.spec.width + 3) // 4
-         return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
- def crc_context_from_name(name_norm):
-     spec = _CRC_SPECS.get(name_norm)
-     if spec is None:
-         raise KeyError("Unknown CRC spec: {}".format(name_norm))
-     return CRCContext(spec)
-
- # =========================
- # Dispatch helpers
- # =========================
- _CRC_ALIASES = {
-     # keep your historical behaviors
-     "crc16": "crc16_ansi",
-     "crc16_ibm": "crc16_ansi",
-     "crc16_ansi": "crc16_ansi",
-     "crc16_modbus": "crc16_ansi",
-     "crc16_ccitt": "crc16_ccitt",
-     "crc16_ccitt_false": "crc16_ccitt",
-     "crc16_x25": "crc16_x25",
-     "crc16_kermit": "crc16_kermit",
-     "crc64": "crc64_iso",
-     "crc64_iso": "crc64_iso",
-     "crc64_ecma": "crc64_ecma",
-     "adler32": "adler32",
-     "crc32": "crc32",
- }
-
- _CRC_WIDTH = {
-     "crc16_ansi": 16,
-     "crc16_ccitt": 16,
-     "crc16_x25": 16,
-     "crc16_kermit": 16,
-     "crc64_iso": 64,
-     "crc64_ecma": 64,
-     "adler32": 32,
-     "crc32": 32,
- }
-
- def _crc_compute(algo_key, data_bytes):
-     if algo_key == "crc16_ansi":
-         return crc16_ansi(data_bytes) & 0xFFFF
-     if algo_key == "crc16_ccitt":
-         return crc16_ccitt(data_bytes) & 0xFFFF
-     if algo_key == "crc16_x25":
-         return crc16_x25(data_bytes) & 0xFFFF
-     if algo_key == "crc16_kermit":
-         return crc16_kermit(data_bytes) & 0xFFFF
-     if algo_key == "crc64_iso":
-         return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-     if algo_key == "crc64_ecma":
-         return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-     if algo_key == "adler32":
-         return zlib.adler32(data_bytes) & 0xFFFFFFFF
-     if algo_key == "crc32":
-         return zlib.crc32(data_bytes) & 0xFFFFFFFF
-     raise KeyError(algo_key)
-
- try:
-     hashlib_guaranteed
- except NameError:
-     hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
- def CheckSumSupportAlt(name, guaranteed):
-     try:
-         return name.lower() in guaranteed
-     except Exception:
-         return False
-
- # =========================
- # Public checksum API
- # =========================
- def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-     """
-     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
-     or a single field) and compute the requested checksum. Returns lowercase hex.
-     """
-     checksumtype_norm = (checksumtype or "crc32").lower()
-     algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-     delim = formatspecs.get('format_delimiter', u"\0")
-     hdr_bytes = _serialize_header_fields(inlist or [], delim)
-     if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
-         hdr_bytes = _to_bytes(hdr_bytes)
-     hdr_bytes = bytes(hdr_bytes)
-
-     if algo_key in _CRC_WIDTH:
-         n = _crc_compute(algo_key, hdr_bytes)
-         return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-     if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-         h = hashlib.new(algo_key)
-         h.update(hdr_bytes)
-         return h.hexdigest().lower()
-
-     return "0"
-
- def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-     """
-     Accepts bytes/str/file-like.
-     - Hashlib algos: streamed in 1 MiB chunks.
-     - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
-     - Falls back to one-shot for non-file-like inputs.
-     """
-     checksumtype_norm = (checksumtype or "crc32").lower()
-     algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-     # file-like streaming
-     if hasattr(instr, "read"):
-         # hashlib
-         if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-             h = hashlib.new(algo_key)
-             while True:
-                 chunk = instr.read(1 << 20)
-                 if not chunk:
-                     break
-                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                     chunk = bytes(bytearray(chunk))
-                 h.update(chunk)
-             return h.hexdigest().lower()
-
-         # CRC streaming via context
-         if algo_key in _CRC_SPECS:
-             ctx = crc_context_from_name(algo_key)
-             while True:
-                 chunk = instr.read(1 << 20)
-                 if not chunk:
-                     break
-                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                     chunk = bytes(bytearray(chunk))
-                 ctx.update(chunk)
-             return ctx.hexdigest()
-
-         # not known streaming algo: fallback to one-shot bytes
-         data = instr.read()
-         if not isinstance(data, (bytes, bytearray, memoryview)):
-             data = bytes(bytearray(data))
-     else:
-         data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
-     data = bytes(data)
-
-     # one-shot
-     if algo_key in _CRC_SPECS:
-         return crc_context_from_name(algo_key).update(data).hexdigest()
-
-     if algo_key in _CRC_WIDTH:
-         n = _crc_compute(algo_key, data)
-         return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-     if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-         h = hashlib.new(algo_key)
-         h.update(data)
-         return h.hexdigest().lower()
-
-     return "0"
-
- def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-     calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
-     want = (inchecksum or "0").strip().lower()
-     if want.startswith("0x"):
-         want = want[2:]
-     return hmac.compare_digest(want, calc)
-
- def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-     calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
-     want = (inchecksum or "0").strip().lower()
-     if want.startswith("0x"):
-         want = want[2:]
-     return hmac.compare_digest(want, calc)
-
-
- # =========================
- # Incremental CRC context
- # =========================
- CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
- _CRC_SPECS = {
-     "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-     "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-     "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-     "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-     "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-     "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
- }
-
  # --- helpers --------------------------------------------------------------
 
  try:
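crc_generic itself survives the cleanup, so the deleted named helpers remain one-liners away for any caller that still needs them; a hedged sketch using the parameters from the table removed above (these functions are no longer part of the package):

    def crc16_ansi(msg, initial_value=0xFFFF):
        # CRC-16/ARC family: poly 0x8005, reflected in/out, xorout 0
        return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)

    def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
        return crc_generic(msg, 64, 0x000000000000001B,
                           initial_value & 0xFFFFFFFFFFFFFFFF,
                           0xFFFFFFFFFFFFFFFF, True, True)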
@@ -3936,206 +3670,15 @@ def _bytes_to_int(b):
          value = (value << 8) | ch
      return value
 
-
- # --- your existing CRCContext (unchanged) ---------------------------------
-
- class CRCContext(object):
-     __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-     def __init__(self, spec):
-         self.spec = spec
-         self.table = _build_table(spec.width, spec.poly, spec.refin)
-         self.mask = (1 << spec.width) - 1
-         self.shift = spec.width - 8
-         self.crc = spec.init & self.mask
-
-     def update(self, data):
-         if not isinstance(data, (bytes, bytearray, memoryview)):
-             data = bytes(bytearray(data))
-         buf = _mv_tobytes(memoryview(data))
-         if self.spec.refin:
-             c = self.crc
-             tbl = self.table
-             for b in buf:
-                 if not isinstance(b, int):  # Py2
-                     b = ord(b)
-                 c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-             self.crc = c & self.mask
-         else:
-             c = self.crc
-             tbl = self.table
-             sh = self.shift
-             msk = self.mask
-             for b in buf:
-                 if not isinstance(b, int):
-                     b = ord(b)
-                 c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-             self.crc = c & msk
-         return self
-
-     def digest_int(self):
-         c = self.crc
-         if self.spec.refout ^ self.spec.refin:
-             c = _reflect(c, self.spec.width)
-         return (c ^ self.spec.xorout) & self.mask
-
-     def hexdigest(self):
-         width_hex = (self.spec.width + 3) // 4
-         return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-
- # --- hashlib-backed implementation ---------------------------------------
-
- class _HashlibCRCWrapper(object):
-     """
-     Wrap a hashlib object to present the same interface as CRCContext
-     (update, digest_int, hexdigest).
-
-     Assumes the hashlib algorithm already implements the exact CRC
-     specification (refin/refout/xorout/etc.).
-     """
-     __slots__ = ("_h", "spec", "mask", "width_hex")
-
-     def __init__(self, algo_name, spec):
-         self._h = hashlib.new(algo_name)
-         self.spec = spec
-         self.mask = (1 << spec.width) - 1
-         self.width_hex = (spec.width + 3) // 4
-
-     def update(self, data):
-         self._h.update(_coerce_bytes(data))
-         return self
-
-     def digest_int(self):
-         # Convert final digest bytes to an integer and mask to width
-         value = _bytes_to_int(self._h.digest())
-         return value & self.mask
-
-     def hexdigest(self):
-         h = self._h.hexdigest().lower()
-         # Normalize to the same number of hex digits as CRCContext
-         if len(h) < self.width_hex:
-             h = ("0" * (self.width_hex - len(h))) + h
-         elif len(h) > self.width_hex:
-             h = h[-self.width_hex:]
-         return h
-
-
- # --- public class: choose hashlib or fallback -----------------------------
-
- class CRC(object):
-     """
-     CRC wrapper that uses hashlib if available, otherwise falls back to
-     the pure-Python CRCContext.
-
-     spec.hashlib_name (preferred) or spec.name is used as the hashlib
-     algorithm name, e.g. 'crc32', 'crc32c', etc.
-     """
-
-     __slots__ = ("spec", "_impl")
-
-     def __init__(self, spec):
-         self.spec = spec
-
-         algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
-         impl = None
-
-         if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
-             # Use hashlib-backed implementation
-             impl = _HashlibCRCWrapper(algo_name, spec)
-         else:
-             # Fallback to your pure-Python implementation
-             impl = CRCContext(spec)
-
-         self._impl = impl
-
-     def update(self, data):
-         self._impl.update(data)
-         return self
-
-     def digest_int(self):
-         return self._impl.digest_int()
-
-     def hexdigest(self):
-         return self._impl.hexdigest()
-
- def crc_context_from_name(name_norm):
-     spec = _CRC_SPECS.get(name_norm)
-     if spec is None:
-         raise KeyError("Unknown CRC spec: {}".format(name_norm))
-     return CRCContext(spec)
-
- # =========================
- # Dispatch helpers
- # =========================
- _CRC_ALIASES = {
-     # keep your historical behaviors
-     "crc16": "crc16_ansi",
-     "crc16_ibm": "crc16_ansi",
-     "crc16_ansi": "crc16_ansi",
-     "crc16_modbus": "crc16_ansi",
-     "crc16_ccitt": "crc16_ccitt",
-     "crc16_ccitt_false": "crc16_ccitt",
-     "crc16_x25": "crc16_x25",
-     "crc16_kermit": "crc16_kermit",
-     "crc64": "crc64_iso",
-     "crc64_iso": "crc64_iso",
-     "crc64_ecma": "crc64_ecma",
-     "adler32": "adler32",
-     "crc32": "crc32",
- }
-
- _CRC_WIDTH = {
-     "crc16_ansi": 16,
-     "crc16_ccitt": 16,
-     "crc16_x25": 16,
-     "crc16_kermit": 16,
-     "crc64_iso": 64,
-     "crc64_ecma": 64,
-     "adler32": 32,
-     "crc32": 32,
- }
-
- def _crc_compute(algo_key, data_bytes):
-     if algo_key == "crc16_ansi":
-         return crc16_ansi(data_bytes) & 0xFFFF
-     if algo_key == "crc16_ccitt":
-         return crc16_ccitt(data_bytes) & 0xFFFF
-     if algo_key == "crc16_x25":
-         return crc16_x25(data_bytes) & 0xFFFF
-     if algo_key == "crc16_kermit":
-         return crc16_kermit(data_bytes) & 0xFFFF
-     if algo_key == "crc64_iso":
-         return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-     if algo_key == "crc64_ecma":
-         return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-     if algo_key == "adler32":
-         return zlib.adler32(data_bytes) & 0xFFFFFFFF
-     if algo_key == "crc32":
-         return zlib.crc32(data_bytes) & 0xFFFFFFFF
-     raise KeyError(algo_key)
-
- try:
-     hashlib_guaranteed
- except NameError:
-     hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
- def CheckSumSupportAlt(name, guaranteed):
-     try:
-         return name.lower() in guaranteed
-     except Exception:
-         return False
-
  # =========================
  # Public checksum API
  # =========================
- def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
+ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
      """
      Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
      or a single field) and compute the requested checksum. Returns lowercase hex.
      """
-     checksumtype_norm = (checksumtype or "crc32").lower()
-     algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+     algo_key = (checksumtype or "md5").lower()
 
      delim = formatspecs.get('format_delimiter', u"\0")
      hdr_bytes = _serialize_header_fields(inlist or [], delim)
@@ -4143,34 +3686,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, format
          hdr_bytes = _to_bytes(hdr_bytes)
      hdr_bytes = bytes(hdr_bytes)
 
-     if algo_key in _CRC_WIDTH:
-         n = _crc_compute(algo_key, hdr_bytes)
-         return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-     if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-         h = hashlib.new(algo_key)
-         h.update(hdr_bytes)
-         return h.hexdigest().lower()
+     if CheckSumSupport(algo_key, hashlib_guaranteed):
+         h = hashlib.new(algo_key)
+         h.update(hdr_bytes)
+         return h.hexdigest().lower()
 
      return "0"
 
- def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
+ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
      """
      Accepts bytes/str/file-like.
      - Hashlib algos: streamed in 1 MiB chunks.
      - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
      - Falls back to one-shot for non-file-like inputs.
      """
-     checksumtype_norm = (checksumtype or "crc32").lower()
-     algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+     algo_key = (checksumtype or "md5").lower()
 
      # file-like streaming
-     if hasattr(instr, "read"):
+     if hasattr(inbytes, "read"):
          # hashlib
-         if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+
+         if CheckSumSupport(algo_key, hashlib_guaranteed):
              h = hashlib.new(algo_key)
              while True:
-                 chunk = instr.read(1 << 20)
+                 chunk = inbytes.read(__filebuff_size__)
                  if not chunk:
                      break
                  if not isinstance(chunk, (bytes, bytearray, memoryview)):
@@ -4178,49 +3717,31 @@ def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__
                  h.update(chunk)
              return h.hexdigest().lower()
 
-         # CRC streaming via context
-         if algo_key in _CRC_SPECS:
-             ctx = crc_context_from_name(algo_key)
-             while True:
-                 chunk = instr.read(1 << 20)
-                 if not chunk:
-                     break
-                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                     chunk = bytes(bytearray(chunk))
-                 ctx.update(chunk)
-             return ctx.hexdigest()
-
          # not known streaming algo: fallback to one-shot bytes
-         data = instr.read()
+         data = inbytes.read()
          if not isinstance(data, (bytes, bytearray, memoryview)):
              data = bytes(bytearray(data))
      else:
-         data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
+         data = _to_bytes(inbytes) if (encodedata or not isinstance(inbytes, (bytes, bytearray, memoryview))) else inbytes
      data = bytes(data)
 
      # one-shot
-     if algo_key in _CRC_SPECS:
-         return crc_context_from_name(algo_key).update(data).hexdigest()
-
-     if algo_key in _CRC_WIDTH:
-         n = _crc_compute(algo_key, data)
-         return _hex_pad(n, _CRC_WIDTH[algo_key])
 
-     if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+     if CheckSumSupport(algo_key, hashlib_guaranteed):
          h = hashlib.new(algo_key)
          h.update(data)
          return h.hexdigest().lower()
 
      return "0"
 
- def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
+ def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
      calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
      want = (inchecksum or "0").strip().lower()
      if want.startswith("0x"):
          want = want[2:]
      return hmac.compare_digest(want, calc)
 
- def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
+ def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
      calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
      want = (inchecksum or "0").strip().lower()
      if want.startswith("0x"):
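After this change every checksum is routed through hashlib (the default moves from crc32 to md5), and file-like inputs are consumed in __filebuff_size__ chunks rather than 1 MiB blocks. The streaming core, reduced to a standalone sketch (hashlib.new/update are the real stdlib API; file_checksum is an illustrative name):

    import hashlib

    def file_checksum(fp, algo="md5", bufsize=256 * 1024):
        h = hashlib.new(algo)
        while True:
            chunk = fp.read(bufsize)  # bounded reads keep memory use flat
            if not chunk:
                break
            h.update(chunk)
        return h.hexdigest().lower()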
@@ -4267,66 +3788,6 @@ def GetDataFromArrayAlt(structure, path, default=None):
      return element
 
 
- def GetHeaderChecksum(inlist=[], checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-     fileheader = AppendNullBytes(inlist, formatspecs['format_delimiter']) if isinstance(
-         inlist, list) else AppendNullByte(inlist, formatspecs['format_delimiter'])
-     if encodedata and hasattr(fileheader, "encode"):
-         fileheader = fileheader.encode('UTF-8')
-     checksum_methods = {
-         "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-         "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-         "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-         "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-         "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-         "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-         "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-         "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-         "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-     }
-     if checksumtype in checksum_methods:
-         return checksum_methods[checksumtype](fileheader)
-     elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-         checksumoutstr = hashlib.new(checksumtype)
-         checksumoutstr.update(fileheader)
-         return checksumoutstr.hexdigest().lower()
-     return format(0, 'x').lower()
-
-
- def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-     if encodedata and hasattr(instr, "encode"):
-         instr = instr.encode('UTF-8')
-     checksum_methods = {
-         "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-         "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-         "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-         "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-         "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-         "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-         "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-         "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-         "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-     }
-     if checksumtype in checksum_methods:
-         return checksum_methods[checksumtype](instr)
-     elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-         checksumoutstr = hashlib.new(checksumtype)
-         checksumoutstr.update(instr)
-         return checksumoutstr.hexdigest().lower()
-     return format(0, 'x').lower()
-
-
- def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-     infileheadercshex = GetHeaderChecksum(
-         inlist, checksumtype, True, formatspecs).lower()
-     return inchecksum.lower() == infileheadercshex
-
-
- def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-     catinfilecshex = GetFileChecksum(
-         infile, checksumtype, True, formatspecs).lower()
-     return inchecksum.lower() == catinfilecshex
-
-
  # ========= pushback-aware delimiter reader =========
  class _DelimiterReader(object):
      """
@@ -4659,7 +4120,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
      if(not hasattr(fp, "read")):
          return False
      delimiter = formatspecs['format_delimiter']
-     fheaderstart = fp.tell()
      if(formatspecs['new_style']):
          HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
      else:
@@ -4682,22 +4142,74 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
      fjsonchecksumtype = HeaderOut[30]
      fjsonchecksum = HeaderOut[31]
      fjsoncontent = {}
-     fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-     if(fjsonsize > 0):
-         try:
-             fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
-         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+     if(fjsontype=="json"):
+         fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         if(fjsonsize > 0):
              try:
-                 fjsoncontent = json.loads(fprejsoncontent)
+                 fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                 fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
              except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-                 fprejsoncontent = ""
-                 fjsoncontent = {}
-     else:
-         fprejsoncontent = ""
+                 try:
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = json.loads(fprejsoncontent)
+                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                     fprejsoncontent = ""
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = {}
+         else:
+             fprejsoncontent = ""
+             fjsonrawcontent = fprejsoncontent
+             fjsoncontent = {}
+     elif(testyaml and fjsontype == "yaml"):
+         fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         if (fjsonsize > 0):
+             try:
+                 # try base64 → utf-8 → YAML
+                 fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                 fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+             except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                 try:
+                     # fall back to treating the bytes as plain text YAML
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                 except (UnicodeDecodeError, yaml.YAMLError):
+                     # final fallback: empty
+                     fprejsoncontent = ""
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = {}
+         else:
+             fprejsoncontent = ""
+             fjsonrawcontent = fprejsoncontent
+             fjsoncontent = {}
+     elif(not testyaml and fjsontype == "yaml"):
          fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         fprejsoncontent = ""
+         fjsonrawcontent = fprejsoncontent
+     elif(fjsontype=="list"):
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         flisttmp = MkTempFile()
+         flisttmp.write(fprejsoncontent.encode())
+         flisttmp.seek(0)
+         fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+         flisttmp.close()
+         fjsonrawcontent = fjsoncontent
+         if(fjsonlen==1):
+             try:
+                 fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                 fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                 fjsonlen = len(fjsoncontent)
+             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                 try:
+                     fjsonrawcontent = fjsoncontent[0]
+                     fjsoncontent = json.loads(fjsoncontent[0])
+                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                     pass
      fp.seek(len(delimiter), 1)
      jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-     if(jsonfcs != fjsonchecksum and not skipchecksum):
+     if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
          VerbosePrintOut("File JSON Data Checksum Error with file " +
                          fname + " at offset " + str(fheaderstart))
          VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
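Every digest comparison in this release switches from != to hmac.compare_digest, which takes time independent of how many leading characters match and so avoids a timing side channel. Minimal usage (the stdlib API; the helper name is illustrative):

    import hmac

    def checksums_match(expected_hex, actual_hex):
        # Both arguments must be the same type (str/str or bytes/bytes).
        return hmac.compare_digest(expected_hex.lower(), actual_hex.lower())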
@@ -4711,8 +4223,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
                          fname + " at offset " + str(fheaderstart))
          VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
          return False
-     fhend = fp.tell() - 1
-     fcontentstart = fp.tell()
      fcontents = MkTempFile()
      if(fsize > 0 and not listonly):
          if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -4726,9 +4236,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
              fp.seek(fcsize, 1)
      fcontents.seek(0, 0)
      newfccs = GetFileChecksum(
-         fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
+         fcontents, HeaderOut[-3].lower(), False, formatspecs)
      fcontents.seek(0, 0)
-     if(fccs != newfccs and not skipchecksum and not listonly):
+     if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
          VerbosePrintOut("File Content Checksum Error with file " +
                          fname + " at offset " + str(fcontentstart))
          VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4741,10 +4251,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
          cfcontents = UncompressFileAlt(fcontents, formatspecs)
          cfcontents.seek(0, 0)
          fcontents = MkTempFile()
-         shutil.copyfileobj(cfcontents, fcontents)
+         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
          cfcontents.close()
          fcontents.seek(0, 0)
-     fcontentend = fp.tell()
      if(re.findall("^\\+([0-9]+)", fseeknextfile)):
          fseeknextasnum = int(fseeknextfile.replace("+", ""))
          if(abs(fseeknextasnum) == 0):
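Passing length= gives shutil.copyfileobj an explicit per-iteration buffer size (16 KiB by default on older Pythons, larger on recent ones), so every copy in the module now shares the 256 KiB __filebuff_size__ setting. In isolation:

    import shutil

    def copy_stream(src, dst, bufsize=256 * 1024):
        shutil.copyfileobj(src, dst, bufsize)  # third argument = chunk size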
@@ -4836,22 +4345,49 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
      if(fjsontype=="json"):
          fjsoncontent = {}
          fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-         if(fjsonsize > 0):
+         if(fjsonsize > 0):
+             try:
+                 fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                 fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                 try:
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = json.loads(fprejsoncontent)
+                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                     fprejsoncontent = ""
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = {}
+         else:
+             fprejsoncontent = ""
+             fjsonrawcontent = fprejsoncontent
+             fjsoncontent = {}
+     elif(testyaml and fjsontype == "yaml"):
+         fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         if (fjsonsize > 0):
              try:
+                 # try base64 → utf-8 → YAML
                  fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
-                 fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
-             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                 fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+             except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
                  try:
+                     # fall back to treating the bytes as plain text YAML
                      fjsonrawcontent = fprejsoncontent
-                     fjsoncontent = json.loads(fprejsoncontent)
-                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                     fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                 except (UnicodeDecodeError, yaml.YAMLError):
+                     # final fallback: empty
                      fprejsoncontent = ""
-                     fjsonrawcontent = fprejsoncontent
+                     fjsonrawcontent = fprejsoncontent
                      fjsoncontent = {}
          else:
              fprejsoncontent = ""
-             fjsonrawcontent = fprejsoncontent
+             fjsonrawcontent = fprejsoncontent
              fjsoncontent = {}
+     elif(not testyaml and fjsontype == "yaml"):
+         fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         fprejsoncontent = ""
+         fjsonrawcontent = fprejsoncontent
      elif(fjsontype=="list"):
          fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
          flisttmp = MkTempFile()
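The YAML branch tries three interpretations in order: base64-wrapped YAML, plain-text YAML, then an empty mapping. Condensed into a standalone sketch (yaml.safe_load and base64.b64decode are the real APIs; the function name is illustrative):

    import base64
    import binascii
    import yaml

    def load_yaml_metadata(text):
        try:
            raw = base64.b64decode(text.encode("UTF-8")).decode("UTF-8")
            return yaml.safe_load(raw) or {}
        except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
            try:
                return yaml.safe_load(text) or {}
            except yaml.YAMLError:
                return {}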
@@ -4874,7 +4410,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
      fp.seek(len(delimiter), 1)
      fjend = fp.tell() - 1
      jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-     if(jsonfcs != fjsonchecksum and not skipchecksum):
+     if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
          VerbosePrintOut("File JSON Data Checksum Error with file " +
                          fname + " at offset " + str(fheaderstart))
          VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4906,9 +4442,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
          pyhascontents = False
      fcontents.seek(0, 0)
      newfccs = GetFileChecksum(
-         fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
+         fcontents, HeaderOut[-3].lower(), False, formatspecs)
      fcontents.seek(0, 0)
-     if(fccs != newfccs and not skipchecksum and not listonly):
+     if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
          VerbosePrintOut("File Content Checksum Error with file " +
                          fname + " at offset " + str(fcontentstart))
          VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4922,11 +4458,11 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
              fcontents, formatspecs)
          cfcontents.seek(0, 0)
          fcontents = MkTempFile()
-         shutil.copyfileobj(cfcontents, fcontents)
+         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
          cfcontents.close()
          fcontents.seek(0, 0)
          fccs = GetFileChecksum(
-             fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
+             fcontents, HeaderOut[-3].lower(), False, formatspecs)
      fcontentend = fp.tell()
      if(re.findall("^\\+([0-9]+)", fseeknextfile)):
          fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5038,6 +4574,33 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
              fprejsoncontent = ""
              fjsonrawcontent = fprejsoncontent
              fjsoncontent = {}
+     elif(testyaml and fjsontype == "yaml"):
+         fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         if (fjsonsize > 0):
+             try:
+                 # try base64 → utf-8 → YAML
+                 fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                 fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+             except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                 try:
+                     # fall back to treating the bytes as plain text YAML
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                 except (UnicodeDecodeError, yaml.YAMLError):
+                     # final fallback: empty
+                     fprejsoncontent = ""
+                     fjsonrawcontent = fprejsoncontent
+                     fjsoncontent = {}
+         else:
+             fprejsoncontent = ""
+             fjsonrawcontent = fprejsoncontent
+             fjsoncontent = {}
+     elif(not testyaml and fjsontype == "yaml"):
+         fjsoncontent = {}
+         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+         fprejsoncontent = ""
+         fjsonrawcontent = fprejsoncontent
      elif(fjsontype=="list"):
          fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
          flisttmp = MkTempFile()
@@ -5059,7 +4622,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
                      pass
      fp.seek(len(delimiter), 1)
      jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-     if(jsonfcs != fjsonchecksum and not skipchecksum):
+     if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
          VerbosePrintOut("File JSON Data Checksum Error with file " +
                          fname + " at offset " + str(fheaderstart))
          VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -5091,8 +4654,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
          pyhascontents = False
      fcontents.seek(0, 0)
      newfccs = GetFileChecksum(
-         fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
-     if(fccs != newfccs and not skipchecksum and not listonly):
+         fcontents, HeaderOut[-3].lower(), False, formatspecs)
+     if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
          VerbosePrintOut("File Content Checksum Error with file " +
                          fname + " at offset " + str(fcontentstart))
          VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -5106,11 +4669,11 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
              fcontents, formatspecs)
          cfcontents.seek(0, 0)
          fcontents = MkTempFile()
-         shutil.copyfileobj(cfcontents, fcontents)
+         shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
          cfcontents.close()
          fcontents.seek(0, 0)
          fccs = GetFileChecksum(
-             fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
+             fcontents, HeaderOut[-3].lower(), False, formatspecs)
      fcontentend = fp.tell()
      if(re.findall("^\\+([0-9]+)", fseeknextfile)):
          fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5144,9 +4707,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
      curloc = filestart
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      CatSize = fp.tell()
      CatSizeEnd = CatSize
@@ -5195,9 +4756,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
      curloc = filestart
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      CatSize = fp.tell()
      CatSizeEnd = CatSize
@@ -5216,10 +4775,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
      else:
          inheader = ReadFileHeaderDataWoSize(
              fp, formatspecs['format_delimiter'])
-     fnumextrafieldsize = int(inheader[5], 16)
-     fnumextrafields = int(inheader[6], 16)
+     fnumextrafieldsize = int(inheader[6], 16)
+     fnumextrafields = int(inheader[7], 16)
      fextrafieldslist = []
-     extrastart = 7
+     extrastart = 8
      extraend = extrastart + fnumextrafields
      while(extrastart < extraend):
          fextrafieldslist.append(inheader[extrastart])
@@ -5238,7 +4797,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
      fnumfields = int(inheader[1], 16)
      fhencoding = inheader[2]
      fostype = inheader[3]
-     fnumfiles = int(inheader[4], 16)
+     fpythontype = inheader[4]
+     fnumfiles = int(inheader[5], 16)
      fprechecksumtype = inheader[-2]
      fprechecksum = inheader[-1]
      headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5251,7 +4811,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
          return False
      formversions = re.search('(.*?)(\\d+)', formstring).groups()
      fcompresstype = ""
-     outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+     outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
      if (seekstart < 0) or (seekstart > fnumfiles):
          seekstart = 0
      if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -5279,7 +4839,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
          prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
          fp.seek(len(delimiter), 1)
          prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-         if(prejsonfcs != prefjsonchecksum and not skipchecksum):
+         if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
              VerbosePrintOut("File JSON Data Checksum Error with file " +
                              prefname + " at offset " + str(prefhstart))
              VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5287,7 +4847,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
          prenewfcs = GetHeaderChecksum(
              preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
          prefcs = preheaderdata[-2]
-         if(prefcs != prenewfcs and not skipchecksum):
+         if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
              VerbosePrintOut("File Header Checksum Error with file " +
                              prefname + " at offset " + str(prefhstart))
              VerbosePrintOut("'" + prefcs + "' != " +
@@ -5303,10 +4863,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
              prefcontents.write(fp.read(prefsize))
              prefcontents.seek(0, 0)
              prenewfccs = GetFileChecksum(
-                 prefcontents.read(), preheaderdata[-3].lower(), False, formatspecs)
+                 prefcontents, preheaderdata[-3].lower(), False, formatspecs)
              prefccs = preheaderdata[-1]
              pyhascontents = True
-             if(prefccs != prenewfccs and not skipchecksum):
+             if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
                  VerbosePrintOut("File Content Checksum Error with file " +
                                  prefname + " at offset " + str(prefcontentstart))
                  VerbosePrintOut("'" + prefccs +
@@ -5352,9 +4912,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
      curloc = filestart
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      CatSize = fp.tell()
      CatSizeEnd = CatSize
@@ -5373,10 +4931,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
      else:
          inheader = ReadFileHeaderDataWoSize(
              fp, formatspecs['format_delimiter'])
-     fnumextrafieldsize = int(inheader[5], 16)
-     fnumextrafields = int(inheader[6], 16)
+     fnumextrafieldsize = int(inheader[6], 16)
+     fnumextrafields = int(inheader[7], 16)
      fextrafieldslist = []
-     extrastart = 7
+     extrastart = 8
      extraend = extrastart + fnumextrafields
      while(extrastart < extraend):
          fextrafieldslist.append(inheader[extrastart])
@@ -5395,7 +4953,8 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
5395
4953
  fnumfields = int(inheader[1], 16)
5396
4954
  fhencoding = inheader[2]
5397
4955
  fostype = inheader[3]
5398
- fnumfiles = int(inheader[4], 16)
4956
+ fpythontype = inheader[4]
4957
+ fnumfiles = int(inheader[5], 16)
5399
4958
  fprechecksumtype = inheader[-2]
5400
4959
  fprechecksum = inheader[-1]
5401
4960
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
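The index shifts in these hunks all follow from one new header field: inheader[4] now carries the Python implementation, pushing the file count and the extra-field bookkeeping one slot to the right. A sketch of the 0.25.0 field order as implied by the diff (the helper and its dict keys are illustrative, not the module's API):

    def parse_header_fields(inheader):
        # Layout inferred from the hunks above; inheader is the field list
        # the reader obtains from the archive header.
        fields = {
            'fnumfields': int(inheader[1], 16),
            'fhencoding': inheader[2],
            'fostype': inheader[3],
            'fpythontype': inheader[4],               # new in 0.25.0
            'fnumfiles': int(inheader[5], 16),
            'fextrafieldsize': int(inheader[6], 16),  # was inheader[5]
            'fnumextrafields': int(inheader[7], 16),  # was inheader[6]
        }
        extrastart = 8                                # was 7
        extraend = extrastart + fields['fnumextrafields']
        fields['fextradata'] = inheader[extrastart:extraend]
        fields['fchecksumtype'] = inheader[-2]
        fields['fchecksum'] = inheader[-1]
        return fields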
@@ -5441,7 +5000,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
  prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(prejsonfcs != prefjsonchecksum and not skipchecksum):
+ if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
  VerbosePrintOut("File JSON Data Checksum Error with file " +
  prefname + " at offset " + str(prefhstart))
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5470,7 +5029,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefcontents, preheaderdata[-3].lower(), False, formatspecs)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(prefccs != prenewfccs and not skipchecksum):
+ if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
  VerbosePrintOut("File Content Checksum Error with file " +
  prefname + " at offset " + str(prefcontentstart))
  VerbosePrintOut("'" + prefccs +
@@ -5511,24 +5070,17 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  fp = infile
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
  currentfilepos = fp.tell()
  elif(infile == "-"):
  fp = MkTempFile()
- if(hasattr(sys.stdin, "buffer")):
- shutil.copyfileobj(sys.stdin.buffer, fp)
- else:
- shutil.copyfileobj(sys.stdin, fp)
+ shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
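Reading "-" now copies standard input into a temporary file in fixed-size chunks instead of relying on copyfileobj's default buffer, so arbitrarily large piped archives are spooled without unbounded memory use. A standalone sketch of the same pattern (CHUNK stands in for the module's __filebuff_size__):

    import shutil
    import sys
    import tempfile

    CHUNK = 1 << 20  # illustrative stand-in for __filebuff_size__

    def spool_stdin():
        # Copy binary stdin into a seekable temp file, CHUNK bytes at a
        # time; the module itself reads from its PY_STDIN_BUF alias here.
        tmp = tempfile.TemporaryFile()
        shutil.copyfileobj(sys.stdin.buffer, tmp, length=CHUNK)
        tmp.seek(0)
        return tmp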
@@ -5538,9 +5090,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  fp.write(infile)
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5549,9 +5099,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  fp = download_file_from_internet_file(infile)
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5559,9 +5107,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  elif(isinstance(infile, FileLikeAdapter)):
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5571,9 +5117,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  fp = open(infile, "rb")
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5624,9 +5168,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  currentinfilepos = infp.tell()
  try:
  infp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(infp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(infp)
  outinfsize = infp.tell()
  infp.seek(currentinfilepos, 0)
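All of these size probes share one shape: try a real seek to the end, and on failure fall back to SeekToEndOfFile. Collapsing the duplicated handlers into a single except (OSError, ValueError): clause changes nothing semantically, since both branches ran the same fallback. A self-contained sketch of the pattern, assuming the fallback simply drains the stream:

    def seek_to_end(fp, chunk=1 << 16):
        # Pipes, sockets and some wrappers raise OSError or ValueError on
        # seek; treat both alike and fall back to reading until EOF.
        try:
            fp.seek(0, 2)  # 2 == os.SEEK_END
        except (OSError, ValueError):
            while fp.read(chunk):
                pass
        return fp.tell()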
@@ -5665,24 +5207,17 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  fp = infile
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
  currentfilepos = fp.tell()
  elif(infile == "-"):
  fp = MkTempFile()
- if(hasattr(sys.stdin, "buffer")):
- shutil.copyfileobj(sys.stdin.buffer, fp)
- else:
- shutil.copyfileobj(sys.stdin, fp)
+ shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5692,9 +5227,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  fp.write(infile)
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5703,9 +5236,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  fp = download_file_from_internet_file(infile)
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5713,9 +5244,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  elif(isinstance(infile, FileLikeAdapter)):
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5725,9 +5254,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  fp = open(infile, "rb")
  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)
  outfsize = fp.tell()
  fp.seek(filestart, 0)
@@ -5778,9 +5305,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  currentinfilepos = infp.tell()
  try:
  infp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(infp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(infp)
  outinfsize = infp.tell()
  infp.seek(currentinfilepos, 0)
@@ -5869,7 +5394,7 @@ def AppendFileHeader(fp,
  numfiles,
  fencoding,
  extradata=None,
- checksumtype="crc32",
+ checksumtype="md5",
  formatspecs=__file_format_dict__):
  """
  Build and write the archive file header.
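The default checksumtype changes from "crc32" to "md5" here and in every wrapper below; CRC32 remains available by passing it explicitly. Illustrative only: assuming GetFileChecksum feeds hashlib for hash names, the md5 digest of a file object would be computed along these lines:

    import hashlib

    def file_md5_hex(fp, chunk=1 << 20):
        # Stream the file through hashlib.md5 in chunks; hexdigest() is
        # lowercased to match the hex conventions used in this module.
        h = hashlib.md5()
        for piece in iter(lambda: fp.read(chunk), b""):
            h.update(piece)
        return h.hexdigest().lower()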
@@ -5921,11 +5446,11 @@

  # Preserve your original "tmpoutlen" computation exactly
  tmpoutlist = [extrasizelen, extrafields] # you used this as a separate list
- tmpoutlen = 3 + len(tmpoutlist) + len(xlist) + 2
+ tmpoutlen = 4 + len(tmpoutlist) + len(xlist) + 2
  tmpoutlenhex = _hex_lower(tmpoutlen)

  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), fnumfiles_hex], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
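The wider field count (tmpoutlen 3 to 4) and the extra py_implementation value are two sides of the same change: the writer must emit exactly the field list the reader above indexes. A rough sketch of the first group, assuming py_implementation is derived from platform.python_implementation():

    import platform

    def first_header_group(tmpoutlenhex, fencoding, fnumfiles_hex):
        # Field order must match the reader: field count, encoding, OS
        # type, Python implementation (the new field), then file count.
        py_implementation = platform.python_implementation()  # e.g. "CPython"
        return [tmpoutlenhex, fencoding, platform.system(),
                py_implementation, fnumfiles_hex]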
@@ -5980,7 +5505,7 @@
  return fp


- def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
+ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
  if(IsNestedDict(formatspecs) and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
  elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
@@ -5990,11 +5515,11 @@ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc3
  return fp


- def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
+ def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
  return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


- def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_multi_dict__, returnfp=False):
+ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -6043,18 +5568,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -6073,11 +5591,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  return True


- def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_dict__, returnfp=False):
+ def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
  return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)


- def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__):
+ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
  if(not hasattr(fp, "write")):
  return False
  if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -6156,26 +5674,21 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  return fp

-
- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  advancedlist = formatspecs['use_advanced_list']
  altinode = formatspecs['use_alt_inode']
  if(verbose):
  logging.basicConfig(format="%(message)s",
- stream=sys.stdout, level=logging.DEBUG)
+ stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  infilelist = []
  if(infiles == "-"):
- for line in sys.stdin:
+ for line in PY_STDIN_TEXT:
  infilelist.append(line.strip())
  infilelist = list(filter(None, infilelist))
  elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
@@ -6219,11 +5732,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  FullSizeFilesAlt = 0
  for curfname in GetDirList:
@@ -6373,7 +5882,7 @@
  curcompression = "none"
  if not followlink and ftype in data_types:
  with open(fname, "rb") as fpc:
- copy_opaque(fpc, fcontents, bufsize=1 << 20) # 1 MiB chunks, opaque copy
+ shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
@@ -6391,7 +5900,7 @@
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -6407,7 +5916,7 @@
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
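For context, the loop these hunks touch tries each candidate in compressionuselist on a copy of the content and keeps the best performer. A simplified in-memory model of that selection (the real code streams through temp files and CompressOpenFileAlt):

    import bz2
    import gzip
    import lzma

    def pick_best_compression(payload, candidates=("gzip", "bz2", "lzma")):
        # Compress the same bytes with every candidate and keep the
        # smallest result; fall back to storing them uncompressed.
        compressors = {"gzip": gzip.compress, "bz2": bz2.compress,
                       "lzma": lzma.compress}
        best_name, best_blob = "none", payload
        for name in candidates:
            blob = compressors[name](payload)
            if len(blob) < len(best_blob):
                best_name, best_blob = name, blob
        return best_name, best_blob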
@@ -6423,7 +5932,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  return False
  flstatinfo = os.stat(flinkname)
  with open(flinkname, "rb") as fpc:
- copy_opaque(fpc, fcontents, bufsize=1 << 20) # 1 MiB chunks, opaque copy
+ shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
@@ -6441,7 +5950,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -6457,7 +5966,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6480,20 +5989,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  return fp

- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  if(verbose):
  logging.basicConfig(format="%(message)s",
- stream=sys.stdout, level=logging.DEBUG)
+ stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6502,10 +6007,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  inodetoforminode = {}
  if(infile == "-"):
  infile = MkTempFile()
- if(hasattr(sys.stdin, "buffer")):
- shutil.copyfileobj(sys.stdin.buffer, infile)
- else:
- shutil.copyfileobj(sys.stdin, infile)
+ shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
  if(not infile):
  return False
@@ -6565,11 +6067,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
  fencoding = "UTF-8"
@@ -6655,7 +6153,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curcompression = "none"
  if ftype in data_types:
  fpc = tarfp.extractfile(member)
- copy_opaque(fpc, fcontents, bufsize=1 << 20) # 1 MiB chunks, opaque copy
+ shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  fpc.close()
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
@@ -6674,7 +6172,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -6690,7 +6188,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6713,21 +6211,17 @@
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  fcontents.close()
  return fp

- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  if(verbose):
  logging.basicConfig(format="%(message)s",
- stream=sys.stdout, level=logging.DEBUG)
+ stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6736,10 +6230,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  inodetoforminode = {}
  if(infile == "-"):
  infile = MkTempFile()
- if(hasattr(sys.stdin, "buffer")):
- shutil.copyfileobj(sys.stdin.buffer, infile)
- else:
- shutil.copyfileobj(sys.stdin, infile)
+ shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
  infile.seek(0, 0)
  if(not infile):
  return False
@@ -6769,11 +6260,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
  fencoding = "UTF-8"
@@ -6858,24 +6345,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcsize = format(int(0), 'x').lower()
  try:
  fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
- fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fuid = format(int(0), 'x').lower()
  try:
  fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
- fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fgid = format(int(0), 'x').lower()
  try:
  import pwd
  try:
  userinfo = pwd.getpwuid(os.getuid())
  funame = userinfo.pw_name
- except KeyError:
- funame = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  funame = ""
  except ImportError:
  funame = ""
@@ -6885,9 +6366,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  try:
  groupinfo = grp.getgrgid(os.getgid())
  fgname = groupinfo.gr_name
- except KeyError:
- fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  fgname = ""
  except ImportError:
  fgname = ""
@@ -6910,7 +6389,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -6923,7 +6402,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6946,26 +6425,22 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  fcontents.close()
  return fp

  if(not rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  return False

  if(rarfile_support):
- def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  if(verbose):
  logging.basicConfig(format="%(message)s",
- stream=sys.stdout, level=logging.DEBUG)
+ stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  curinode = 0
  curfid = 0
  inodelist = []
@@ -6986,21 +6461,13 @@ if(rarfile_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
  is_unix = False
@@ -7109,24 +6576,18 @@ if(rarfile_support):
  int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
  try:
  fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
- fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fuid = format(int(0), 'x').lower()
  try:
  fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
- fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fgid = format(int(0), 'x').lower()
  try:
  import pwd
  try:
  userinfo = pwd.getpwuid(os.getuid())
  funame = userinfo.pw_name
- except KeyError:
- funame = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  funame = ""
  except ImportError:
  funame = ""
@@ -7136,9 +6597,7 @@ if(rarfile_support):
  try:
  groupinfo = grp.getgrgid(os.getgid())
  fgname = groupinfo.gr_name
- except KeyError:
- fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  fgname = ""
  except ImportError:
  fgname = ""
@@ -7161,7 +6620,7 @@ if(rarfile_support):
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -7177,7 +6636,7 @@ if(rarfile_support):
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7200,26 +6659,22 @@ if(rarfile_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  fcontents.close()
  return fp

  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  return False

  if(py7zr_support):
- def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  if(verbose):
  logging.basicConfig(format="%(message)s",
- stream=sys.stdout, level=logging.DEBUG)
+ stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  formver = formatspecs['format_ver']
  fileheaderver = str(int(formver.replace(".", "")))
  curinode = 0
@@ -7242,11 +6697,7 @@ if(py7zr_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
  fencoding = "UTF-8"
@@ -7296,24 +6747,18 @@ if(py7zr_support):
  int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
  try:
  fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
- fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fuid = format(int(0), 'x').lower()
  try:
  fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
- fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fgid = format(int(0), 'x').lower()
  try:
  import pwd
  try:
  userinfo = pwd.getpwuid(os.getuid())
  funame = userinfo.pw_name
- except KeyError:
- funame = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  funame = ""
  except ImportError:
  funame = ""
@@ -7323,9 +6768,7 @@ if(py7zr_support):
  try:
  groupinfo = grp.getgrgid(os.getgid())
  fgname = groupinfo.gr_name
- except KeyError:
- fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  fgname = ""
  except ImportError:
  fgname = ""
@@ -7351,7 +6794,7 @@ if(py7zr_support):
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -7367,7 +6810,7 @@ if(py7zr_support):
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7390,20 +6833,16 @@ if(py7zr_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  fcontents.close()
  return fp

- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  if(verbose):
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  GetDirList = inlist
  if(not GetDirList):
  return False
@@ -7461,12 +6900,12 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  return fp


- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
  return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)


- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7519,18 +6958,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7547,7 +6979,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True

- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7562,7 +6994,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
  return True
  return returnout

- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7612,18 +7044,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7641,7 +7066,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7692,18 +7117,11 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7721,7 +7139,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7736,7 +7154,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
  return True
  return returnout

- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7787,18 +7205,11 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7816,7 +7227,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7832,11 +7243,11 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  return False

  if(rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7887,18 +7298,11 @@ if(rarfile_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7916,7 +7320,7 @@ if(rarfile_support):
  fp.close()
  return True

- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7932,11 +7336,11 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  return False

  if(py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7987,18 +7391,11 @@ if(py7zr_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
- if(hasattr(sys.stdout, "buffer")):
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -8016,7 +7413,7 @@ if(py7zr_support):
  fp.close()
  return True

- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -8031,7 +7428,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
  return True
  return returnout

- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
  return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

@@ -8065,9 +7462,7 @@ def PrintPermissionString(fchmode, ftype):
8065
7462
  permissionstr = "w" + permissionstr
8066
7463
  try:
8067
7464
  permissionoutstr = stat.filemode(fchmode)
8068
- except AttributeError:
8069
- permissionoutstr = permissionstr
8070
- except KeyError:
7465
+ except (KeyError, AttributeError):
8071
7466
  permissionoutstr = permissionstr
8072
7467
  return permissionoutstr
8073
7468
 
@@ -8983,7 +8378,7 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
8983
8378
 
8984
8379
 
8985
8380
  def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
8986
- filefp = StringIO()
8381
+ filefp = MkTempFile("", isbytes=False)
8987
8382
  outstring = UncompressString(instring, formatspecs, filestart)
8988
8383
  filefp.write(outstring)
8989
8384
  filefp.seek(0, 0)
@@ -8998,7 +8393,7 @@ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=
8998
8393
  fp.seek(filestart, 0)
8999
8394
  if(prechck!="zstd"):
9000
8395
  return UncompressFileAlt(fp, formatspecs, filestart)
9001
- filefp = StringIO()
8396
+ filefp = MkTempFile("", isbytes=False)
9002
8397
  fp.seek(filestart, 0)
9003
8398
  outstring = UncompressString(fp.read(), formatspecs, 0)
9004
8399
  filefp.write(outstring)
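
Both hunks swap an unbounded in-memory StringIO for MkTempFile("", isbytes=False). Assuming MkTempFile returns a text-mode spooled temporary file (its use elsewhere in this module suggests so), a rough standard-library equivalent is:

import tempfile

def mk_text_spool(initial="", max_mem=16 * 1024 * 1024):
    # Stays in RAM below max_mem and transparently spills to disk above it,
    # which bounds memory use for large uncompressed payloads.
    fp = tempfile.SpooledTemporaryFile(max_size=max_mem, mode="w+")
    if initial:
        fp.write(initial)
        fp.seek(0, 0)
    return fp
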
@@ -9070,9 +8465,7 @@ def _extract_base_fp(obj):
9070
8465
  try:
9071
8466
  f() # probe fileno()
9072
8467
  return cur
9073
- except UnsupportedOperation:
9074
- pass
9075
- except Exception:
8468
+ except (Exception, UnsupportedOperation):
9076
8469
  pass
9077
8470
  for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
9078
8471
  nxt = getattr(cur, attr, None)
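
One nuance in this hunk: io.UnsupportedOperation derives from both OSError and ValueError, so it is already covered by Exception, and the merged tuple behaves the same as a bare `except Exception:`:

from io import UnsupportedOperation

# UnsupportedOperation is a subclass of Exception (via OSError/ValueError),
# so listing it alongside Exception in the tuple is redundant but harmless.
assert issubclass(UnsupportedOperation, Exception)
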
@@ -9464,7 +8857,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw):
9464
8857
 
9465
8858
  # ========= copy helpers =========
9466
8859
 
9467
- def fast_copy(infp, outfp, bufsize=1 << 20):
8860
+ def fast_copy(infp, outfp, bufsize=__filebuff_size__):
9468
8861
  """
9469
8862
  Efficient copy from any readable file-like to any writable file-like.
9470
8863
  Uses readinto() when available to avoid extra allocations.
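
fast_copy's default buffer now tracks the shared __filebuff_size__ constant instead of a hard-coded 1 MiB. Per the docstring, it prefers readinto() so one buffer is reused instead of allocating a fresh chunk per iteration; a minimal sketch of that shape, with 1 << 20 standing in for the constant:

def fast_copy_sketch(infp, outfp, bufsize=1 << 20):
    if hasattr(infp, "readinto"):
        buf = bytearray(bufsize)
        view = memoryview(buf)
        while True:
            n = infp.readinto(buf)      # refill the same buffer each pass
            if not n:
                break
            outfp.write(view[:n])       # write only the filled prefix
    else:
        while True:
            chunk = infp.read(bufsize)  # fallback: plain chunked read
            if not chunk:
                break
            outfp.write(chunk)
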
@@ -9508,7 +8901,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
9508
8901
  shutil.copyfileobj(fp, outfp, length=chunk_size)
9509
8902
 
9510
8903
 
9511
- def copy_opaque(src, dst, bufsize=1 << 20, grow_step=64 << 20):
8904
+ def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20):
9512
8905
  """
9513
8906
  Copy opaque bytes from 'src' (any readable file-like) to 'dst'
9514
8907
  (your mmap-backed FileLikeAdapter or any writable file-like).
@@ -9570,11 +8963,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
9570
8963
 
9571
8964
  try:
9572
8965
  fp.seek(0, 0)
9573
- except io.UnsupportedOperation:
9574
- pass
9575
- except AttributeError:
9576
- pass
9577
- except OSError:
8966
+ except (io.UnsupportedOperation, AttributeError, OSError):
9578
8967
  pass
9579
8968
 
9580
8969
  if (not compression or compression == formatspecs['format_magic']
@@ -9633,11 +9022,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
9633
9022
 
9634
9023
  try:
9635
9024
  bytesfp.seek(0, 0)
9636
- except io.UnsupportedOperation:
9637
- pass
9638
- except AttributeError:
9639
- pass
9640
- except OSError:
9025
+ except (io.UnsupportedOperation, AttributeError, OSError):
9641
9026
  pass
9642
9027
  out = FileLikeAdapter(bytesfp, mode="rb") # read interface for the caller
9643
9028
  try:
@@ -9767,31 +9152,18 @@ def CheckSumSupport(checkfor, guaranteed=True):
9767
9152
  try:
9768
9153
  hash_list = sorted(list(hashlib.algorithms_guaranteed))
9769
9154
  except AttributeError:
9770
- hash_list = sorted(list(hashlib.algorithms))
9771
- else:
9772
- try:
9773
- hash_list = sorted(list(hashlib.algorithms_available))
9774
- except AttributeError:
9775
- hash_list = sorted(list(hashlib.algorithms))
9776
- checklistout = sorted(hash_list + ['adler32', 'crc16', 'crc16_ansi', 'crc16_ibm',
9777
- 'crc16_ccitt', 'crc32', 'crc64', 'crc64_ecma', 'crc64_iso', 'none'])
9778
- if(checkfor in checklistout):
9779
- return True
9780
- else:
9781
- return False
9782
-
9783
-
9784
- def CheckSumSupportAlt(checkfor, guaranteed=True):
9785
- if(guaranteed):
9786
- try:
9787
- hash_list = sorted(list(hashlib.algorithms_guaranteed))
9788
- except AttributeError:
9789
- hash_list = sorted(list(hashlib.algorithms))
9155
+ try:
9156
+ hash_list = sorted(list(hashlib.algorithms))
9157
+ except AttributeError:
9158
+ hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
9790
9159
  else:
9791
9160
  try:
9792
9161
  hash_list = sorted(list(hashlib.algorithms_available))
9793
9162
  except AttributeError:
9794
- hash_list = sorted(list(hashlib.algorithms))
9163
+ try:
9164
+ hash_list = sorted(list(hashlib.algorithms))
9165
+ except AttributeError:
9166
+ hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
9795
9167
  checklistout = hash_list
9796
9168
  if(checkfor in checklistout):
9797
9169
  return True
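
The separate CheckSumSupportAlt helper is folded into CheckSumSupport, and the hashlib probing gains an extra fallback: hashlib.algorithms existed only on Python 2.7, while algorithms_guaranteed and algorithms_available arrived in 3.2, hence the nested try/except ladder. A compact sketch of the intended probe chain:

import hashlib

def list_hashes(guaranteed=True):
    primary = "algorithms_guaranteed" if guaranteed else "algorithms_available"
    # Probe the preferred attribute first, then the Python 2.7-only
    # hashlib.algorithms, then algorithms_available as the last resort.
    for attr in (primary, "algorithms", "algorithms_available"):
        names = getattr(hashlib, attr, None)
        if names is not None:
            return sorted(a.lower() for a in names)
    return []
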
@@ -9799,48 +9171,48 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
9799
9171
  return False
9800
9172
 
9801
9173
 
9802
- def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9174
+ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9803
9175
  return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9804
9176
 
9805
- def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9177
+ def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9806
9178
  return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9807
9179
 
9808
- def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9180
+ def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9809
9181
  return PackCatFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
9810
9182
 
9811
9183
 
9812
- def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9184
+ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9813
9185
  return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9814
9186
 
9815
9187
 
9816
- def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9188
+ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9817
9189
  return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9818
9190
 
9819
9191
 
9820
9192
  if(not rarfile_support):
9821
- def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9193
+ def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9822
9194
  return False
9823
9195
 
9824
9196
  if(rarfile_support):
9825
- def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9197
+ def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9826
9198
  return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9827
9199
 
9828
9200
 
9829
9201
  if(not py7zr_support):
9830
- def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9202
+ def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9831
9203
  return False
9832
9204
 
9833
9205
  if(py7zr_support):
9834
- def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9206
+ def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9835
9207
  return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9836
9208
 
9837
9209
 
9838
- def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9210
+ def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9839
9211
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
9840
9212
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9841
9213
  formatspecs = formatspecs[checkcompressfile]
9842
9214
  if(verbose):
9843
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
9215
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9844
9216
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
9845
9217
  return PackCatFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9846
9218
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -9923,7 +9295,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9923
9295
  formatspecs=__file_format_multi_dict__, # keep default like original
9924
9296
  seektoend=False, verbose=False, returnfp=False):
9925
9297
  if(verbose):
9926
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
9298
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9927
9299
 
9928
9300
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
9929
9301
  formatspecs = formatspecs[fmttype]
@@ -9950,10 +9322,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
9950
9322
 
9951
9323
  elif(infile == "-"):
9952
9324
  fp = MkTempFile()
9953
- if(hasattr(sys.stdin, "buffer")):
9954
- shutil.copyfileobj(sys.stdin.buffer, fp)
9955
- else:
9956
- shutil.copyfileobj(sys.stdin, fp)
9325
+ shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
9957
9326
  fp.seek(filestart, 0)
9958
9327
  fp = UncompressFileAlt(fp, formatspecs, filestart)
9959
9328
  checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
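
Reading an archive from "-" now drains stdin through the module-level PY_STDIN_BUF alias (resolved once at import) with an explicit chunk size. A self-contained sketch, with tempfile.TemporaryFile standing in for MkTempFile and 1 << 20 for __filebuff_size__:

import shutil, sys, tempfile

STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)  # bytes stream on Python 3

def slurp_stdin(bufsize=1 << 20):
    fp = tempfile.TemporaryFile()       # stand-in for MkTempFile()
    shutil.copyfileobj(STDIN_BUF, fp, length=bufsize)
    fp.seek(0, 0)
    return fp
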
@@ -10030,9 +9399,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
10030
9399
 
10031
9400
  try:
10032
9401
  fp.seek(0, 2)
10033
- except OSError:
10034
- SeekToEndOfFile(fp)
10035
- except ValueError:
9402
+ except (OSError, ValueError):
10036
9403
  SeekToEndOfFile(fp)
10037
9404
 
10038
9405
  CatSize = fp.tell()
@@ -10062,19 +9429,19 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
10062
9429
  else:
10063
9430
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
10064
9431
 
10065
- fnumextrafieldsize = int(inheader[5], 16)
10066
- fnumextrafields = int(inheader[6], 16)
10067
- extrastart = 7
9432
+ fnumextrafieldsize = int(inheader[6], 16)
9433
+ fnumextrafields = int(inheader[7], 16)
9434
+ extrastart = 8
10068
9435
  extraend = extrastart + fnumextrafields
10069
9436
  formversion = re.findall("([\\d]+)", formstring)
10070
9437
  fheadsize = int(inheader[0], 16)
10071
9438
  fnumfields = int(inheader[1], 16)
10072
9439
  fhencoding = inheader[2]
10073
9440
  fostype = inheader[3]
10074
- fnumfiles = int(inheader[4], 16)
9441
+ fpythontype = inheader[4]
9442
+ fnumfiles = int(inheader[5], 16)
10075
9443
  fprechecksumtype = inheader[-2]
10076
9444
  fprechecksum = inheader[-1]
10077
-
10078
9445
  il = 0
10079
9446
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
10080
9447
  newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs)
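
The validator now reads one extra header field, bound to fpythontype at index 4, which shifts fnumfiles to index 5 and the extra-field counters to indices 6 and 7; archives laid out the old way will therefore fail validation against the new offsets. A sketch of both layouts as this hunk implies them (field meanings are inferred from the local variable names, so treat them as assumptions):

def parse_header(inheader, new_layout=True):
    hdr = {
        "fheadsize":  int(inheader[0], 16),
        "fnumfields": int(inheader[1], 16),
        "fhencoding": inheader[2],
        "fostype":    inheader[3],
    }
    if new_layout:                      # 0.25.0: python-type field inserted at 4
        hdr["fpythontype"] = inheader[4]
        hdr["fnumfiles"] = int(inheader[5], 16)
        extrasize_idx, extras_idx = 6, 7
    else:                               # 0.24.x layout
        hdr["fnumfiles"] = int(inheader[4], 16)
        extrasize_idx, extras_idx = 5, 6
    hdr["fnumextrafieldsize"] = int(inheader[extrasize_idx], 16)
    hdr["fnumextrafields"] = int(inheader[extras_idx], 16)
    hdr["extrastart"] = extras_idx + 1
    return hdr
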
@@ -10192,7 +9559,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
10192
9559
  VerbosePrintOut(outfname)
10193
9560
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
10194
9561
 
10195
- if(outfcs == infcs):
9562
+ if(hmac.compare_digest(outfcs, infcs)):
10196
9563
  if(verbose):
10197
9564
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
10198
9565
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -10204,7 +9571,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
10204
9571
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
10205
9572
 
10206
9573
  if(outfjsonsize > 0):
10207
- if(outfjsonchecksum == injsonfcs):
9574
+ if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
10208
9575
  if(verbose):
10209
9576
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
10210
9577
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -10228,7 +9595,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
10228
9595
  infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
10229
9596
  pyhascontents = True
10230
9597
 
10231
- if(outfccs == infccs):
9598
+ if(hmac.compare_digest(outfccs, infccs)):
10232
9599
  if(verbose):
10233
9600
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
10234
9601
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
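
All three checksum comparisons in the validator switch from == to hmac.compare_digest, which runs in time independent of how many leading characters match and so leaks nothing through timing. Both arguments must share a type; two ASCII hex-digest strings qualify:

import hmac

def checksums_match(expected_hex, actual_hex):
    # Constant-time comparison; raises TypeError when the argument types
    # differ, so normalize both sides to str (or both to bytes) first.
    return hmac.compare_digest(expected_hex, actual_hex)

assert checksums_match("d41d8cd98f00b204e9800998ecf8427e",
                       "d41d8cd98f00b204e9800998ecf8427e")
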
@@ -10301,7 +9668,7 @@ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
10301
9668
  while True:
10302
9669
  if outstartfile >= outfsize: # stop when function signals False
10303
9670
  break
10304
- is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
9671
+ is_valid_file = CatFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
10305
9672
  if is_valid_file is False: # stop when function signals False
10306
9673
  outretval.append(is_valid_file)
10307
9674
  break
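
This hunk repoints the recursive call from ArchiveFileValidate, a name the new module does not define, to CatFileValidate, presumably fixing a stale reference left over from an earlier rename. Where external callers still use a retired name, a back-compat alias is the usual cushion (hypothetical, not part of this release):

# Hypothetical shim: keep the retired name importable while callers migrate.
ArchiveFileValidate = CatFileValidate
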
@@ -10311,9 +9678,7 @@ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
10311
9678
  outstartfile = infile.tell()
10312
9679
  try:
10313
9680
  infile.seek(0, 2)
10314
- except OSError:
10315
- SeekToEndOfFile(infile)
10316
- except ValueError:
9681
+ except (OSError, ValueError):
10317
9682
  SeekToEndOfFile(infile)
10318
9683
  outfsize = infile.tell()
10319
9684
  infile.seek(outstartfile, 0)
@@ -10389,7 +9754,7 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
10389
9754
  formatspecs = formatspecs[checkcompressfile]
10390
9755
  fp = MkTempFile()
10391
9756
  fp = PackCatFileFromTarFile(
10392
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9757
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10393
9758
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10394
9759
  return listarrayfiles
10395
9760
 
@@ -10400,7 +9765,7 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
10400
9765
  formatspecs = formatspecs[checkcompressfile]
10401
9766
  fp = MkTempFile()
10402
9767
  fp = PackCatFileFromZipFile(
10403
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9768
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10404
9769
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10405
9770
  return listarrayfiles
10406
9771
 
@@ -10416,7 +9781,7 @@ if(rarfile_support):
10416
9781
  formatspecs = formatspecs[checkcompressfile]
10417
9782
  fp = MkTempFile()
10418
9783
  fp = PackCatFileFromRarFile(
10419
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9784
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10420
9785
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10421
9786
  return listarrayfiles
10422
9787
 
@@ -10431,7 +9796,7 @@ if(py7zr_support):
10431
9796
  formatspecs = formatspecs[checkcompressfile]
10432
9797
  fp = MkTempFile()
10433
9798
  fp = PackCatFileFromSevenZipFile(
10434
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9799
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10435
9800
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10436
9801
  return listarrayfiles
10437
9802
 
@@ -10455,7 +9820,7 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
10455
9820
  return False
10456
9821
 
10457
9822
 
10458
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
9823
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
10459
9824
  outarray = MkTempFile()
10460
9825
  packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10461
9826
  compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10586,7 +9951,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
10586
9951
  if compressionuselist is None:
10587
9952
  compressionuselist = compressionlistalt
10588
9953
  if checksumtype is None:
10589
- checksumtype = ["crc32", "crc32", "crc32", "crc32"]
9954
+ checksumtype = ["md5", "md5", "md5", "md5"]
10590
9955
  if extradata is None:
10591
9956
  extradata = []
10592
9957
  if jsondata is None:
@@ -10673,7 +10038,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
10673
10038
  compression = "auto"
10674
10039
 
10675
10040
  if verbose:
10676
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10041
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10677
10042
 
10678
10043
  # No files?
10679
10044
  if not listarrayfiles.get('ffilelist'):
@@ -10778,7 +10143,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
10778
10143
  while ilmin < ilsize:
10779
10144
  cfcontents = MkTempFile()
10780
10145
  fcontents.seek(0, 0)
10781
- shutil.copyfileobj(fcontents, cfcontents)
10146
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
10782
10147
  fcontents.seek(0, 0)
10783
10148
  cfcontents.seek(0, 0)
10784
10149
  cfcontents = CompressOpenFileAlt(
@@ -10796,7 +10161,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
10796
10161
 
10797
10162
  fcontents.seek(0, 0)
10798
10163
  cfcontents = MkTempFile()
10799
- shutil.copyfileobj(fcontents, cfcontents)
10164
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
10800
10165
  cfcontents.seek(0, 0)
10801
10166
  cfcontents = CompressOpenFileAlt(
10802
10167
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
@@ -10895,22 +10260,12 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
10895
10260
  fp.flush()
10896
10261
  if hasattr(os, "sync"):
10897
10262
  os.fsync(fp.fileno())
10898
- except io.UnsupportedOperation:
10899
- if verbose:
10900
- logging.warning("Flush/sync unsupported on this file object.")
10901
- except AttributeError:
10902
- if verbose:
10903
- logging.warning("Flush/sync attributes missing on this file object.")
10904
- except OSError as e:
10905
- if verbose:
10906
- logging.warning("OS error during flush/sync: %s", e)
10263
+ except (io.UnsupportedOperation, AttributeError, OSError):
10264
+ pass
10907
10265
 
10908
10266
  if outfile == "-":
10909
10267
  fp.seek(0, 0)
10910
- if hasattr(sys.stdout, "buffer"):
10911
- shutil.copyfileobj(fp, sys.stdout.buffer)
10912
- else:
10913
- shutil.copyfileobj(fp, sys.stdout)
10268
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
10914
10269
  elif outfile is None:
10915
10270
  fp.seek(0, 0)
10916
10271
  outvar = fp.read()
@@ -10949,14 +10304,14 @@ def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto",
10949
10304
  return True
10950
10305
  return returnout
10951
10306
 
10952
- def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10307
+ def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10953
10308
  fp = MkTempFile(instr)
10954
10309
  listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
10955
10310
  checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
10956
10311
  return listarrayfiles
10957
10312
 
10958
10313
 
10959
- def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10314
+ def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10960
10315
  outarray = MkTempFile()
10961
10316
  packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10962
10317
  compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10969,7 +10324,7 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
10969
10324
  if(outdir is not None):
10970
10325
  outdir = RemoveWindowsPath(outdir)
10971
10326
  if(verbose):
10972
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10327
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10973
10328
  if(isinstance(infile, dict)):
10974
10329
  listarrayfiles = infile
10975
10330
  else:
@@ -11019,16 +10374,12 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
11019
10374
  listarrayfiles['ffilelist'][lcfi]['fcontents'])
11020
10375
  listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
11021
10376
  shutil.copyfileobj(
11022
- listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
10377
+ listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc, length=__filebuff_size__)
11023
10378
  try:
11024
10379
  fpc.flush()
11025
10380
  if(hasattr(os, "sync")):
11026
10381
  os.fsync(fpc.fileno())
11027
- except io.UnsupportedOperation:
11028
- pass
11029
- except AttributeError:
11030
- pass
11031
- except OSError:
10382
+ except (io.UnsupportedOperation, AttributeError, OSError):
11032
10383
  pass
11033
10384
  if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
11034
10385
  os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
@@ -11070,16 +10421,12 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
11070
10421
  flinkinfo['fcontents'] = MkTempFile(
11071
10422
  flinkinfo['fcontents'])
11072
10423
  flinkinfo['fcontents'].seek(0, 0)
11073
- shutil.copyfileobj(flinkinfo['fcontents'], fpc)
10424
+ shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
11074
10425
  try:
11075
10426
  fpc.flush()
11076
10427
  if(hasattr(os, "sync")):
11077
10428
  os.fsync(fpc.fileno())
11078
- except io.UnsupportedOperation:
11079
- pass
11080
- except AttributeError:
11081
- pass
11082
- except OSError:
10429
+ except (io.UnsupportedOperation, AttributeError, OSError):
11083
10430
  pass
11084
10431
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
11085
10432
  os.chown(PrependPath(
@@ -11149,16 +10496,12 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
11149
10496
  flinkinfo['fcontents'] = MkTempFile(
11150
10497
  flinkinfo['fcontents'])
11151
10498
  flinkinfo['fcontents'].seek(0, 0)
11152
- shutil.copyfileobj(flinkinfo['fcontents'], fpc)
10499
+ shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
11153
10500
  try:
11154
10501
  fpc.flush()
11155
10502
  if(hasattr(os, "sync")):
11156
10503
  os.fsync(fpc.fileno())
11157
- except io.UnsupportedOperation:
11158
- pass
11159
- except AttributeError:
11160
- pass
11161
- except OSError:
10504
+ except (io.UnsupportedOperation, AttributeError, OSError):
11162
10505
  pass
11163
10506
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
11164
10507
  os.chown(PrependPath(
@@ -11245,7 +10588,7 @@ def ftype_to_str(ftype):
11245
10588
 
11246
10589
  def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
11247
10590
  if(verbose):
11248
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10591
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11249
10592
  if(isinstance(infile, dict)):
11250
10593
  listarrayfileslist = [infile]
11251
10594
  if(isinstance(infile, list)):
@@ -11253,7 +10596,7 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
11253
10596
  else:
11254
10597
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
11255
10598
  infile = RemoveWindowsPath(infile)
11256
- listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
10599
+ listarrayfileslist = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
11257
10600
  if(not listarrayfileslist):
11258
10601
  return False
11259
10602
  for listarrayfiles in listarrayfileslist:
@@ -11326,9 +10669,7 @@ def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
11326
10669
  outstartfile = infile.tell()
11327
10670
  try:
11328
10671
  infile.seek(0, 2)
11329
- except OSError:
11330
- SeekToEndOfFile(infile)
11331
- except ValueError:
10672
+ except (OSError, ValueError):
11332
10673
  SeekToEndOfFile(infile)
11333
10674
  outfsize = infile.tell()
11334
10675
  infile.seek(outstartfile, 0)
@@ -11358,13 +10699,10 @@ def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck
11358
10699
 
11359
10700
  def TarFileListFiles(infile, verbose=False, returnfp=False):
11360
10701
  if(verbose):
11361
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10702
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11362
10703
  if(infile == "-"):
11363
10704
  infile = MkTempFile()
11364
- if(hasattr(sys.stdin, "buffer")):
11365
- shutil.copyfileobj(sys.stdin.buffer, infile)
11366
- else:
11367
- shutil.copyfileobj(sys.stdin, infile)
10705
+ shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
11368
10706
  infile.seek(0, 0)
11369
10707
  if(not infile):
11370
10708
  return False
@@ -11483,13 +10821,10 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
11483
10821
 
11484
10822
  def ZipFileListFiles(infile, verbose=False, returnfp=False):
11485
10823
  if(verbose):
11486
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10824
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11487
10825
  if(infile == "-"):
11488
10826
  infile = MkTempFile()
11489
- if(hasattr(sys.stdin, "buffer")):
11490
- shutil.copyfileobj(sys.stdin.buffer, infile)
11491
- else:
11492
- shutil.copyfileobj(sys.stdin, infile)
10827
+ shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
11493
10828
  infile.seek(0, 0)
11494
10829
  if(not infile):
11495
10830
  return False
@@ -11566,24 +10901,18 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
11566
10901
  printfname = member.filename
11567
10902
  try:
11568
10903
  fuid = int(os.getuid())
11569
- except AttributeError:
11570
- fuid = int(0)
11571
- except KeyError:
10904
+ except (KeyError, AttributeError):
11572
10905
  fuid = int(0)
11573
10906
  try:
11574
10907
  fgid = int(os.getgid())
11575
- except AttributeError:
11576
- fgid = int(0)
11577
- except KeyError:
10908
+ except (KeyError, AttributeError):
11578
10909
  fgid = int(0)
11579
10910
  try:
11580
10911
  import pwd
11581
10912
  try:
11582
10913
  userinfo = pwd.getpwuid(os.getuid())
11583
10914
  funame = userinfo.pw_name
11584
- except KeyError:
11585
- funame = ""
11586
- except AttributeError:
10915
+ except (KeyError, AttributeError):
11587
10916
  funame = ""
11588
10917
  except ImportError:
11589
10918
  funame = ""
@@ -11593,9 +10922,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
11593
10922
  try:
11594
10923
  groupinfo = grp.getgrgid(os.getgid())
11595
10924
  fgname = groupinfo.gr_name
11596
- except KeyError:
11597
- fgname = ""
11598
- except AttributeError:
10925
+ except (KeyError, AttributeError):
11599
10926
  fgname = ""
11600
10927
  except ImportError:
11601
10928
  fgname = ""
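
The archive-listing functions collapse their duplicated uid/gid and pwd/grp handlers into tuples: os.getuid and os.getgid are absent on Windows (AttributeError), and passwd/group lookups can fail for unmapped ids (KeyError). A condensed sketch of the whole ladder:

import os

def current_owner_and_group():
    try:
        fuid, fgid = int(os.getuid()), int(os.getgid())  # AttributeError on Windows
    except (KeyError, AttributeError):
        fuid = fgid = 0
    try:
        import pwd
        funame = pwd.getpwuid(fuid).pw_name   # KeyError for unmapped uids
    except (ImportError, KeyError, AttributeError):
        funame = ""
    try:
        import grp
        fgname = grp.getgrgid(fgid).gr_name   # KeyError for unmapped gids
    except (ImportError, KeyError, AttributeError):
        fgname = ""
    return fuid, fgid, funame, fgname
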
@@ -11621,7 +10948,7 @@ if(not rarfile_support):
11621
10948
  if(rarfile_support):
11622
10949
  def RarFileListFiles(infile, verbose=False, returnfp=False):
11623
10950
  if(verbose):
11624
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10951
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11625
10952
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
11626
10953
  return False
11627
10954
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11704,24 +11031,18 @@ if(rarfile_support):
11704
11031
  printfname = member.filename
11705
11032
  try:
11706
11033
  fuid = int(os.getuid())
11707
- except AttributeError:
11708
- fuid = int(0)
11709
- except KeyError:
11034
+ except (KeyError, AttributeError):
11710
11035
  fuid = int(0)
11711
11036
  try:
11712
11037
  fgid = int(os.getgid())
11713
- except AttributeError:
11714
- fgid = int(0)
11715
- except KeyError:
11038
+ except (KeyError, AttributeError):
11716
11039
  fgid = int(0)
11717
11040
  try:
11718
11041
  import pwd
11719
11042
  try:
11720
11043
  userinfo = pwd.getpwuid(os.getuid())
11721
11044
  funame = userinfo.pw_name
11722
- except KeyError:
11723
- funame = ""
11724
- except AttributeError:
11045
+ except (KeyError, AttributeError):
11725
11046
  funame = ""
11726
11047
  except ImportError:
11727
11048
  funame = ""
@@ -11731,9 +11052,7 @@ if(rarfile_support):
11731
11052
  try:
11732
11053
  groupinfo = grp.getgrgid(os.getgid())
11733
11054
  fgname = groupinfo.gr_name
11734
- except KeyError:
11735
- fgname = ""
11736
- except AttributeError:
11055
+ except (KeyError, AttributeError):
11737
11056
  fgname = ""
11738
11057
  except ImportError:
11739
11058
  fgname = ""
@@ -11758,7 +11077,7 @@ if(not py7zr_support):
11758
11077
  if(py7zr_support):
11759
11078
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
11760
11079
  if(verbose):
11761
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
11080
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11762
11081
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
11763
11082
  return False
11764
11083
  lcfi = 0
@@ -11811,24 +11130,18 @@ if(py7zr_support):
11811
11130
  file_content[member.filename].close()
11812
11131
  try:
11813
11132
  fuid = int(os.getuid())
11814
- except AttributeError:
11815
- fuid = int(0)
11816
- except KeyError:
11133
+ except (KeyError, AttributeError):
11817
11134
  fuid = int(0)
11818
11135
  try:
11819
11136
  fgid = int(os.getgid())
11820
- except AttributeError:
11821
- fgid = int(0)
11822
- except KeyError:
11137
+ except (KeyError, AttributeError):
11823
11138
  fgid = int(0)
11824
11139
  try:
11825
11140
  import pwd
11826
11141
  try:
11827
11142
  userinfo = pwd.getpwuid(os.getuid())
11828
11143
  funame = userinfo.pw_name
11829
- except KeyError:
11830
- funame = ""
11831
- except AttributeError:
11144
+ except (KeyError, AttributeError):
11832
11145
  funame = ""
11833
11146
  except ImportError:
11834
11147
  funame = ""
@@ -11838,9 +11151,7 @@ if(py7zr_support):
11838
11151
  try:
11839
11152
  groupinfo = grp.getgrgid(os.getgid())
11840
11153
  fgname = groupinfo.gr_name
11841
- except KeyError:
11842
- fgname = ""
11843
- except AttributeError:
11154
+ except (KeyError, AttributeError):
11844
11155
  fgname = ""
11845
11156
  except ImportError:
11846
11157
  fgname = ""
@@ -11861,7 +11172,7 @@ if(py7zr_support):
11861
11172
 
11862
11173
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
11863
11174
  if(verbose):
11864
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
11175
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11865
11176
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
11866
11177
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
11867
11178
  formatspecs = formatspecs[checkcompressfile]
@@ -11880,7 +11191,7 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
11880
11191
  return False
11881
11192
 
11882
11193
 
11883
- def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
11194
+ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
11884
11195
  outarray = MkTempFile()
11885
11196
  packform = PackCatFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
11886
11197
  compressionlevel, followlink, checksumtype, formatspecs, False, True)
@@ -11892,19 +11203,19 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
11892
11203
  PyNeoFile compatibility layer
11893
11204
  """
11894
11205
 
11895
- def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11206
+ def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11896
11207
  return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
11897
11208
 
11898
- def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11209
+ def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11899
11210
  return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
11900
11211
 
11901
- def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11212
+ def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11902
11213
  return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
11903
11214
 
11904
- def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11215
+ def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11905
11216
  return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
11906
11217
 
11907
- def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
11218
+ def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
11908
11219
  return PackCatFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
11909
11220
 
11910
11221
  def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
@@ -11913,7 +11224,7 @@ def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonl
11913
11224
  def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
11914
11225
  return UnPackCatFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
11915
11226
 
11916
- def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
11227
+ def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11917
11228
  return RePackCatFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11918
11229
 
11919
11230
  def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
@@ -11922,7 +11233,7 @@ def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False,
11922
11233
  def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
11923
11234
  return CatFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
11924
11235
 
11925
- def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
11236
+ def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11926
11237
  intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
11927
11238
  return RePackCatFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11928
11239
 
@@ -11964,10 +11275,7 @@ def download_file_from_ftp_file(url):
11964
11275
  ftp_port = 21
11965
11276
  try:
11966
11277
  ftp.connect(urlparts.hostname, ftp_port)
11967
- except socket.gaierror:
11968
- log.info("Error With URL "+url)
11969
- return False
11970
- except socket.timeout:
11278
+ except (socket.gaierror, socket.timeout):
11971
11279
  log.info("Error With URL "+url)
11972
11280
  return False
11973
11281
  if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
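
The FTP, paramiko, and pysftp helpers all merge their socket.gaierror and socket.timeout handlers, which shared an identical body. A minimal sketch of the connect step, assuming a module-level logger named log as in the surrounding code:

import socket
from ftplib import FTP

def ftp_connect(host, port=21, log=None):
    ftp = FTP()
    try:
        ftp.connect(host, port)
    except (socket.gaierror, socket.timeout):  # DNS failure or timeout: same handling
        if log:
            log.info("Error With URL " + host)
        return False
    return ftp
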
@@ -12055,10 +11363,7 @@ def upload_file_to_ftp_file(ftpfile, url):
12055
11363
  ftp_port = 21
12056
11364
  try:
12057
11365
  ftp.connect(urlparts.hostname, ftp_port)
12058
- except socket.gaierror:
12059
- log.info("Error With URL "+url)
12060
- return False
12061
- except socket.timeout:
11366
+ except (socket.gaierror, socket.timeout):
12062
11367
  log.info("Error With URL "+url)
12063
11368
  return False
12064
11369
  if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12169,7 +11474,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12169
11474
  else:
12170
11475
  response = requests.get(rebuilt_url, headers=headers, timeout=(5, 30), stream=True)
12171
11476
  response.raw.decode_content = True
12172
- shutil.copyfileobj(response.raw, httpfile)
11477
+ shutil.copyfileobj(response.raw, httpfile, length=__filebuff_size__)
12173
11478
 
12174
11479
  # 2) HTTPX branch
12175
11480
  elif usehttp == 'httpx' and havehttpx:
@@ -12181,7 +11486,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12181
11486
  else:
12182
11487
  response = client.get(rebuilt_url, headers=headers)
12183
11488
  raw_wrapper = RawIteratorWrapper(response.iter_bytes())
12184
- shutil.copyfileobj(raw_wrapper, httpfile)
11489
+ shutil.copyfileobj(raw_wrapper, httpfile, length=__filebuff_size__)
12185
11490
 
12186
11491
  # 3) Mechanize branch
12187
11492
  elif usehttp == 'mechanize' and havemechanize:
@@ -12200,7 +11505,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12200
11505
 
12201
11506
  # Open the URL and copy the response to httpfile
12202
11507
  response = br.open(rebuilt_url)
12203
- shutil.copyfileobj(response, httpfile)
11508
+ shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
12204
11509
 
12205
11510
  # 4) Fallback to urllib
12206
11511
  else:
@@ -12213,7 +11518,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12213
11518
  else:
12214
11519
  opener = build_opener()
12215
11520
  response = opener.open(request)
12216
- shutil.copyfileobj(response, httpfile)
11521
+ shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
12217
11522
 
12218
11523
  # Reset file pointer to the start before returning
12219
11524
  httpfile.seek(0, 0)
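
Every HTTP transport branch (requests, httpx, mechanize, urllib) now passes length=__filebuff_size__ to shutil.copyfileobj so downloads share one chunk size. A self-contained sketch of the urllib fallback branch, with stand-ins for the module's constants and temp-file helper:

import shutil, tempfile
try:
    from urllib.request import Request, build_opener   # Python 3
except ImportError:
    from urllib2 import Request, build_opener          # Python 2

BUFSIZE = 1 << 20  # stand-in for __filebuff_size__

def fetch(url, headers=None):
    request = Request(url, headers=headers or {})
    response = build_opener().open(request)
    httpfile = tempfile.TemporaryFile()     # stand-in for MkTempFile()
    shutil.copyfileobj(response, httpfile, length=BUFSIZE)
    httpfile.seek(0, 0)                     # rewind before handing back
    return httpfile
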
@@ -12346,7 +11651,7 @@ def upload_file_to_http_file(
12346
11651
  fileobj.seek(0)
12347
11652
  except Exception:
12348
11653
  pass
12349
- shutil.copyfileobj(fileobj, buf)
11654
+ shutil.copyfileobj(fileobj, buf, length=__filebuff_size__)
12350
11655
 
12351
11656
  _w('\r\n')
12352
11657
  _w('--' + boundary + '--\r\n')
@@ -12435,10 +11740,7 @@ if(haveparamiko):
12435
11740
  username=sftp_username, password=sftp_password)
12436
11741
  except paramiko.ssh_exception.SSHException:
12437
11742
  return False
12438
- except socket.gaierror:
12439
- log.info("Error With URL "+url)
12440
- return False
12441
- except socket.timeout:
11743
+ except (socket.gaierror, socket.timeout):
12442
11744
  log.info("Error With URL "+url)
12443
11745
  return False
12444
11746
  sftp = ssh.open_sftp()
@@ -12492,10 +11794,7 @@ if(haveparamiko):
12492
11794
  username=sftp_username, password=sftp_password)
12493
11795
  except paramiko.ssh_exception.SSHException:
12494
11796
  return False
12495
- except socket.gaierror:
12496
- log.info("Error With URL "+url)
12497
- return False
12498
- except socket.timeout:
11797
+ except (socket.gaierror, socket.timeout):
12499
11798
  log.info("Error With URL "+url)
12500
11799
  return False
12501
11800
  sftp = ssh.open_sftp()
@@ -12546,10 +11845,7 @@ if(havepysftp):
12546
11845
  username=sftp_username, password=sftp_password)
12547
11846
  except paramiko.ssh_exception.SSHException:
12548
11847
  return False
12549
- except socket.gaierror:
12550
- log.info("Error With URL "+url)
12551
- return False
12552
- except socket.timeout:
11848
+ except (socket.gaierror, socket.timeout):
12553
11849
  log.info("Error With URL "+url)
12554
11850
  return False
12555
11851
  sftpfile = MkTempFile()
@@ -12599,10 +11895,7 @@ if(havepysftp):
12599
11895
  username=sftp_username, password=sftp_password)
12600
11896
  except paramiko.ssh_exception.SSHException:
12601
11897
  return False
12602
- except socket.gaierror:
12603
- log.info("Error With URL "+url)
12604
- return False
12605
- except socket.timeout:
11898
+ except (socket.gaierror, socket.timeout):
12606
11899
  log.info("Error With URL "+url)
12607
11900
  return False
12608
11901
  sftpfile.seek(0, 0)