PyFoxFile 0.24.4__py3-none-any.whl → 0.25.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
pyfoxfile.py CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
- $FileInfo: pyfoxfile.py - Last Update: 11/3/2025 Ver. 0.24.4 RC 1 - Author: cooldude2k $
+ $FileInfo: pyfoxfile.py - Last Update: 11/3/2025 Ver. 0.25.0 RC 1 - Author: cooldude2k $
 
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -73,6 +73,17 @@ except ImportError:
 except ImportError:
     import json
 
+testyaml = False
+try:
+    import oyaml as yaml
+    testyaml = True
+except ImportError:
+    try:
+        import yaml
+        testyaml = True
+    except ImportError:
+        testyaml = False
+
 try:
     import configparser
 except ImportError:
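The new testyaml flag follows the same soft-dependency pattern as the py7zr/paramiko/pysftp checks further down: try the preferred module first (oyaml, which preserves mapping order), fall back to plain PyYAML, and record whether either import succeeded. A minimal sketch of how downstream code can branch on the flag (serialize_metadata is a hypothetical helper, not part of pyfoxfile.py):

    import json

    def serialize_metadata(meta, prefer_yaml=False):
        # Emit YAML only when one of the YAML modules imported above exists;
        # otherwise quietly fall back to JSON, which is always available.
        if prefer_yaml and testyaml:
            return yaml.safe_dump(meta, default_flow_style=False)
        return json.dumps(meta)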
@@ -115,6 +126,16 @@ else:
     bytes_type = bytes
     text_type = str
 
+# Text streams (as provided by Python)
+PY_STDIN_TEXT = sys.stdin
+PY_STDOUT_TEXT = sys.stdout
+PY_STDERR_TEXT = sys.stderr
+
+# Binary-friendly streams (use .buffer on Py3, fall back on Py2)
+PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
+PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
+PY_STDERR_BUF = getattr(sys.stderr, "buffer", sys.stderr)
+
 # Text vs bytes tuples you can use with isinstance()
 TEXT_TYPES = (basestring,)  # "str or unicode" on Py2, "str" on Py3
 BINARY_TYPES = (bytes,) if not PY2 else (str,)  # bytes on Py3, str on Py2
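The PY_*_BUF aliases resolve to the underlying binary buffer on Python 3 and to the plain stream on Python 2, so raw archive bytes can cross stdin/stdout without newline translation or encoding errors. A short sketch of the intended use (the payload bytes are placeholders):

    # Binary-safe on both Py2 and Py3: .buffer is used when it exists.
    PY_STDOUT_BUF.write(b"raw archive bytes, not text")
    PY_STDOUT_BUF.flush()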
@@ -211,12 +232,6 @@ if sys.version_info[0] == 2:
     except (NameError, AttributeError):
         pass
 
-    # CRC32 import
-    try:
-        from zlib import crc32
-    except ImportError:
-        from binascii import crc32
-
     # Define FileNotFoundError for Python 2
     try:
         FileNotFoundError
@@ -251,9 +266,7 @@ py7zr_support = False
 try:
     import py7zr
     py7zr_support = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # TAR file checking
@@ -279,9 +292,7 @@ haveparamiko = False
 try:
     import paramiko
     haveparamiko = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # PySFTP support
@@ -289,9 +300,7 @@ havepysftp = False
 try:
     import pysftp
     havepysftp = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # Add the mechanize import check
@@ -299,9 +308,7 @@ havemechanize = False
 try:
     import mechanize
     havemechanize = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # Requests support
@@ -311,9 +318,7 @@ try:
     haverequests = True
     import urllib3
     logging.getLogger("urllib3").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # HTTPX support
@@ -323,9 +328,7 @@ try:
     havehttpx = True
     logging.getLogger("httpx").setLevel(logging.WARNING)
     logging.getLogger("httpcore").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # HTTP and URL parsing
@@ -416,9 +419,14 @@ __include_defaults__ = True
 __use_inmemfile__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
-BYTES_PER_MiB = 1024 * 1024
-DEFAULT_SPOOL_MAX = 8 * BYTES_PER_MiB
+BYTES_PER_KiB = 1024
+BYTES_PER_MiB = 1024 * BYTES_PER_KiB
+# Spool: not tiny, but won’t blow up RAM if many are in use
+DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB  # 4 MiB per spooled temp file
 __spoolfile_size__ = DEFAULT_SPOOL_MAX
+# Buffer: bigger than stdlib default (16 KiB), but still modest
+DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # 256 KiB copy buffer
+__filebuff_size__ = DEFAULT_BUFFER_MAX
 __program_name__ = "Py"+__file_format_default__
 __use_env_file__ = True
 __use_ini_file__ = True
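DEFAULT_SPOOL_MAX caps how much of a spooled temp file stays in RAM before it silently rolls over to disk, while DEFAULT_BUFFER_MAX (__filebuff_size__) is the chunk size the copy loops below now use everywhere a hard-coded 1 MiB read used to be. A small stdlib-only illustration of the spool behavior these constants feed into:

    import tempfile

    BYTES_PER_MIB = 1024 * 1024
    # Stays in memory until more than max_size bytes are written,
    # then transparently moves to a real temp file on disk.
    spool = tempfile.SpooledTemporaryFile(max_size=4 * BYTES_PER_MIB)
    spool.write(b"x" * (4 * BYTES_PER_MIB + 1))  # exceeds max_size: forces rollover
    spool.seek(0)
    chunk = spool.read(256 * 1024)  # read back in 256 KiB chunks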
@@ -621,6 +629,8 @@ if __include_defaults__:
     add_format(__file_format_multi_dict__, "狐ファイル", "狐ファイル", ".狐", "KitsuneFairu")
     add_format(__file_format_multi_dict__, "狐狸文件", "狐狸文件", ".狐狸", "HúlíWénjiàn")
     add_format(__file_format_multi_dict__, "여우파일", "여우파일", ".여우", "YeouPa-il")
+    add_format(__file_format_multi_dict__, "基次内法伊鲁", "基次内法伊鲁", ".基次内", "JīCìNèiFǎYīLǔ")
+    add_format(__file_format_multi_dict__, "키츠네파일", "키츠네파일", ".키츠네", "KicheunePa-il")
 
 # Pick a default if current default key is not present
 if __file_format_default__ not in __file_format_multi_dict__:
@@ -640,12 +650,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0, 24, 4, "RC 1", 1)
-__version_date_info__ = (2025, 11, 3, "RC 1", 1)
+__version_info__ = (0, 25, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 5, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id: 3889d7a367d73addd635e5448a7febafff7919ad $"
+__revision_id__ = "$Id: 1e44250af6454c3f042d7212eb751c2c18543954 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -796,7 +806,7 @@ geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prour
 if(platform.python_implementation() != ""):
     py_implementation = platform.python_implementation()
 if(platform.python_implementation() == ""):
-    py_implementation = "Python"
+    py_implementation = "CPython"
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -2059,7 +2069,7 @@ def MkTempFile(data=None,
                suffix="",
                use_spool=__use_spoolfile__,
                spool_max=__spoolfile_size__,
-               spool_dir=__use_spooldir__:
+               spool_dir=__use_spooldir__):
     """
     Return a file-like handle with consistent behavior on Py2.7 and Py3.x.
 
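The old signature as rendered here ends in a stray colon; 0.25.0 closes the parameter list with a parenthesis so the def line parses. Assuming the defaults defined earlier in the module, a typical call looks like this sketch (the exact seeding behavior of the data argument is an assumption):

    # In-memory file by default; with use_spool=True the handle is a
    # SpooledTemporaryFile that moves to spool_dir past spool_max bytes.
    fp = MkTempFile(b"seed bytes", use_spool=True)
    fp.seek(0)
    data = fp.read()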
@@ -2384,7 +2394,7 @@ def GetTotalSize(file_list):
         try:
             total_size += os.path.getsize(item)
         except OSError as e:
-            sys.stderr.write("Error accessing file {}: {}\n".format(item, e))
+            PY_STDERR_TEXT.write("Error accessing file {}: {}\n".format(item, e))
     return total_size
 
 
@@ -2621,7 +2631,7 @@ class ZlibFile(object):
         scanned_leading = 0  # for tolerant header scan
 
         while True:
-            data = self.file.read(1 << 20)  # 1 MiB blocks
+            data = self.file.read(__filebuff_size__)  # read in __filebuff_size__ blocks
             if not data:
                 if d is not None:
                     self._spool.write(d.flush())
@@ -2779,7 +2789,7 @@ class ZlibFile(object):
 
         # Buffer and compress in chunks to limit memory
         self._write_buf += data
-        if len(self._write_buf) >= (1 << 20):  # 1 MiB threshold
+        if len(self._write_buf) >= (__filebuff_size__):  # flush at the __filebuff_size__ threshold
             chunk = self._compressor.compress(bytes(self._write_buf))
             if chunk:
                 self.file.write(chunk)
@@ -2889,7 +2899,7 @@ class ZlibFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio = io.BytesIO(bytes(data) if not isinstance(data, bytes) else data)
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)
 
     # compatibility aliases for unwrapping utilities
@@ -2925,7 +2935,7 @@ def compress_bytes(payload, level=6, wbits=15, text=False, **kw):
     out = compress_bytes(b"hello")
     out = compress_bytes(u"hello\n", text=True, encoding="utf-8", newline="\n")
     """
-    bio = io.BytesIO()
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     f = ZlibFile(fileobj=bio, mode=mode, level=level, wbits=wbits, **kw)
     try:
@@ -3084,7 +3094,7 @@ class GzipFile(object):
 
         self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)
 
-        CHUNK = 1 << 20
+        CHUNK = __filebuff_size__
         pending = b""
         d = None
         absolute_offset = 0
@@ -3247,7 +3257,7 @@ class GzipFile(object):
 
         # Stage and compress in chunks
         self._write_buf += data
-        if len(self._write_buf) >= (1 << 20):  # 1 MiB threshold
+        if len(self._write_buf) >= (__filebuff_size__):  # flush at the __filebuff_size__ threshold
             out = self._compressor.compress(bytes(self._write_buf))
             if out:
                 self.file.write(out)
@@ -3347,7 +3357,7 @@ class GzipFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio = io.BytesIO(bytes(data) if not isinstance(data, bytes) else data)
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)
 
     # compatibility aliases for unwrapping utilities
@@ -3389,7 +3399,7 @@ def gzip_compress_bytes(payload, level=6, text=False, **kw):
     - text=False: 'payload' must be bytes-like; written via GzipFile('wb')
     You can pass newline/encoding/errors to control text encoding.
     """
-    bio = io.BytesIO()
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     gf = GzipFile(fileobj=bio, mode=mode, level=level, **kw)
     try:
@@ -3621,280 +3631,6 @@ def crc_generic(msg, width, poly, init, xorout, refin, refout):
         crc = _reflect(crc, width)
     return (crc ^ xorout) & mask
 
-# =========================
-# Named CRCs
-# =========================
-# CRC-16/ANSI (ARC/MODBUS family with init=0xFFFF by default)
-def crc16_ansi(msg, initial_value=0xFFFF):
-    return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)
-
-def crc16_ibm(msg, initial_value=0xFFFF):
-    return crc16_ansi(msg, initial_value)
-
-def crc16(msg):
-    return crc16_ansi(msg, 0xFFFF)
-
-def crc16_ccitt(msg, initial_value=0xFFFF):
-    # CCITT-FALSE
-    return crc_generic(msg, 16, 0x1021, initial_value & 0xFFFF, 0x0000, False, False)
-
-def crc16_x25(msg):
-    return crc_generic(msg, 16, 0x1021, 0xFFFF, 0xFFFF, True, True)
-
-def crc16_kermit(msg):
-    return crc_generic(msg, 16, 0x1021, 0x0000, 0x0000, True, True)
-
-def crc64_ecma(msg, initial_value=0x0000000000000000):
-    return crc_generic(msg, 64, 0x42F0E1EBA9EA3693,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0x0000000000000000, False, False)
-
-def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
-    return crc_generic(msg, 64, 0x000000000000001B,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0xFFFFFFFFFFFFFFFF, True, True)
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
-# =========================
-# Public checksum API
-# =========================
-def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
-    or a single field) and compute the requested checksum. Returns lowercase hex.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    delim = formatspecs.get('format_delimiter', u"\0")
-    hdr_bytes = _serialize_header_fields(inlist or [], delim)
-    if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
-        hdr_bytes = _to_bytes(hdr_bytes)
-    hdr_bytes = bytes(hdr_bytes)
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, hdr_bytes)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Accepts bytes/str/file-like.
-    - Hashlib algos: streamed in 1 MiB chunks.
-    - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
-    - Falls back to one-shot for non-file-like inputs.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    # file-like streaming
-    if hasattr(instr, "read"):
-        # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-            h = hashlib.new(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                h.update(chunk)
-            return h.hexdigest().lower()
-
-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
-        # not known streaming algo: fallback to one-shot bytes
-        data = instr.read()
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-    else:
-        data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
-    data = bytes(data)
-
-    # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(data)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
 # --- helpers --------------------------------------------------------------
 
 try:
@@ -3935,206 +3671,15 @@ def _bytes_to_int(b):
         value = (value << 8) | ch
     return value
 
-
-# --- your existing CRCContext (unchanged) ---------------------------------
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-
-# --- hashlib-backed implementation ---------------------------------------
-
-class _HashlibCRCWrapper(object):
-    """
-    Wrap a hashlib object to present the same interface as CRCContext
-    (update, digest_int, hexdigest).
-
-    Assumes the hashlib algorithm already implements the exact CRC
-    specification (refin/refout/xorout/etc.).
-    """
-    __slots__ = ("_h", "spec", "mask", "width_hex")
-
-    def __init__(self, algo_name, spec):
-        self._h = hashlib.new(algo_name)
-        self.spec = spec
-        self.mask = (1 << spec.width) - 1
-        self.width_hex = (spec.width + 3) // 4
-
-    def update(self, data):
-        self._h.update(_coerce_bytes(data))
-        return self
-
-    def digest_int(self):
-        # Convert final digest bytes to an integer and mask to width
-        value = _bytes_to_int(self._h.digest())
-        return value & self.mask
-
-    def hexdigest(self):
-        h = self._h.hexdigest().lower()
-        # Normalize to the same number of hex digits as CRCContext
-        if len(h) < self.width_hex:
-            h = ("0" * (self.width_hex - len(h))) + h
-        elif len(h) > self.width_hex:
-            h = h[-self.width_hex:]
-        return h
-
-
-# --- public class: choose hashlib or fallback -----------------------------
-
-class CRC(object):
-    """
-    CRC wrapper that uses hashlib if available, otherwise falls back to
-    the pure-Python CRCContext.
-
-    spec.hashlib_name (preferred) or spec.name is used as the hashlib
-    algorithm name, e.g. 'crc32', 'crc32c', etc.
-    """
-
-    __slots__ = ("spec", "_impl")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-        algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
-        impl = None
-
-        if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
-            # Use hashlib-backed implementation
-            impl = _HashlibCRCWrapper(algo_name, spec)
-        else:
-            # Fallback to your pure-Python implementation
-            impl = CRCContext(spec)
-
-        self._impl = impl
-
-    def update(self, data):
-        self._impl.update(data)
-        return self
-
-    def digest_int(self):
-        return self._impl.digest_int()
-
-    def hexdigest(self):
-        return self._impl.hexdigest()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
     """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()
 
     delim = formatspecs.get('format_delimiter', u"\0")
     hdr_bytes = _serialize_header_fields(inlist or [], delim)
@@ -4142,34 +3687,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
         hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)
 
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, hdr_bytes)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
+        h = hashlib.new(algo_key)
+        h.update(hdr_bytes)
+        return h.hexdigest().lower()
 
     return "0"
 
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
     - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
     - Falls back to one-shot for non-file-like inputs.
     """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()
 
     # file-like streaming
-    if hasattr(instr, "read"):
+    if hasattr(inbytes, "read"):
         # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+
+        if CheckSumSupport(algo_key, hashlib_guaranteed):
             h = hashlib.new(algo_key)
             while True:
-                chunk = instr.read(1 << 20)
+                chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
                     break
                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
@@ -4177,49 +3718,31 @@ def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__
                 h.update(chunk)
             return h.hexdigest().lower()
 
-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
         # not known streaming algo: fallback to one-shot bytes
-        data = instr.read()
+        data = inbytes.read()
         if not isinstance(data, (bytes, bytearray, memoryview)):
             data = bytes(bytearray(data))
     else:
-        data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
+        data = _to_bytes(inbytes) if (encodedata or not isinstance(inbytes, (bytes, bytearray, memoryview))) else inbytes
     data = bytes(data)
 
     # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
 
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
         h = hashlib.new(algo_key)
         h.update(data)
         return h.hexdigest().lower()
 
     return "0"
 
-def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
     return hmac.compare_digest(want, calc)
 
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
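With the CRC plumbing removed, every checksum now routes through hashlib, and comparisons go through hmac.compare_digest, whose running time does not depend on where two hex strings first differ. A usage sketch with names taken from this module (the behavior of MkTempFile seeding is an assumption):

    fp = MkTempFile(b"example payload")
    fp.seek(0)
    digest = GetFileChecksum(fp, "sha256")   # streamed in __filebuff_size__ chunks
    fp.seek(0)
    assert ValidateFileChecksum(fp, "sha256", digest)  # constant-time compare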
@@ -4266,66 +3789,6 @@ def GetDataFromArrayAlt(structure, path, default=None):
     return element
 
 
-def GetHeaderChecksum(inlist=[], checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    fileheader = AppendNullBytes(inlist, formatspecs['format_delimiter']) if isinstance(
-        inlist, list) else AppendNullByte(inlist, formatspecs['format_delimiter'])
-    if encodedata and hasattr(fileheader, "encode"):
-        fileheader = fileheader.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](fileheader)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(fileheader)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    if encodedata and hasattr(instr, "encode"):
-        instr = instr.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](instr)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(instr)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    infileheadercshex = GetHeaderChecksum(
-        inlist, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == infileheadercshex
-
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    catinfilecshex = GetFileChecksum(
-        infile, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == catinfilecshex
-
-
 # ========= pushback-aware delimiter reader =========
 class _DelimiterReader(object):
     """
@@ -4658,7 +4121,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    fheaderstart = fp.tell()
     if(formatspecs['new_style']):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
@@ -4681,22 +4143,74 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     fjsonchecksumtype = HeaderOut[30]
     fjsonchecksum = HeaderOut[31]
     fjsoncontent = {}
-    fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
-    if(fjsonsize > 0):
-        try:
-            fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
-        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
             try:
-                fjsoncontent = json.loads(fprejsoncontent)
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-                fprejsoncontent = ""
-                fjsoncontent = {}
-    else:
-        fprejsoncontent = ""
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
         fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(jsonfcs != fjsonchecksum and not skipchecksum):
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4710,8 +4224,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
         return False
-    fhend = fp.tell() - 1
-    fcontentstart = fp.tell()
     fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -4725,9 +4237,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs != newfccs and not skipchecksum and not listonly):
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
        VerbosePrintOut("File Content Checksum Error with file " +
                        fname + " at offset " + str(fcontentstart))
        VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
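The YAML branch decodes the same way the JSON branch does: base64-decode then parse, and if that fails parse the raw text, using yaml.safe_load throughout so untrusted header metadata cannot instantiate arbitrary Python objects. The same fallback chain in isolation (a sketch assuming PyYAML; load_yaml_field is illustrative, not a module function):

    import base64, binascii, yaml

    def load_yaml_field(raw_text):
        # Mirror the header logic: base64 first, then plain text, then empty.
        try:
            decoded = base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8")
            return yaml.safe_load(decoded) or {}
        except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
            try:
                return yaml.safe_load(raw_text) or {}
            except yaml.YAMLError:
                return {}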
@@ -4740,10 +4252,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             cfcontents = UncompressFileAlt(fcontents, formatspecs)
             cfcontents.seek(0, 0)
             fcontents = MkTempFile()
-            shutil.copyfileobj(cfcontents, fcontents)
+            shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
             cfcontents.close()
             fcontents.seek(0, 0)
-    fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
         if(abs(fseeknextasnum) == 0):
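shutil.copyfileobj copies in fixed-size reads, so passing length=__filebuff_size__ moves data in 256 KiB chunks rather than the stdlib default. Standalone form of the same call:

    import io
    import shutil

    src = io.BytesIO(b"payload " * 100000)
    dst = io.BytesIO()
    shutil.copyfileobj(src, dst, length=256 * 1024)  # 256 KiB per read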
@@ -4835,22 +4346,49 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
4835
4346
  if(fjsontype=="json"):
4836
4347
  fjsoncontent = {}
4837
4348
  fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
4838
- if(fjsonsize > 0):
4349
+ if(fjsonsize > 0):
4350
+ try:
4351
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
4352
+ fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
4353
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
4354
+ try:
4355
+ fjsonrawcontent = fprejsoncontent
4356
+ fjsoncontent = json.loads(fprejsoncontent)
4357
+ except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
4358
+ fprejsoncontent = ""
4359
+ fjsonrawcontent = fprejsoncontent
4360
+ fjsoncontent = {}
4361
+ else:
4362
+ fprejsoncontent = ""
4363
+ fjsonrawcontent = fprejsoncontent
4364
+ fjsoncontent = {}
4365
+ elif(testyaml and fjsontype == "yaml"):
4366
+ fjsoncontent = {}
4367
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
4368
+ if (fjsonsize > 0):
4839
4369
  try:
4370
+ # try base64 → utf-8 → YAML
4840
4371
  fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
4841
- fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
4842
- except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
4372
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
4373
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
4843
4374
  try:
4375
+ # fall back to treating the bytes as plain text YAML
4844
4376
  fjsonrawcontent = fprejsoncontent
4845
- fjsoncontent = json.loads(fprejsoncontent)
4846
- except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
4377
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
4378
+ except (UnicodeDecodeError, yaml.YAMLError):
4379
+ # final fallback: empty
4847
4380
  fprejsoncontent = ""
4848
- fjsonrawcontent = fprejsoncontent
4381
+ fjsonrawcontent = fprejsoncontent
4849
4382
  fjsoncontent = {}
4850
4383
  else:
4851
4384
  fprejsoncontent = ""
4852
- fjsonrawcontent = fprejsoncontent
4385
+ fjsonrawcontent = fprejsoncontent
4853
4386
  fjsoncontent = {}
4387
+ elif(not testyaml and fjsontype == "yaml"):
4388
+ fjsoncontent = {}
4389
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
4390
+ fprejsoncontent = ""
4391
+ fjsonrawcontent = fprejsoncontent
4854
4392
  elif(fjsontype=="list"):
4855
4393
  fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
4856
4394
  flisttmp = MkTempFile()
@@ -4873,7 +4411,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
4873
4411
  fp.seek(len(delimiter), 1)
4874
4412
  fjend = fp.tell() - 1
4875
4413
  jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
4876
- if(jsonfcs != fjsonchecksum and not skipchecksum):
4414
+ if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
4877
4415
  VerbosePrintOut("File JSON Data Checksum Error with file " +
4878
4416
  fname + " at offset " + str(fheaderstart))
4879
4417
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4905,9 +4443,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
4905
4443
  pyhascontents = False
4906
4444
  fcontents.seek(0, 0)
4907
4445
  newfccs = GetFileChecksum(
4908
- fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
4446
+ fcontents, HeaderOut[-3].lower(), False, formatspecs)
4909
4447
  fcontents.seek(0, 0)
4910
- if(fccs != newfccs and not skipchecksum and not listonly):
4448
+ if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
4911
4449
  VerbosePrintOut("File Content Checksum Error with file " +
4912
4450
  fname + " at offset " + str(fcontentstart))
4913
4451
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4921,7 +4459,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
4921
4459
  fcontents, formatspecs)
4922
4460
  cfcontents.seek(0, 0)
4923
4461
  fcontents = MkTempFile()
4924
- shutil.copyfileobj(cfcontents, fcontents)
4462
+ shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
4925
4463
  cfcontents.close()
4926
4464
  fcontents.seek(0, 0)
4927
4465
  fccs = GetFileChecksum(
@@ -5037,6 +4575,33 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
5037
4575
  fprejsoncontent = ""
5038
4576
  fjsonrawcontent = fprejsoncontent
5039
4577
  fjsoncontent = {}
4578
+ elif(testyaml and fjsontype == "yaml"):
4579
+ fjsoncontent = {}
4580
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
4581
+ if (fjsonsize > 0):
4582
+ try:
4583
+ # try base64 → utf-8 → YAML
4584
+ fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
4585
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
4586
+ except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
4587
+ try:
4588
+ # fall back to treating the bytes as plain text YAML
4589
+ fjsonrawcontent = fprejsoncontent
4590
+ fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
4591
+ except (UnicodeDecodeError, yaml.YAMLError):
4592
+ # final fallback: empty
4593
+ fprejsoncontent = ""
4594
+ fjsonrawcontent = fprejsoncontent
4595
+ fjsoncontent = {}
4596
+ else:
4597
+ fprejsoncontent = ""
4598
+ fjsonrawcontent = fprejsoncontent
4599
+ fjsoncontent = {}
4600
+ elif(not testyaml and fjsontype == "yaml"):
4601
+ fjsoncontent = {}
4602
+ fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
4603
+ fprejsoncontent = ""
4604
+ fjsonrawcontent = fprejsoncontent
5040
4605
  elif(fjsontype=="list"):
5041
4606
  fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
5042
4607
  flisttmp = MkTempFile()
@@ -5058,7 +4623,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
5058
4623
  pass
5059
4624
  fp.seek(len(delimiter), 1)
5060
4625
  jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
5061
- if(jsonfcs != fjsonchecksum and not skipchecksum):
4626
+ if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
5062
4627
  VerbosePrintOut("File JSON Data Checksum Error with file " +
5063
4628
  fname + " at offset " + str(fheaderstart))
5064
4629
  VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -5090,8 +4655,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
5090
4655
  pyhascontents = False
5091
4656
  fcontents.seek(0, 0)
5092
4657
  newfccs = GetFileChecksum(
5093
- fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
5094
- if(fccs != newfccs and not skipchecksum and not listonly):
4658
+ fcontents, HeaderOut[-3].lower(), False, formatspecs)
4659
+ if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
5095
4660
  VerbosePrintOut("File Content Checksum Error with file " +
5096
4661
  fname + " at offset " + str(fcontentstart))
5097
4662
  VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -5105,11 +4670,11 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
5105
4670
  fcontents, formatspecs)
5106
4671
  cfcontents.seek(0, 0)
5107
4672
  fcontents = MkTempFile()
5108
- shutil.copyfileobj(cfcontents, fcontents)
4673
+ shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
5109
4674
  cfcontents.close()
5110
4675
  fcontents.seek(0, 0)
5111
4676
  fccs = GetFileChecksum(
5112
- fcontents.read(), HeaderOut[-3].lower(), False, formatspecs)
4677
+ fcontents, HeaderOut[-3].lower(), False, formatspecs)
5113
4678
  fcontentend = fp.tell()
5114
4679
  if(re.findall("^\\+([0-9]+)", fseeknextfile)):
5115
4680
  fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5143,9 +4708,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
5143
4708
  curloc = filestart
5144
4709
  try:
5145
4710
  fp.seek(0, 2)
5146
- except OSError:
5147
- SeekToEndOfFile(fp)
5148
- except ValueError:
4711
+ except (OSError, ValueError):
5149
4712
  SeekToEndOfFile(fp)
5150
4713
  CatSize = fp.tell()
5151
4714
  CatSizeEnd = CatSize
@@ -5194,9 +4757,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5194
4757
  curloc = filestart
5195
4758
  try:
5196
4759
  fp.seek(0, 2)
5197
- except OSError:
5198
- SeekToEndOfFile(fp)
5199
- except ValueError:
4760
+ except (OSError, ValueError):
5200
4761
  SeekToEndOfFile(fp)
5201
4762
  CatSize = fp.tell()
5202
4763
  CatSizeEnd = CatSize
@@ -5215,10 +4776,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5215
4776
  else:
5216
4777
  inheader = ReadFileHeaderDataWoSize(
5217
4778
  fp, formatspecs['format_delimiter'])
5218
- fnumextrafieldsize = int(inheader[5], 16)
5219
- fnumextrafields = int(inheader[6], 16)
4779
+ fnumextrafieldsize = int(inheader[6], 16)
4780
+ fnumextrafields = int(inheader[7], 16)
5220
4781
  fextrafieldslist = []
5221
- extrastart = 7
4782
+ extrastart = 8
5222
4783
  extraend = extrastart + fnumextrafields
5223
4784
  while(extrastart < extraend):
5224
4785
  fextrafieldslist.append(inheader[extrastart])
@@ -5237,7 +4798,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5237
4798
  fnumfields = int(inheader[1], 16)
5238
4799
  fhencoding = inheader[2]
5239
4800
  fostype = inheader[3]
5240
- fnumfiles = int(inheader[4], 16)
4801
+ fpythontype = inheader[4]
4802
+ fnumfiles = int(inheader[5], 16)
5241
4803
  fprechecksumtype = inheader[-2]
5242
4804
  fprechecksum = inheader[-1]
5243
4805
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5250,7 +4812,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5250
4812
  return False
5251
4813
  formversions = re.search('(.*?)(\\d+)', formstring).groups()
5252
4814
  fcompresstype = ""
5253
- outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
4815
+ outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
5254
4816
  if (seekstart < 0) or (seekstart > fnumfiles):
5255
4817
  seekstart = 0
5256
4818
  if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -5278,7 +4840,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5278
4840
  prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
5279
4841
  fp.seek(len(delimiter), 1)
5280
4842
  prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
5281
- if(prejsonfcs != prefjsonchecksum and not skipchecksum):
4843
+ if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
5282
4844
  VerbosePrintOut("File JSON Data Checksum Error with file " +
5283
4845
  prefname + " at offset " + str(prefhstart))
5284
4846
  VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5286,7 +4848,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5286
4848
  prenewfcs = GetHeaderChecksum(
5287
4849
  preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
5288
4850
  prefcs = preheaderdata[-2]
5289
- if(prefcs != prenewfcs and not skipchecksum):
4851
+ if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
5290
4852
  VerbosePrintOut("File Header Checksum Error with file " +
5291
4853
  prefname + " at offset " + str(prefhstart))
5292
4854
  VerbosePrintOut("'" + prefcs + "' != " +
@@ -5302,10 +4864,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
5302
4864
  prefcontents.write(fp.read(prefsize))
5303
4865
  prefcontents.seek(0, 0)
5304
4866
  prenewfccs = GetFileChecksum(
5305
- prefcontents.read(), preheaderdata[-3].lower(), False, formatspecs)
4867
+ prefcontents, preheaderdata[-3].lower(), False, formatspecs)
5306
4868
  prefccs = preheaderdata[-1]
5307
4869
  pyhascontents = True
5308
- if(prefccs != prenewfccs and not skipchecksum):
4870
+ if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
5309
4871
  VerbosePrintOut("File Content Checksum Error with file " +
5310
4872
  prefname + " at offset " + str(prefcontentstart))
5311
4873
  VerbosePrintOut("'" + prefccs +
@@ -5351,9 +4913,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
5351
4913
  curloc = filestart
5352
4914
  try:
5353
4915
  fp.seek(0, 2)
5354
- except OSError:
5355
- SeekToEndOfFile(fp)
5356
- except ValueError:
4916
+ except (OSError, ValueError):
5357
4917
  SeekToEndOfFile(fp)
5358
4918
  CatSize = fp.tell()
5359
4919
  CatSizeEnd = CatSize
@@ -5372,10 +4932,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  else:
      inheader = ReadFileHeaderDataWoSize(
          fp, formatspecs['format_delimiter'])
- fnumextrafieldsize = int(inheader[5], 16)
- fnumextrafields = int(inheader[6], 16)
+ fnumextrafieldsize = int(inheader[6], 16)
+ fnumextrafields = int(inheader[7], 16)
  fextrafieldslist = []
- extrastart = 7
+ extrastart = 8
  extraend = extrastart + fnumextrafields
  while(extrastart < extraend):
      fextrafieldslist.append(inheader[extrastart])
@@ -5394,7 +4954,8 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  fnumfields = int(inheader[1], 16)
  fhencoding = inheader[2]
  fostype = inheader[3]
- fnumfiles = int(inheader[4], 16)
+ fpythontype = inheader[4]
+ fnumfiles = int(inheader[5], 16)
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
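
The new implementation field shifts every later header index up by one,
which is why the extra-field offsets above move from 5/6/7 to 6/7/8.
The resulting field order, as read back in this hunk (the index map is
inferred from the reads shown here):

    # inheader index map after this change:
    #   [1]  fnumfields        (hex count of fields)
    #   [2]  fhencoding        (header text encoding)
    #   [3]  fostype           (operating system name)
    #   [4]  fpythontype       (python implementation; new field)
    #   [5]  fnumfiles         (hex file count)
    #   [6]  extra-field size  (hex)
    #   [7]  extra-field count (hex)
    #   [8:] extra fields, then checksum type and header checksum
    fpythontype = inheader[4]
    fnumfiles = int(inheader[5], 16)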
@@ -5440,7 +5001,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
  prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
  fp.seek(len(delimiter), 1)
  prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
- if(prejsonfcs != prefjsonchecksum and not skipchecksum):
+ if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
      VerbosePrintOut("File JSON Data Checksum Error with file " +
                      prefname + " at offset " + str(prefhstart))
      VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5469,7 +5030,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
      prefcontents, preheaderdata[-3].lower(), False, formatspecs)
  prefccs = preheaderdata[-1]
  pyhascontents = True
- if(prefccs != prenewfccs and not skipchecksum):
+ if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
      VerbosePrintOut("File Content Checksum Error with file " +
                      prefname + " at offset " + str(prefcontentstart))
      VerbosePrintOut("'" + prefccs +
@@ -5510,24 +5071,17 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
      fp = infile
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
      currentfilepos = fp.tell()
  elif(infile == "-"):
      fp = MkTempFile()
-     if(hasattr(sys.stdin, "buffer")):
-         shutil.copyfileobj(sys.stdin.buffer, fp)
-     else:
-         shutil.copyfileobj(sys.stdin, fp)
+     shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
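
Reading an archive from "-" now spools binary stdin through a single
buffered copy instead of the old hasattr branch. A sketch of the same
pattern; the 1 MiB buffer is only assumed to mirror __filebuff_size__:

    import shutil
    import sys

    def spool_stdin_to_temp(tmpfp, bufsize=1024 * 1024):
        # getattr() picks the binary stdin on Python 3 and falls back
        # to plain sys.stdin on Python 2.
        stdin_buf = getattr(sys.stdin, "buffer", sys.stdin)
        shutil.copyfileobj(stdin_buf, tmpfp, length=bufsize)
        tmpfp.seek(0, 0)
        return tmpfp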
@@ -5537,9 +5091,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
      fp.write(infile)
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5548,9 +5100,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
      fp = download_file_from_internet_file(infile)
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5558,9 +5108,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
  elif(isinstance(infile, FileLikeAdapter)):
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5570,9 +5118,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
      fp = open(infile, "rb")
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5623,9 +5169,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
      currentinfilepos = infp.tell()
      try:
          infp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(infp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(infp)
      outinfsize = infp.tell()
      infp.seek(currentinfilepos, 0)
@@ -5664,24 +5208,17 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
      fp = infile
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
      currentfilepos = fp.tell()
  elif(infile == "-"):
      fp = MkTempFile()
-     if(hasattr(sys.stdin, "buffer")):
-         shutil.copyfileobj(sys.stdin.buffer, fp)
-     else:
-         shutil.copyfileobj(sys.stdin, fp)
+     shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5691,9 +5228,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
      fp.write(infile)
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5702,9 +5237,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
      fp = download_file_from_internet_file(infile)
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5712,9 +5245,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
  elif(isinstance(infile, FileLikeAdapter)):
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5724,9 +5255,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
      fp = open(infile, "rb")
      try:
          fp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(fp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(fp)
      outfsize = fp.tell()
      fp.seek(filestart, 0)
@@ -5777,9 +5306,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
      currentinfilepos = infp.tell()
      try:
          infp.seek(0, 2)
-     except OSError:
-         SeekToEndOfFile(infp)
-     except ValueError:
+     except (OSError, ValueError):
          SeekToEndOfFile(infp)
      outinfsize = infp.tell()
      infp.seek(currentinfilepos, 0)
@@ -5868,7 +5395,7 @@ def AppendFileHeader(fp,
                      numfiles,
                      fencoding,
                      extradata=None,
-                     checksumtype="crc32",
+                     checksumtype="md5",
                      formatspecs=__file_format_dict__):
      """
      Build and write the archive file header.
@@ -5920,11 +5447,11 @@ def AppendFileHeader(fp,
 
  # Preserve your original "tmpoutlen" computation exactly
  tmpoutlist = [extrasizelen, extrafields]  # you used this as a separate list
- tmpoutlen = 3 + len(tmpoutlist) + len(xlist) + 2
+ tmpoutlen = 4 + len(tmpoutlist) + len(xlist) + 2
  tmpoutlenhex = _hex_lower(tmpoutlen)
 
  # Serialize the first group
- fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), fnumfiles_hex], delimiter)
+ fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
  # Append tmpoutlist
  fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
  # Append extradata items if any
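
Because py_implementation now sits between platform.system() and the
file count, the serialized field group carries four leading values
instead of three, and tmpoutlen grows to match. A hedged stand-in for
the null-delimited framing that AppendNullBytes performs (the real
helper is defined elsewhere in this module; this sketch only
illustrates the shape of the output):

    def append_null_bytes(values, delimiter="\0"):
        # Each field is written followed by the format delimiter.
        out = ""
        for value in values:
            out += str(value) + delimiter
        return out

    # header = append_null_bytes([tmpoutlenhex, fencoding,
    #                             platform.system(), py_implementation,
    #                             fnumfiles_hex], delimiter)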
@@ -5979,7 +5506,7 @@ def AppendFileHeader(fp,
  return fp
 
 
- def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
+ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
      if(IsNestedDict(formatspecs) and fmttype in formatspecs):
          formatspecs = formatspecs[fmttype]
      elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
@@ -5989,11 +5516,11 @@ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc3
      return fp
 
 
- def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
+ def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
      return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
 
 
- def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_multi_dict__, returnfp=False):
+ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
      if(IsNestedDict(formatspecs) and fmttype=="auto" and
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
          get_in_ext = os.path.splitext(outfile)
@@ -6042,18 +5569,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
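
Writing an archive to "-" follows the mirror-image pattern on the
output side: rewind the finished file object and stream it to binary
stdout in fixed-size chunks. A sketch, again only assuming a 1 MiB
value for __filebuff_size__:

    import shutil
    import sys

    def emit_to_stdout(fp, bufsize=1024 * 1024):
        # Binary stdout on Python 3, plain sys.stdout on Python 2.
        stdout_buf = getattr(sys.stdout, "buffer", sys.stdout)
        fp.seek(0, 0)
        shutil.copyfileobj(fp, stdout_buf, length=bufsize)
        stdout_buf.flush()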
@@ -6072,11 +5592,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
  return True
 
 
- def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_dict__, returnfp=False):
+ def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
      return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
 
 
- def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__):
+ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
      if(not hasattr(fp, "write")):
          return False
      if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -6155,26 +5675,21 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  return fp
 
-
- def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
      if(not hasattr(fp, "write")):
          return False
      advancedlist = formatspecs['use_advanced_list']
      altinode = formatspecs['use_alt_inode']
      if(verbose):
          logging.basicConfig(format="%(message)s",
-                             stream=sys.stdout, level=logging.DEBUG)
+                             stream=PY_STDOUT_TEXT, level=logging.DEBUG)
      infilelist = []
      if(infiles == "-"):
-         for line in sys.stdin:
+         for line in PY_STDIN_TEXT:
              infilelist.append(line.strip())
          infilelist = list(filter(None, infilelist))
      elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
@@ -6218,11 +5733,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  FullSizeFilesAlt = 0
  for curfname in GetDirList:
@@ -6372,7 +5883,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  curcompression = "none"
  if not followlink and ftype in data_types:
      with open(fname, "rb") as fpc:
-         copy_opaque(fpc, fcontents, bufsize=1 << 20)  # 1 MiB chunks, opaque copy
+         shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
      typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
      fcontents.seek(0, 0)
      if(typechecktest is not False):
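
Swapping copy_opaque for shutil.copyfileobj with an explicit length
keeps the chunked copy while dropping the custom helper from the hot
path. For reference, length is simply the per-iteration read size;
the stdlib call behaves like this loop:

    def chunked_copy(src, dst, bufsize=1024 * 1024):
        # Equivalent to shutil.copyfileobj(src, dst, length=bufsize).
        while True:
            chunk = src.read(bufsize)
            if not chunk:
                break
            dst.write(chunk)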
@@ -6390,7 +5901,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  while(ilmin < ilsize):
      cfcontents = MkTempFile()
      fcontents.seek(0, 0)
-     shutil.copyfileobj(fcontents, cfcontents)
+     shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
      fcontents.seek(0, 0)
      cfcontents.seek(0, 0)
      cfcontents = CompressOpenFileAlt(
@@ -6406,7 +5917,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
      cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6422,7 +5933,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
      return False
  flstatinfo = os.stat(flinkname)
  with open(flinkname, "rb") as fpc:
-     copy_opaque(fpc, fcontents, bufsize=1 << 20)  # 1 MiB chunks, opaque copy
+     shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
  typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
  fcontents.seek(0, 0)
  if(typechecktest is not False):
@@ -6440,7 +5951,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  while(ilmin < ilsize):
      cfcontents = MkTempFile()
      fcontents.seek(0, 0)
-     shutil.copyfileobj(fcontents, cfcontents)
+     shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
      fcontents.seek(0, 0)
      cfcontents.seek(0, 0)
      cfcontents = CompressOpenFileAlt(
@@ -6456,7 +5967,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
      cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6479,20 +5990,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
  fp.flush()
  if(hasattr(os, "sync")):
      os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  return fp
 
- def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
      if(not hasattr(fp, "write")):
          return False
      if(verbose):
          logging.basicConfig(format="%(message)s",
-                             stream=sys.stdout, level=logging.DEBUG)
+                             stream=PY_STDOUT_TEXT, level=logging.DEBUG)
      curinode = 0
      curfid = 0
      inodelist = []
@@ -6501,10 +6008,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
      inodetoforminode = {}
      if(infile == "-"):
          infile = MkTempFile()
-         if(hasattr(sys.stdin, "buffer")):
-             shutil.copyfileobj(sys.stdin.buffer, infile)
-         else:
-             shutil.copyfileobj(sys.stdin, infile)
+         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
          infile.seek(0, 0)
      if(not infile):
          return False
@@ -6564,11 +6068,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
      fencoding = "UTF-8"
@@ -6654,7 +6154,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curcompression = "none"
  if ftype in data_types:
      fpc = tarfp.extractfile(member)
-     copy_opaque(fpc, fcontents, bufsize=1 << 20)  # 1 MiB chunks, opaque copy
+     shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
      fpc.close()
      typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
      fcontents.seek(0, 0)
@@ -6673,7 +6173,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  while(ilmin < ilsize):
      cfcontents = MkTempFile()
      fcontents.seek(0, 0)
-     shutil.copyfileobj(fcontents, cfcontents)
+     shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
      fcontents.seek(0, 0)
      cfcontents.seek(0, 0)
      cfcontents = CompressOpenFileAlt(
@@ -6689,7 +6189,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
      cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6712,21 +6212,17 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
  fp.flush()
  if(hasattr(os, "sync")):
      os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  fcontents.close()
  return fp
 
- def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
      if(not hasattr(fp, "write")):
          return False
      if(verbose):
          logging.basicConfig(format="%(message)s",
-                             stream=sys.stdout, level=logging.DEBUG)
+                             stream=PY_STDOUT_TEXT, level=logging.DEBUG)
      curinode = 0
      curfid = 0
      inodelist = []
@@ -6735,10 +6231,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
      inodetoforminode = {}
      if(infile == "-"):
          infile = MkTempFile()
-         if(hasattr(sys.stdin, "buffer")):
-             shutil.copyfileobj(sys.stdin.buffer, infile)
-         else:
-             shutil.copyfileobj(sys.stdin, infile)
+         shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
          infile.seek(0, 0)
      if(not infile):
          return False
@@ -6768,11 +6261,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
      fencoding = "UTF-8"
@@ -6857,24 +6346,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fcsize = format(int(0), 'x').lower()
  try:
      fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
-     fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
      fuid = format(int(0), 'x').lower()
  try:
      fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
-     fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
      fgid = format(int(0), 'x').lower()
  try:
      import pwd
      try:
          userinfo = pwd.getpwuid(os.getuid())
          funame = userinfo.pw_name
-     except KeyError:
-         funame = ""
-     except AttributeError:
+     except (KeyError, AttributeError):
          funame = ""
  except ImportError:
      funame = ""
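
The consolidated handlers keep three distinct failure modes apart:
AttributeError covers platforms without os.getuid (Windows), KeyError
covers ids missing from the account database, and ImportError covers
interpreters without the POSIX-only pwd/grp modules. The same ladder
in one self-contained sketch:

    import os

    def lookup_owner_names():
        funame = ""
        fgname = ""
        try:
            import pwd
            try:
                funame = pwd.getpwuid(os.getuid()).pw_name
            except (KeyError, AttributeError):
                funame = ""  # uid not in passwd db, or no os.getuid()
        except ImportError:
            funame = ""      # pwd is POSIX-only
        try:
            import grp
            try:
                fgname = grp.getgrgid(os.getgid()).gr_name
            except (KeyError, AttributeError):
                fgname = ""
        except ImportError:
            fgname = ""
        return funame, fgname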
@@ -6884,9 +6367,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  try:
      groupinfo = grp.getgrgid(os.getgid())
      fgname = groupinfo.gr_name
- except KeyError:
-     fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
      fgname = ""
  except ImportError:
      fgname = ""
@@ -6909,7 +6390,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  while(ilmin < ilsize):
      cfcontents = MkTempFile()
      fcontents.seek(0, 0)
-     shutil.copyfileobj(fcontents, cfcontents)
+     shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
      fcontents.seek(0, 0)
      cfcontents.seek(0, 0)
      cfcontents = CompressOpenFileAlt(
@@ -6922,7 +6403,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
      cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6945,26 +6426,22 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
  fp.flush()
  if(hasattr(os, "sync")):
      os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  fcontents.close()
  return fp
 
  if(not rarfile_support):
-     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
          return False
 
  if(rarfile_support):
-     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
          if(not hasattr(fp, "write")):
              return False
          if(verbose):
              logging.basicConfig(format="%(message)s",
-                                 stream=sys.stdout, level=logging.DEBUG)
+                                 stream=PY_STDOUT_TEXT, level=logging.DEBUG)
          curinode = 0
          curfid = 0
          inodelist = []
@@ -6985,21 +6462,13 @@ if(rarfile_support):
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  try:
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
      is_unix = False
@@ -7108,24 +6577,18 @@ if(rarfile_support):
      int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
  try:
      fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
-     fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
      fuid = format(int(0), 'x').lower()
  try:
      fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
-     fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
      fgid = format(int(0), 'x').lower()
  try:
      import pwd
      try:
          userinfo = pwd.getpwuid(os.getuid())
          funame = userinfo.pw_name
-     except KeyError:
-         funame = ""
-     except AttributeError:
+     except (KeyError, AttributeError):
          funame = ""
  except ImportError:
      funame = ""
@@ -7135,9 +6598,7 @@ if(rarfile_support):
  try:
      groupinfo = grp.getgrgid(os.getgid())
      fgname = groupinfo.gr_name
- except KeyError:
-     fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
      fgname = ""
  except ImportError:
      fgname = ""
@@ -7160,7 +6621,7 @@ if(rarfile_support):
  while(ilmin < ilsize):
      cfcontents = MkTempFile()
      fcontents.seek(0, 0)
-     shutil.copyfileobj(fcontents, cfcontents)
+     shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
      fcontents.seek(0, 0)
      cfcontents.seek(0, 0)
      cfcontents = CompressOpenFileAlt(
@@ -7176,7 +6637,7 @@ if(rarfile_support):
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
      cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7199,26 +6660,22 @@ if(rarfile_support):
  fp.flush()
  if(hasattr(os, "sync")):
      os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  fcontents.close()
  return fp
 
  if(not py7zr_support):
-     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
          return False
 
  if(py7zr_support):
-     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
          if(not hasattr(fp, "write")):
              return False
          if(verbose):
              logging.basicConfig(format="%(message)s",
-                                 stream=sys.stdout, level=logging.DEBUG)
+                                 stream=PY_STDOUT_TEXT, level=logging.DEBUG)
          formver = formatspecs['format_ver']
          fileheaderver = str(int(formver.replace(".", "")))
          curinode = 0
@@ -7241,11 +6698,7 @@ if(py7zr_support):
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
      fencoding = "UTF-8"
@@ -7295,24 +6748,18 @@ if(py7zr_support):
      int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
  try:
      fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
-     fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
      fuid = format(int(0), 'x').lower()
  try:
      fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
-     fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
      fgid = format(int(0), 'x').lower()
  try:
      import pwd
      try:
          userinfo = pwd.getpwuid(os.getuid())
          funame = userinfo.pw_name
-     except KeyError:
-         funame = ""
-     except AttributeError:
+     except (KeyError, AttributeError):
          funame = ""
  except ImportError:
      funame = ""
@@ -7322,9 +6769,7 @@ if(py7zr_support):
  try:
      groupinfo = grp.getgrgid(os.getgid())
      fgname = groupinfo.gr_name
- except KeyError:
-     fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
      fgname = ""
  except ImportError:
      fgname = ""
@@ -7350,7 +6795,7 @@ if(py7zr_support):
  while(ilmin < ilsize):
      cfcontents = MkTempFile()
      fcontents.seek(0, 0)
-     shutil.copyfileobj(fcontents, cfcontents)
+     shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
      fcontents.seek(0, 0)
      cfcontents.seek(0, 0)
      cfcontents = CompressOpenFileAlt(
@@ -7366,7 +6811,7 @@ if(py7zr_support):
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
      cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7389,20 +6834,16 @@ if(py7zr_support):
  fp.flush()
  if(hasattr(os, "sync")):
      os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  fcontents.close()
  return fp
 
- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
      if(not hasattr(fp, "write")):
          return False
      if(verbose):
-         logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+         logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
      GetDirList = inlist
      if(not GetDirList):
          return False
@@ -7460,12 +6901,12 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  return fp
 
 
- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
      inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
      return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
 
 
- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if(IsNestedDict(formatspecs) and fmttype=="auto" and
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
          get_in_ext = os.path.splitext(outfile)
@@ -7518,18 +6959,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
@@ -7546,7 +6980,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True
 
- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if not isinstance(infiles, list):
          infiles = [infiles]
      returnout = False
@@ -7561,7 +6995,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
      return True
  return returnout
 
- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
      if(IsNestedDict(formatspecs) and fmttype=="auto" and
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
          get_in_ext = os.path.splitext(outfile)
@@ -7611,18 +7045,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
@@ -7640,7 +7067,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True
 
- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if(IsNestedDict(formatspecs) and fmttype=="auto" and
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
          get_in_ext = os.path.splitext(outfile)
@@ -7691,18 +7118,11 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
@@ -7720,7 +7140,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True
 
- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if not isinstance(infiles, list):
          infiles = [infiles]
      returnout = False
@@ -7735,7 +7155,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
      return True
  return returnout
 
- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if(IsNestedDict(formatspecs) and fmttype=="auto" and
         (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
          get_in_ext = os.path.splitext(outfile)
@@ -7786,18 +7206,11 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
@@ -7815,7 +7228,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True
 
- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if not isinstance(infiles, list):
          infiles = [infiles]
      returnout = False
@@ -7831,11 +7244,11 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout
 
  if(not rarfile_support):
-     def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+     def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
          return False
 
  if(rarfile_support):
-     def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+     def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
          if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
              get_in_ext = os.path.splitext(outfile)
@@ -7886,18 +7299,11 @@ if(rarfile_support):
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
@@ -7915,7 +7321,7 @@ if(rarfile_support):
  fp.close()
  return True
 
- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if not isinstance(infiles, list):
          infiles = [infiles]
      returnout = False
@@ -7931,11 +7337,11 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout
 
  if(not py7zr_support):
-     def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+     def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
          return False
 
  if(py7zr_support):
-     def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+     def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
          if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
              get_in_ext = os.path.splitext(outfile)
@@ -7986,18 +7392,11 @@ if(py7zr_support):
      fp.flush()
      if(hasattr(os, "sync")):
          os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-     pass
- except AttributeError:
-     pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
      pass
  if(outfile == "-"):
      fp.seek(0, 0)
-     if(hasattr(sys.stdout, "buffer")):
-         shutil.copyfileobj(fp, sys.stdout.buffer)
-     else:
-         shutil.copyfileobj(fp, sys.stdout)
+     shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
      fp.seek(0, 0)
      outvar = fp.read()
@@ -8015,7 +7414,7 @@ if(py7zr_support):
  fp.close()
  return True
 
- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
      if not isinstance(infiles, list):
          infiles = [infiles]
      returnout = False
@@ -8030,7 +7429,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
      return True
  return returnout
 
- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
      inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
      return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
@@ -8064,9 +7463,7 @@ def PrintPermissionString(fchmode, ftype):
8064
7463
  permissionstr = "w" + permissionstr
8065
7464
  try:
8066
7465
  permissionoutstr = stat.filemode(fchmode)
8067
- except AttributeError:
8068
- permissionoutstr = permissionstr
8069
- except KeyError:
7466
+ except (KeyError, AttributeError):
8070
7467
  permissionoutstr = permissionstr
8071
7468
  return permissionoutstr
8072
7469
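Note: `stat.filemode()` only exists on Python 3.3+, so the merged `except (KeyError, AttributeError)` above keeps the hand-assembled permission string as a fallback. The guard on its own, with the fallback string assumed to be precomputed as in `PrintPermissionString`:

    import stat

    def filemode_or_fallback(mode, fallback):
        """Prefer stat.filemode() where available, e.g. 0o100644 -> '-rw-r--r--'."""
        try:
            return stat.filemode(mode)
        except (KeyError, AttributeError):
            return fallback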
 
@@ -8982,7 +8379,7 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
8982
8379
 
8983
8380
 
8984
8381
  def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
8985
- filefp = StringIO()
8382
+ filefp = MkTempFile("", isbytes=False)
8986
8383
  outstring = UncompressString(instring, formatspecs, filestart)
8987
8384
  filefp.write(outstring)
8988
8385
  filefp.seek(0, 0)
@@ -8997,7 +8394,7 @@ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=
8997
8394
  fp.seek(filestart, 0)
8998
8395
  if(prechck!="zstd"):
8999
8396
  return UncompressFileAlt(fp, formatspecs, filestart)
9000
- filefp = StringIO()
8397
+ filefp = MkTempFile("", isbytes=False)
9001
8398
  fp.seek(filestart, 0)
9002
8399
  outstring = UncompressString(fp.read(), formatspecs, 0)
9003
8400
  filefp.write(outstring)
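Note: both `UncompressStringAlt` hunks swap a bare `StringIO()` for `MkTempFile("", isbytes=False)`, the package's own temp-file helper opened in text mode. `MkTempFile` is internal to pyfoxfile; a rough standard-library stand-in, assuming it spools small payloads in memory and spills large ones to disk, might look like:

    import tempfile

    def make_text_spool(initial="", max_mem=1 << 20):
        """Text-mode buffer kept in memory up to max_mem bytes, then on disk."""
        fp = tempfile.SpooledTemporaryFile(max_size=max_mem, mode="w+",
                                           encoding="utf-8")
        if initial:
            fp.write(initial)
            fp.seek(0, 0)
        return fp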
@@ -9069,9 +8466,7 @@ def _extract_base_fp(obj):
9069
8466
  try:
9070
8467
  f() # probe fileno()
9071
8468
  return cur
9072
- except UnsupportedOperation:
9073
- pass
9074
- except Exception:
8469
+ except Exception:
9075
8470
  pass
9076
8471
  for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
9077
8472
  nxt = getattr(cur, attr, None)
@@ -9463,7 +8858,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw):
9463
8858
 
9464
8859
  # ========= copy helpers =========
9465
8860
 
9466
- def fast_copy(infp, outfp, bufsize=1 << 20):
8861
+ def fast_copy(infp, outfp, bufsize=__filebuff_size__):
9467
8862
  """
9468
8863
  Efficient copy from any readable file-like to any writable file-like.
9469
8864
  Uses readinto() when available to avoid extra allocations.
@@ -9507,7 +8902,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
9507
8902
  shutil.copyfileobj(fp, outfp, length=chunk_size)
9508
8903
 
9509
8904
 
9510
- def copy_opaque(src, dst, bufsize=1 << 20, grow_step=64 << 20):
8905
+ def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20):
9511
8906
  """
9512
8907
  Copy opaque bytes from 'src' (any readable file-like) to 'dst'
9513
8908
  (your mmap-backed FileLikeAdapter or any writable file-like).
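Note: `fast_copy` and `copy_opaque` now take their default buffer size from the shared `__filebuff_size__` constant rather than a hard-coded `1 << 20`. The `readinto()` strategy the docstring describes can be sketched standalone (buffer size again assumed):

    def fast_copy(infp, outfp, bufsize=1 << 20):
        """Copy between file-likes, reusing one buffer via readinto() if offered."""
        readinto = getattr(infp, "readinto", None)
        if readinto is None:
            while True:
                chunk = infp.read(bufsize)
                if not chunk:
                    break
                outfp.write(chunk)
            return
        buf = bytearray(bufsize)
        view = memoryview(buf)
        while True:
            n = readinto(buf)
            if not n:
                break
            outfp.write(view[:n])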
@@ -9569,11 +8964,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
9569
8964
 
9570
8965
  try:
9571
8966
  fp.seek(0, 0)
9572
- except io.UnsupportedOperation:
9573
- pass
9574
- except AttributeError:
9575
- pass
9576
- except OSError:
8967
+ except (io.UnsupportedOperation, AttributeError, OSError):
9577
8968
  pass
9578
8969
 
9579
8970
  if (not compression or compression == formatspecs['format_magic']
@@ -9632,11 +9023,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
9632
9023
 
9633
9024
  try:
9634
9025
  bytesfp.seek(0, 0)
9635
- except io.UnsupportedOperation:
9636
- pass
9637
- except AttributeError:
9638
- pass
9639
- except OSError:
9026
+ except (io.UnsupportedOperation, AttributeError, OSError):
9640
9027
  pass
9641
9028
  out = FileLikeAdapter(bytesfp, mode="rb") # read interface for the caller
9642
9029
  try:
@@ -9766,31 +9153,18 @@ def CheckSumSupport(checkfor, guaranteed=True):
9766
9153
  try:
9767
9154
  hash_list = sorted(list(hashlib.algorithms_guaranteed))
9768
9155
  except AttributeError:
9769
- hash_list = sorted(list(hashlib.algorithms))
9770
- else:
9771
- try:
9772
- hash_list = sorted(list(hashlib.algorithms_available))
9773
- except AttributeError:
9774
- hash_list = sorted(list(hashlib.algorithms))
9775
- checklistout = sorted(hash_list + ['adler32', 'crc16', 'crc16_ansi', 'crc16_ibm',
9776
- 'crc16_ccitt', 'crc32', 'crc64', 'crc64_ecma', 'crc64_iso', 'none'])
9777
- if(checkfor in checklistout):
9778
- return True
9779
- else:
9780
- return False
9781
-
9782
-
9783
- def CheckSumSupportAlt(checkfor, guaranteed=True):
9784
- if(guaranteed):
9785
- try:
9786
- hash_list = sorted(list(hashlib.algorithms_guaranteed))
9787
- except AttributeError:
9788
- hash_list = sorted(list(hashlib.algorithms))
9156
+ try:
9157
+ hash_list = sorted(list(hashlib.algorithms))
9158
+ except AttributeError:
9159
+ hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
9789
9160
  else:
9790
9161
  try:
9791
9162
  hash_list = sorted(list(hashlib.algorithms_available))
9792
9163
  except AttributeError:
9793
- hash_list = sorted(list(hashlib.algorithms))
9164
+ try:
9165
+ hash_list = sorted(list(hashlib.algorithms))
9166
+ except AttributeError:
9167
+ hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
9794
9168
  checklistout = hash_list
9795
9169
  if(checkfor in checklistout):
9796
9170
  return True
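Note: the rewritten `CheckSumSupport` (which also absorbs the old `CheckSumSupportAlt`) probes `hashlib.algorithms_guaranteed` first, then the Python 2.7-era `hashlib.algorithms`, then `hashlib.algorithms_available`. A condensed sketch of the same probe order:

    import hashlib

    def available_hashes(guaranteed=True):
        """Sorted lowercase hash names, preferring the guaranteed set."""
        attrs = (("algorithms_guaranteed", "algorithms") if guaranteed
                 else ("algorithms_available", "algorithms"))
        for attr in attrs:
            names = getattr(hashlib, attr, None)
            if names is not None:
                return sorted(n.lower() for n in names)
        return sorted(n.lower() for n in hashlib.algorithms_available)

    def checksum_supported(name, guaranteed=True):
        return name.lower() in available_hashes(guaranteed)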
@@ -9798,48 +9172,48 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
9798
9172
  return False
9799
9173
 
9800
9174
 
9801
- def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9175
+ def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9802
9176
  return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9803
9177
 
9804
- def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9178
+ def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
9805
9179
  return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
9806
9180
 
9807
- def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9181
+ def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9808
9182
  return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
9809
9183
 
9810
9184
 
9811
- def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9185
+ def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9812
9186
  return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9813
9187
 
9814
9188
 
9815
- def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9189
+ def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9816
9190
  return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9817
9191
 
9818
9192
 
9819
9193
  if(not rarfile_support):
9820
- def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9194
+ def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9821
9195
  return False
9822
9196
 
9823
9197
  if(rarfile_support):
9824
- def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9198
+ def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9825
9199
  return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9826
9200
 
9827
9201
 
9828
9202
  if(not py7zr_support):
9829
- def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9203
+ def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9830
9204
  return False
9831
9205
 
9832
9206
  if(py7zr_support):
9833
- def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9207
+ def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9834
9208
  return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
9835
9209
 
9836
9210
 
9837
- def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9211
+ def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
9838
9212
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
9839
9213
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
9840
9214
  formatspecs = formatspecs[checkcompressfile]
9841
9215
  if(verbose):
9842
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
9216
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9843
9217
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
9844
9218
  return PackFoxFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
9845
9219
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
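Note: every packing entry point in this release flips its default `checksumtype` from `"crc32"` to `"md5"` for the header, JSON, and content digests. For scale, the two digest styles rendered as hex strings (assuming the format stores lowercase hex, as the validator's verbose output suggests):

    import hashlib
    import zlib

    data = b"example record"
    crc32_hex = format(zlib.crc32(data) & 0xFFFFFFFF, "08x")  # old default, 8 digits
    md5_hex = hashlib.md5(data).hexdigest()                   # new default, 32 digits
    print(crc32_hex, md5_hex)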
@@ -9922,7 +9296,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9922
9296
  formatspecs=__file_format_multi_dict__, # keep default like original
9923
9297
  seektoend=False, verbose=False, returnfp=False):
9924
9298
  if(verbose):
9925
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
9299
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
9926
9300
 
9927
9301
  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
9928
9302
  formatspecs = formatspecs[fmttype]
@@ -9949,10 +9323,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
9949
9323
 
9950
9324
  elif(infile == "-"):
9951
9325
  fp = MkTempFile()
9952
- if(hasattr(sys.stdin, "buffer")):
9953
- shutil.copyfileobj(sys.stdin.buffer, fp)
9954
- else:
9955
- shutil.copyfileobj(sys.stdin, fp)
9326
+ shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
9956
9327
  fp.seek(filestart, 0)
9957
9328
  fp = UncompressFileAlt(fp, formatspecs, filestart)
9958
9329
  checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
@@ -10029,9 +9400,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
10029
9400
 
10030
9401
  try:
10031
9402
  fp.seek(0, 2)
10032
- except OSError:
10033
- SeekToEndOfFile(fp)
10034
- except ValueError:
9403
+ except (OSError, ValueError):
10035
9404
  SeekToEndOfFile(fp)
10036
9405
 
10037
9406
  CatSize = fp.tell()
@@ -10061,16 +9430,17 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
10061
9430
  else:
10062
9431
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
10063
9432
 
10064
- fnumextrafieldsize = int(inheader[5], 16)
10065
- fnumextrafields = int(inheader[6], 16)
10066
- extrastart = 7
9433
+ fnumextrafieldsize = int(inheader[6], 16)
9434
+ fnumextrafields = int(inheader[7], 16)
9435
+ extrastart = 8
10067
9436
  extraend = extrastart + fnumextrafields
10068
9437
  formversion = re.findall("([\\d]+)", formstring)
10069
9438
  fheadsize = int(inheader[0], 16)
10070
9439
  fnumfields = int(inheader[1], 16)
10071
9440
  fhencoding = inheader[2]
10072
9441
  fostype = inheader[3]
10073
- fnumfiles = int(inheader[4], 16)
9442
+ fpythontype = inheader[4]
9443
+ fnumfiles = int(inheader[5], 16)
10074
9444
  fprechecksumtype = inheader[-2]
10075
9445
  fprechecksum = inheader[-1]
10076
9446
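Note: the validator now reads a new header field, `fpythontype`, at index 4, which pushes the file count to index 5 and the extra-field size/count to indexes 6 and 7. A toy parse of the revised layout with made-up hex-encoded sample values:

    inheader = ["40", "8", "UTF-8", "posix", "cpython", "2", "0", "0"]

    fheadsize = int(inheader[0], 16)
    fnumfields = int(inheader[1], 16)
    fhencoding = inheader[2]
    fostype = inheader[3]
    fpythontype = inheader[4]                  # new field in this revision
    fnumfiles = int(inheader[5], 16)           # was index 4
    fnumextrafieldsize = int(inheader[6], 16)  # was index 5
    fnumextrafields = int(inheader[7], 16)     # was index 6
    extrastart = 8
    extraend = extrastart + fnumextrafields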
 
@@ -10191,7 +9561,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
10191
9561
  VerbosePrintOut(outfname)
10192
9562
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
10193
9563
 
10194
- if(outfcs == infcs):
9564
+ if(hmac.compare_digest(outfcs, infcs)):
10195
9565
  if(verbose):
10196
9566
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
10197
9567
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -10203,7 +9573,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
10203
9573
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
10204
9574
 
10205
9575
  if(outfjsonsize > 0):
10206
- if(outfjsonchecksum == injsonfcs):
9576
+ if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
10207
9577
  if(verbose):
10208
9578
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
10209
9579
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -10227,7 +9597,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
10227
9597
  infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
10228
9598
  pyhascontents = True
10229
9599
 
10230
- if(outfccs == infccs):
9600
+ if(hmac.compare_digest(outfccs, infccs)):
10231
9601
  if(verbose):
10232
9602
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
10233
9603
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
@@ -10300,7 +9670,7 @@ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
10300
9670
  while True:
10301
9671
  if outstartfile >= outfsize: # stop when function signals False
10302
9672
  break
10303
- is_valid_file = ArchiveFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
9673
+ is_valid_file = FoxFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
10304
9674
  if is_valid_file is False: # stop when function signals False
10305
9675
  outretval.append(is_valid_file)
10306
9676
  break
@@ -10310,9 +9680,7 @@ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
10310
9680
  outstartfile = infile.tell()
10311
9681
  try:
10312
9682
  infile.seek(0, 2)
10313
- except OSError:
10314
- SeekToEndOfFile(infile)
10315
- except ValueError:
9683
+ except (OSError, ValueError):
10316
9684
  SeekToEndOfFile(infile)
10317
9685
  outfsize = infile.tell()
10318
9686
  infile.seek(outstartfile, 0)
@@ -10388,7 +9756,7 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
10388
9756
  formatspecs = formatspecs[checkcompressfile]
10389
9757
  fp = MkTempFile()
10390
9758
  fp = PackFoxFileFromTarFile(
10391
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9759
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10392
9760
  listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10393
9761
  return listarrayfiles
10394
9762
 
@@ -10399,7 +9767,7 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
10399
9767
  formatspecs = formatspecs[checkcompressfile]
10400
9768
  fp = MkTempFile()
10401
9769
  fp = PackFoxFileFromZipFile(
10402
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9770
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10403
9771
  listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10404
9772
  return listarrayfiles
10405
9773
 
@@ -10415,7 +9783,7 @@ if(rarfile_support):
10415
9783
  formatspecs = formatspecs[checkcompressfile]
10416
9784
  fp = MkTempFile()
10417
9785
  fp = PackFoxFileFromRarFile(
10418
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9786
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10419
9787
  listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10420
9788
  return listarrayfiles
10421
9789
 
@@ -10430,7 +9798,7 @@ if(py7zr_support):
10430
9798
  formatspecs = formatspecs[checkcompressfile]
10431
9799
  fp = MkTempFile()
10432
9800
  fp = PackFoxFileFromSevenZipFile(
10433
- infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
9801
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
10434
9802
  listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
10435
9803
  return listarrayfiles
10436
9804
 
@@ -10454,7 +9822,7 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
10454
9822
  return False
10455
9823
 
10456
9824
 
10457
- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
9825
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
10458
9826
  outarray = MkTempFile()
10459
9827
  packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10460
9828
  compressionlevel, compressionlistalt, followlink, checksumtype, extradata, {}, formatspecs, verbose, True)
@@ -10585,7 +9953,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10585
9953
  if compressionuselist is None:
10586
9954
  compressionuselist = compressionlistalt
10587
9955
  if checksumtype is None:
10588
- checksumtype = ["crc32", "crc32", "crc32", "crc32"]
9956
+ checksumtype = ["md5", "md5", "md5", "md5"]
10589
9957
  if extradata is None:
10590
9958
  extradata = []
10591
9959
  if jsondata is None:
@@ -10672,7 +10040,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10672
10040
  compression = "auto"
10673
10041
 
10674
10042
  if verbose:
10675
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10043
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10676
10044
 
10677
10045
  # No files?
10678
10046
  if not listarrayfiles.get('ffilelist'):
@@ -10777,7 +10145,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10777
10145
  while ilmin < ilsize:
10778
10146
  cfcontents = MkTempFile()
10779
10147
  fcontents.seek(0, 0)
10780
- shutil.copyfileobj(fcontents, cfcontents)
10148
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
10781
10149
  fcontents.seek(0, 0)
10782
10150
  cfcontents.seek(0, 0)
10783
10151
  cfcontents = CompressOpenFileAlt(
@@ -10795,7 +10163,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10795
10163
 
10796
10164
  fcontents.seek(0, 0)
10797
10165
  cfcontents = MkTempFile()
10798
- shutil.copyfileobj(fcontents, cfcontents)
10166
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
10799
10167
  cfcontents.seek(0, 0)
10800
10168
  cfcontents = CompressOpenFileAlt(
10801
10169
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
@@ -10894,22 +10262,12 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
10894
10262
  fp.flush()
10895
10263
  if hasattr(os, "sync"):
10896
10264
  os.fsync(fp.fileno())
10897
- except io.UnsupportedOperation:
10898
- if verbose:
10899
- logging.warning("Flush/sync unsupported on this file object.")
10900
- except AttributeError:
10901
- if verbose:
10902
- logging.warning("Flush/sync attributes missing on this file object.")
10903
- except OSError as e:
10904
- if verbose:
10905
- logging.warning("OS error during flush/sync: %s", e)
10265
+ except (io.UnsupportedOperation, AttributeError, OSError):
10266
+ pass
10906
10267
 
10907
10268
  if outfile == "-":
10908
10269
  fp.seek(0, 0)
10909
- if hasattr(sys.stdout, "buffer"):
10910
- shutil.copyfileobj(fp, sys.stdout.buffer)
10911
- else:
10912
- shutil.copyfileobj(fp, sys.stdout)
10270
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
10913
10271
  elif outfile is None:
10914
10272
  fp.seek(0, 0)
10915
10273
  outvar = fp.read()
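Note: the repack path collapses its three flush/sync handlers into one tuple and drops the per-exception warnings. The guarded flush, as its own helper (mirroring the code above, which gates on `os.sync` before calling `os.fsync`):

    import io
    import os

    def best_effort_sync(fp):
        """Flush Python-level buffers and, where possible, fsync to disk."""
        try:
            fp.flush()
            if hasattr(os, "sync"):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass  # in-memory buffers and pipe-like objects cannot flush/fsync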
@@ -10948,14 +10306,14 @@ def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto",
10948
10306
  return True
10949
10307
  return returnout
10950
10308
 
10951
- def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10309
+ def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10952
10310
  fp = MkTempFile(instr)
10953
10311
  listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
10954
10312
  checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
10955
10313
  return listarrayfiles
10956
10314
 
10957
10315
 
10958
- def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10316
+ def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
10959
10317
  outarray = MkTempFile()
10960
10318
  packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
10961
10319
  compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10968,7 +10326,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
10968
10326
  if(outdir is not None):
10969
10327
  outdir = RemoveWindowsPath(outdir)
10970
10328
  if(verbose):
10971
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10329
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
10972
10330
  if(isinstance(infile, dict)):
10973
10331
  listarrayfiles = infile
10974
10332
  else:
@@ -11018,16 +10376,12 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
11018
10376
  listarrayfiles['ffilelist'][lcfi]['fcontents'])
11019
10377
  listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
11020
10378
  shutil.copyfileobj(
11021
- listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
10379
+ listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc, length=__filebuff_size__)
11022
10380
  try:
11023
10381
  fpc.flush()
11024
10382
  if(hasattr(os, "sync")):
11025
10383
  os.fsync(fpc.fileno())
11026
- except io.UnsupportedOperation:
11027
- pass
11028
- except AttributeError:
11029
- pass
11030
- except OSError:
10384
+ except (io.UnsupportedOperation, AttributeError, OSError):
11031
10385
  pass
11032
10386
  if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
11033
10387
  os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
@@ -11069,16 +10423,12 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
11069
10423
  flinkinfo['fcontents'] = MkTempFile(
11070
10424
  flinkinfo['fcontents'])
11071
10425
  flinkinfo['fcontents'].seek(0, 0)
11072
- shutil.copyfileobj(flinkinfo['fcontents'], fpc)
10426
+ shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
11073
10427
  try:
11074
10428
  fpc.flush()
11075
10429
  if(hasattr(os, "sync")):
11076
10430
  os.fsync(fpc.fileno())
11077
- except io.UnsupportedOperation:
11078
- pass
11079
- except AttributeError:
11080
- pass
11081
- except OSError:
10431
+ except (io.UnsupportedOperation, AttributeError, OSError):
11082
10432
  pass
11083
10433
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
11084
10434
  os.chown(PrependPath(
@@ -11148,16 +10498,12 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
11148
10498
  flinkinfo['fcontents'] = MkTempFile(
11149
10499
  flinkinfo['fcontents'])
11150
10500
  flinkinfo['fcontents'].seek(0, 0)
11151
- shutil.copyfileobj(flinkinfo['fcontents'], fpc)
10501
+ shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
11152
10502
  try:
11153
10503
  fpc.flush()
11154
10504
  if(hasattr(os, "sync")):
11155
10505
  os.fsync(fpc.fileno())
11156
- except io.UnsupportedOperation:
11157
- pass
11158
- except AttributeError:
11159
- pass
11160
- except OSError:
10506
+ except (io.UnsupportedOperation, AttributeError, OSError):
11161
10507
  pass
11162
10508
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
11163
10509
  os.chown(PrependPath(
@@ -11244,7 +10590,7 @@ def ftype_to_str(ftype):
11244
10590
 
11245
10591
  def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
11246
10592
  if(verbose):
11247
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10593
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11248
10594
  if(isinstance(infile, dict)):
11249
10595
  listarrayfileslist = [infile]
11250
10596
  if(isinstance(infile, list)):
@@ -11252,7 +10598,7 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
11252
10598
  else:
11253
10599
  if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
11254
10600
  infile = RemoveWindowsPath(infile)
11255
- listarrayfileslist = ArchiveFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
10601
+ listarrayfileslist = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
11256
10602
  if(not listarrayfileslist):
11257
10603
  return False
11258
10604
  for listarrayfiles in listarrayfileslist:
@@ -11325,9 +10671,7 @@ def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
11325
10671
  outstartfile = infile.tell()
11326
10672
  try:
11327
10673
  infile.seek(0, 2)
11328
- except OSError:
11329
- SeekToEndOfFile(infile)
11330
- except ValueError:
10674
+ except (OSError, ValueError):
11331
10675
  SeekToEndOfFile(infile)
11332
10676
  outfsize = infile.tell()
11333
10677
  infile.seek(outstartfile, 0)
@@ -11357,13 +10701,10 @@ def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck
11357
10701
 
11358
10702
  def TarFileListFiles(infile, verbose=False, returnfp=False):
11359
10703
  if(verbose):
11360
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10704
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11361
10705
  if(infile == "-"):
11362
10706
  infile = MkTempFile()
11363
- if(hasattr(sys.stdin, "buffer")):
11364
- shutil.copyfileobj(sys.stdin.buffer, infile)
11365
- else:
11366
- shutil.copyfileobj(sys.stdin, infile)
10707
+ shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
11367
10708
  infile.seek(0, 0)
11368
10709
  if(not infile):
11369
10710
  return False
@@ -11482,13 +10823,10 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
11482
10823
 
11483
10824
  def ZipFileListFiles(infile, verbose=False, returnfp=False):
11484
10825
  if(verbose):
11485
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10826
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11486
10827
  if(infile == "-"):
11487
10828
  infile = MkTempFile()
11488
- if(hasattr(sys.stdin, "buffer")):
11489
- shutil.copyfileobj(sys.stdin.buffer, infile)
11490
- else:
11491
- shutil.copyfileobj(sys.stdin, infile)
10829
+ shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
11492
10830
  infile.seek(0, 0)
11493
10831
  if(not infile):
11494
10832
  return False
@@ -11565,24 +10903,18 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
11565
10903
  printfname = member.filename
11566
10904
  try:
11567
10905
  fuid = int(os.getuid())
11568
- except AttributeError:
11569
- fuid = int(0)
11570
- except KeyError:
10906
+ except (KeyError, AttributeError):
11571
10907
  fuid = int(0)
11572
10908
  try:
11573
10909
  fgid = int(os.getgid())
11574
- except AttributeError:
11575
- fgid = int(0)
11576
- except KeyError:
10910
+ except (KeyError, AttributeError):
11577
10911
  fgid = int(0)
11578
10912
  try:
11579
10913
  import pwd
11580
10914
  try:
11581
10915
  userinfo = pwd.getpwuid(os.getuid())
11582
10916
  funame = userinfo.pw_name
11583
- except KeyError:
11584
- funame = ""
11585
- except AttributeError:
10917
+ except (KeyError, AttributeError):
11586
10918
  funame = ""
11587
10919
  except ImportError:
11588
10920
  funame = ""
@@ -11592,9 +10924,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
11592
10924
  try:
11593
10925
  groupinfo = grp.getgrgid(os.getgid())
11594
10926
  fgname = groupinfo.gr_name
11595
- except KeyError:
11596
- fgname = ""
11597
- except AttributeError:
10927
+ except (KeyError, AttributeError):
11598
10928
  fgname = ""
11599
10929
  except ImportError:
11600
10930
  fgname = ""
@@ -11620,7 +10950,7 @@ if(not rarfile_support):
11620
10950
  if(rarfile_support):
11621
10951
  def RarFileListFiles(infile, verbose=False, returnfp=False):
11622
10952
  if(verbose):
11623
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
10953
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11624
10954
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
11625
10955
  return False
11626
10956
  if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11703,24 +11033,18 @@ if(rarfile_support):
11703
11033
  printfname = member.filename
11704
11034
  try:
11705
11035
  fuid = int(os.getuid())
11706
- except AttributeError:
11707
- fuid = int(0)
11708
- except KeyError:
11036
+ except (KeyError, AttributeError):
11709
11037
  fuid = int(0)
11710
11038
  try:
11711
11039
  fgid = int(os.getgid())
11712
- except AttributeError:
11713
- fgid = int(0)
11714
- except KeyError:
11040
+ except (KeyError, AttributeError):
11715
11041
  fgid = int(0)
11716
11042
  try:
11717
11043
  import pwd
11718
11044
  try:
11719
11045
  userinfo = pwd.getpwuid(os.getuid())
11720
11046
  funame = userinfo.pw_name
11721
- except KeyError:
11722
- funame = ""
11723
- except AttributeError:
11047
+ except (KeyError, AttributeError):
11724
11048
  funame = ""
11725
11049
  except ImportError:
11726
11050
  funame = ""
@@ -11730,9 +11054,7 @@ if(rarfile_support):
11730
11054
  try:
11731
11055
  groupinfo = grp.getgrgid(os.getgid())
11732
11056
  fgname = groupinfo.gr_name
11733
- except KeyError:
11734
- fgname = ""
11735
- except AttributeError:
11057
+ except (KeyError, AttributeError):
11736
11058
  fgname = ""
11737
11059
  except ImportError:
11738
11060
  fgname = ""
@@ -11757,7 +11079,7 @@ if(not py7zr_support):
11757
11079
  if(py7zr_support):
11758
11080
  def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
11759
11081
  if(verbose):
11760
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
11082
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11761
11083
  if(not os.path.exists(infile) or not os.path.isfile(infile)):
11762
11084
  return False
11763
11085
  lcfi = 0
@@ -11810,24 +11132,18 @@ if(py7zr_support):
11810
11132
  file_content[member.filename].close()
11811
11133
  try:
11812
11134
  fuid = int(os.getuid())
11813
- except AttributeError:
11814
- fuid = int(0)
11815
- except KeyError:
11135
+ except (KeyError, AttributeError):
11816
11136
  fuid = int(0)
11817
11137
  try:
11818
11138
  fgid = int(os.getgid())
11819
- except AttributeError:
11820
- fgid = int(0)
11821
- except KeyError:
11139
+ except (KeyError, AttributeError):
11822
11140
  fgid = int(0)
11823
11141
  try:
11824
11142
  import pwd
11825
11143
  try:
11826
11144
  userinfo = pwd.getpwuid(os.getuid())
11827
11145
  funame = userinfo.pw_name
11828
- except KeyError:
11829
- funame = ""
11830
- except AttributeError:
11146
+ except (KeyError, AttributeError):
11831
11147
  funame = ""
11832
11148
  except ImportError:
11833
11149
  funame = ""
@@ -11837,9 +11153,7 @@ if(py7zr_support):
11837
11153
  try:
11838
11154
  groupinfo = grp.getgrgid(os.getgid())
11839
11155
  fgname = groupinfo.gr_name
11840
- except KeyError:
11841
- fgname = ""
11842
- except AttributeError:
11156
+ except (KeyError, AttributeError):
11843
11157
  fgname = ""
11844
11158
  except ImportError:
11845
11159
  fgname = ""
@@ -11860,7 +11174,7 @@ if(py7zr_support):
11860
11174
 
11861
11175
  def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
11862
11176
  if(verbose):
11863
- logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
11177
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
11864
11178
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
11865
11179
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
11866
11180
  formatspecs = formatspecs[checkcompressfile]
@@ -11879,7 +11193,7 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
11879
11193
  return False
11880
11194
 
11881
11195
 
11882
- def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
11196
+ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
11883
11197
  outarray = MkTempFile()
11884
11198
  packform = PackFoxFile(infiles, outarray, dirlistfromtxt, "auto", compression, compresswholefile,
11885
11199
  compressionlevel, compressionlistalt, followlink, checksumtype, [], {}, formatspecs, False, True)
@@ -11891,19 +11205,19 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
11891
11205
  PyNeoFile compatibility layer
11892
11206
  """
11893
11207
 
11894
- def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11208
+ def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11895
11209
  return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
11896
11210
 
11897
- def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11211
+ def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
11898
11212
  return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
11899
11213
 
11900
- def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11214
+ def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11901
11215
  return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
11902
11216
 
11903
- def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='crc32', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11217
+ def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
11904
11218
  return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
11905
11219
 
11906
- def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
11220
+ def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
11907
11221
  return PackFoxFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
11908
11222
 
11909
11223
  def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
@@ -11912,7 +11226,7 @@ def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonl
11912
11226
  def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
11913
11227
  return UnPackFoxFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
11914
11228
 
11915
- def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
11229
+ def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11916
11230
  return RePackFoxFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11917
11231
 
11918
11232
  def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
@@ -11921,7 +11235,7 @@ def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False,
11921
11235
  def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
11922
11236
  return FoxFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
11923
11237
 
11924
- def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["crc32", "crc32", "crc32", "crc32"], compression="auto", compression_level=None, returnfp=False):
11238
+ def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
11925
11239
  intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
11926
11240
  return RePackFoxFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
11927
11241
 
@@ -11963,10 +11277,7 @@ def download_file_from_ftp_file(url):
11963
11277
  ftp_port = 21
11964
11278
  try:
11965
11279
  ftp.connect(urlparts.hostname, ftp_port)
11966
- except socket.gaierror:
11967
- log.info("Error With URL "+url)
11968
- return False
11969
- except socket.timeout:
11280
+ except (socket.gaierror, socket.timeout):
11970
11281
  log.info("Error With URL "+url)
11971
11282
  return False
11972
11283
  if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
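Note: the FTP and SFTP helpers all fold their DNS-failure and timeout handlers into a single `except (socket.gaierror, socket.timeout)` clause. The connect guard in isolation (host and port are placeholders):

    import socket
    from ftplib import FTP

    def try_ftp_connect(host, port=21, timeout=30):
        """Return a connected FTP client, or None on DNS failure or timeout."""
        ftp = FTP()
        try:
            ftp.connect(host, port, timeout=timeout)
        except (socket.gaierror, socket.timeout):
            return None
        return ftp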
@@ -12054,10 +11365,7 @@ def upload_file_to_ftp_file(ftpfile, url):
12054
11365
  ftp_port = 21
12055
11366
  try:
12056
11367
  ftp.connect(urlparts.hostname, ftp_port)
12057
- except socket.gaierror:
12058
- log.info("Error With URL "+url)
12059
- return False
12060
- except socket.timeout:
11368
+ except (socket.gaierror, socket.timeout):
12061
11369
  log.info("Error With URL "+url)
12062
11370
  return False
12063
11371
  if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12168,7 +11476,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12168
11476
  else:
12169
11477
  response = requests.get(rebuilt_url, headers=headers, timeout=(5, 30), stream=True)
12170
11478
  response.raw.decode_content = True
12171
- shutil.copyfileobj(response.raw, httpfile)
11479
+ shutil.copyfileobj(response.raw, httpfile, length=__filebuff_size__)
12172
11480
 
12173
11481
  # 2) HTTPX branch
12174
11482
  elif usehttp == 'httpx' and havehttpx:
@@ -12180,7 +11488,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12180
11488
  else:
12181
11489
  response = client.get(rebuilt_url, headers=headers)
12182
11490
  raw_wrapper = RawIteratorWrapper(response.iter_bytes())
12183
- shutil.copyfileobj(raw_wrapper, httpfile)
11491
+ shutil.copyfileobj(raw_wrapper, httpfile, length=__filebuff_size__)
12184
11492
 
12185
11493
  # 3) Mechanize branch
12186
11494
  elif usehttp == 'mechanize' and havemechanize:
@@ -12199,7 +11507,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12199
11507
 
12200
11508
  # Open the URL and copy the response to httpfile
12201
11509
  response = br.open(rebuilt_url)
12202
- shutil.copyfileobj(response, httpfile)
11510
+ shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
12203
11511
 
12204
11512
  # 4) Fallback to urllib
12205
11513
  else:
@@ -12212,7 +11520,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
12212
11520
  else:
12213
11521
  opener = build_opener()
12214
11522
  response = opener.open(request)
12215
- shutil.copyfileobj(response, httpfile)
11523
+ shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
12216
11524
 
12217
11525
  # Reset file pointer to the start before returning
12218
11526
  httpfile.seek(0, 0)
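Note: each HTTP branch now hands the shared buffer size to `shutil.copyfileobj` when spooling the response body. The requests branch, reduced to a standalone shape (the chunk size is an assumed stand-in for `__filebuff_size__`; requires the third-party `requests` package):

    import shutil
    import tempfile

    import requests  # third-party

    CHUNK = 1 << 20  # assumed stand-in for __filebuff_size__

    def download_to_spool(url, headers=None):
        """Stream an HTTP body into a spooled temp file and rewind it."""
        httpfile = tempfile.SpooledTemporaryFile(max_size=CHUNK)
        response = requests.get(url, headers=headers, timeout=(5, 30), stream=True)
        response.raw.decode_content = True  # undo transfer-encoding before copy
        shutil.copyfileobj(response.raw, httpfile, length=CHUNK)
        httpfile.seek(0, 0)
        return httpfile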
@@ -12345,7 +11653,7 @@ def upload_file_to_http_file(
12345
11653
  fileobj.seek(0)
12346
11654
  except Exception:
12347
11655
  pass
12348
- shutil.copyfileobj(fileobj, buf)
11656
+ shutil.copyfileobj(fileobj, buf, length=__filebuff_size__)
12349
11657
 
12350
11658
  _w('\r\n')
12351
11659
  _w('--' + boundary + '--\r\n')
@@ -12434,10 +11742,7 @@ if(haveparamiko):
12434
11742
  username=sftp_username, password=sftp_password)
12435
11743
  except paramiko.ssh_exception.SSHException:
12436
11744
  return False
12437
- except socket.gaierror:
12438
- log.info("Error With URL "+url)
12439
- return False
12440
- except socket.timeout:
11745
+ except (socket.gaierror, socket.timeout):
12441
11746
  log.info("Error With URL "+url)
12442
11747
  return False
12443
11748
  sftp = ssh.open_sftp()
@@ -12491,10 +11796,7 @@ if(haveparamiko):
12491
11796
  username=sftp_username, password=sftp_password)
12492
11797
  except paramiko.ssh_exception.SSHException:
12493
11798
  return False
12494
- except socket.gaierror:
12495
- log.info("Error With URL "+url)
12496
- return False
12497
- except socket.timeout:
11799
+ except (socket.gaierror, socket.timeout):
12498
11800
  log.info("Error With URL "+url)
12499
11801
  return False
12500
11802
  sftp = ssh.open_sftp()
@@ -12545,10 +11847,7 @@ if(havepysftp):
12545
11847
  username=sftp_username, password=sftp_password)
12546
11848
  except paramiko.ssh_exception.SSHException:
12547
11849
  return False
12548
- except socket.gaierror:
12549
- log.info("Error With URL "+url)
12550
- return False
12551
- except socket.timeout:
11850
+ except (socket.gaierror, socket.timeout):
12552
11851
  log.info("Error With URL "+url)
12553
11852
  return False
12554
11853
  sftpfile = MkTempFile()
@@ -12598,10 +11897,7 @@ if(havepysftp):
12598
11897
  username=sftp_username, password=sftp_password)
12599
11898
  except paramiko.ssh_exception.SSHException:
12600
11899
  return False
12601
- except socket.gaierror:
12602
- log.info("Error With URL "+url)
12603
- return False
12604
- except socket.timeout:
11900
+ except (socket.gaierror, socket.timeout):
12605
11901
  log.info("Error With URL "+url)
12606
11902
  return False
12607
11903
  sftpfile.seek(0, 0)