PyArchiveFile 0.24.4__py3-none-any.whl → 0.25.0__py3-none-any.whl
This diff shows the contents of two publicly released versions of the package as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
- {pyarchivefile-0.24.4.data → pyarchivefile-0.25.0.data}/scripts/archivefile.py +2 -2
- {pyarchivefile-0.24.4.dist-info → pyarchivefile-0.25.0.dist-info}/METADATA +2 -2
- pyarchivefile-0.25.0.dist-info/RECORD +10 -0
- pyarchivefile.py +423 -1127
- pyarchivefile-0.24.4.dist-info/RECORD +0 -10
- {pyarchivefile-0.24.4.data → pyarchivefile-0.25.0.data}/scripts/archiveneofile.py +0 -0
- {pyarchivefile-0.24.4.data → pyarchivefile-0.25.0.data}/scripts/neoarchivefile.py +0 -0
- {pyarchivefile-0.24.4.dist-info → pyarchivefile-0.25.0.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.24.4.dist-info → pyarchivefile-0.25.0.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.24.4.dist-info → pyarchivefile-0.25.0.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.24.4.dist-info → pyarchivefile-0.25.0.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
     Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
     Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-    $FileInfo: pyarchivefile.py - Last Update: 11/3/2025 Ver. 0.
+    $FileInfo: pyarchivefile.py - Last Update: 11/3/2025 Ver. 0.25.0 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -73,6 +73,17 @@ except ImportError:
 except ImportError:
     import json

+testyaml = False
+try:
+    import oyaml as yaml
+    testyaml = True
+except ImportError:
+    try:
+        import yaml
+        testyaml = True
+    except ImportError:
+        testyaml = False
+
 try:
     import configparser
 except ImportError:
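Reviewer note: the new testyaml flag follows the same optional-dependency pattern the module already uses for py7zr and paramiko below. A minimal standalone sketch of how such a flag is typically consumed (the load_metadata helper is illustrative, not part of the package):

    import json

    testyaml = False
    try:
        import yaml  # PyYAML, optional
        testyaml = True
    except ImportError:
        testyaml = False

    def load_metadata(text):
        # Prefer YAML when available; otherwise fall back to JSON.
        if testyaml:
            return yaml.safe_load(text)
        return json.loads(text)

    print(load_metadata('{"name": "pyarchivefile"}'))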
@@ -115,6 +126,16 @@ else:
     bytes_type = bytes
     text_type = str

+# Text streams (as provided by Python)
+PY_STDIN_TEXT = sys.stdin
+PY_STDOUT_TEXT = sys.stdout
+PY_STDERR_TEXT = sys.stderr
+
+# Binary-friendly streams (use .buffer on Py3, fall back on Py2)
+PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
+PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
+PY_STDERR_BUF = getattr(sys.stderr, "buffer", sys.stderr)
+
 # Text vs bytes tuples you can use with isinstance()
 TEXT_TYPES = (basestring,)  # "str or unicode" on Py2, "str" on Py3
 BINARY_TYPES = (bytes,) if not PY2 else (str,)  # bytes on Py3, str on Py2
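Reviewer note: the getattr(..., "buffer", ...) trick above is the standard way to get a bytes-capable stream on both Python 2 and 3. A small sketch:

    import sys

    # On Python 3, sys.stdout is a text stream; sys.stdout.buffer accepts bytes.
    # On Python 2, sys.stdout itself accepts bytes, so we fall back to it.
    STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
    STDOUT_BUF.write(b"raw bytes, no text encoding applied\n")
    STDOUT_BUF.flush()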
@@ -211,12 +232,6 @@ if sys.version_info[0] == 2:
     except (NameError, AttributeError):
         pass

-    # CRC32 import
-    try:
-        from zlib import crc32
-    except ImportError:
-        from binascii import crc32
-
     # Define FileNotFoundError for Python 2
     try:
         FileNotFoundError
@@ -251,9 +266,7 @@ py7zr_support = False
 try:
     import py7zr
     py7zr_support = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # TAR file checking
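Reviewer note: collapsing the two handlers into except (ImportError, OSError) is behavior-preserving here because both arms were pass; the OSError arm matters because a broken native extension can raise it at import time. A generic sketch of the consolidated guard (the module name is illustrative):

    # Generic optional-import guard; runnable as-is since the ImportError is caught.
    have_feature = False
    try:
        import some_optional_module  # may raise OSError if a native lib is broken
        have_feature = True
    except (ImportError, OSError):
        have_feature = False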
@@ -279,9 +292,7 @@ haveparamiko = False
 try:
     import paramiko
     haveparamiko = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # PySFTP support
@@ -289,9 +300,7 @@ havepysftp = False
 try:
     import pysftp
     havepysftp = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # Add the mechanize import check
@@ -299,9 +308,7 @@ havemechanize = False
 try:
     import mechanize
     havemechanize = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # Requests support
@@ -311,9 +318,7 @@ try:
     haverequests = True
     import urllib3
     logging.getLogger("urllib3").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # HTTPX support
@@ -323,9 +328,7 @@ try:
     havehttpx = True
     logging.getLogger("httpx").setLevel(logging.WARNING)
     logging.getLogger("httpcore").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # HTTP and URL parsing
@@ -416,9 +419,14 @@ __include_defaults__ = True
 __use_inmemfile__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
-
-
+BYTES_PER_KiB = 1024
+BYTES_PER_MiB = 1024 * BYTES_PER_KiB
+# Spool: not tiny, but won’t blow up RAM if many are in use
+DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB  # 4 MiB per spooled temp file
 __spoolfile_size__ = DEFAULT_SPOOL_MAX
+# Buffer: bigger than stdlib default (16 KiB), but still modest
+DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # 256 KiB copy buffer
+__filebuff_size__ = DEFAULT_BUFFER_MAX
 __program_name__ = "Py"+__file_format_default__
 __use_env_file__ = True
 __use_ini_file__ = True
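Reviewer note: a sketch of how a 256 KiB copy buffer like __filebuff_size__ is typically applied when moving data between file objects (names here are stand-ins for the module's globals):

    import io

    BYTES_PER_KIB = 1024
    FILEBUFF_SIZE = 256 * BYTES_PER_KIB  # mirrors DEFAULT_BUFFER_MAX above

    def copy_stream(src, dst, bufsize=FILEBUFF_SIZE):
        # Copy in fixed-size chunks so memory use stays bounded.
        while True:
            chunk = src.read(bufsize)
            if not chunk:
                break
            dst.write(chunk)

    src = io.BytesIO(b"x" * 1000000)
    dst = io.BytesIO()
    copy_stream(src, dst)
    assert dst.getvalue() == src.getvalue()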
@@ -634,12 +642,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 25, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 5, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: f83f2300169beab750d0c2947fc497e2a73a3e91 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -790,7 +798,7 @@ geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prour
 if(platform.python_implementation() != ""):
     py_implementation = platform.python_implementation()
 if(platform.python_implementation() == ""):
-    py_implementation = "
+    py_implementation = "CPython"
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -2378,7 +2386,7 @@ def GetTotalSize(file_list):
         try:
             total_size += os.path.getsize(item)
         except OSError:
-
+            PY_STDERR_TEXT.write("Error accessing file {}: {}\n".format(item, e))
     return total_size


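Reviewer note: the added line references e, but the except OSError: clause shown in this hunk does not bind it; unless the full source binds the exception elsewhere, a form like the following sketch (a lowercase stand-in for GetTotalSize, not the package's code) would be needed:

    import os
    import sys

    def get_total_size(file_list):
        total_size = 0
        for item in file_list:
            try:
                total_size += os.path.getsize(item)
            except OSError as e:  # bind the exception so it can be reported
                sys.stderr.write("Error accessing file {}: {}\n".format(item, e))
        return total_size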
@@ -2615,7 +2623,7 @@ class ZlibFile(object):
         scanned_leading = 0  # for tolerant header scan

         while True:
-            data = self.file.read(
+            data = self.file.read(__filebuff_size__)  # 1 MiB blocks
             if not data:
                 if d is not None:
                     self._spool.write(d.flush())
@@ -2773,7 +2781,7 @@ class ZlibFile(object):

         # Buffer and compress in chunks to limit memory
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # 1 MiB threshold
             chunk = self._compressor.compress(bytes(self._write_buf))
             if chunk:
                 self.file.write(chunk)
@@ -2883,7 +2891,7 @@ class ZlibFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)

     # compatibility aliases for unwrapping utilities
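Reviewer note: MkTempFile is the module's own temp-file factory (in-memory or spooled, depending on the __use_inmemfile__/__use_spoolfile__ settings above); from_bytes() now routes input through it. An equivalent standalone sketch using io.BytesIO in place of MkTempFile:

    import io
    import zlib

    def from_bytes_sketch(data):
        # Normalize any bytes-like input to bytes, then wrap it in a file
        # object, mirroring the from_bytes() pattern in this hunk.
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("from_bytes() expects a bytes-like object")
        return io.BytesIO(bytes(data) if not isinstance(data, bytes) else data)

    payload = zlib.compress(b"hello world")
    fp = from_bytes_sketch(payload)
    assert zlib.decompress(fp.read()) == b"hello world"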
@@ -2919,7 +2927,7 @@ def compress_bytes(payload, level=6, wbits=15, text=False, **kw):
     out = compress_bytes(b"hello")
     out = compress_bytes(u"hello\n", text=True, encoding="utf-8", newline="\n")
     """
-    bio =
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     f = ZlibFile(fileobj=bio, mode=mode, level=level, wbits=wbits, **kw)
     try:
@@ -3078,7 +3086,7 @@ class GzipFile(object):

         self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)

-        CHUNK =
+        CHUNK = __filebuff_size__
         pending = b""
         d = None
         absolute_offset = 0
@@ -3241,7 +3249,7 @@ class GzipFile(object):

         # Stage and compress in chunks
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # 1 MiB threshold
             out = self._compressor.compress(bytes(self._write_buf))
             if out:
                 self.file.write(out)
@@ -3341,7 +3349,7 @@ class GzipFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)

     # compatibility aliases for unwrapping utilities
@@ -3383,7 +3391,7 @@ def gzip_compress_bytes(payload, level=6, text=False, **kw):
     - text=False: 'payload' must be bytes-like; written via GzipFile('wb')
       You can pass newline/encoding/errors to control text encoding.
     """
-    bio =
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     gf = GzipFile(fileobj=bio, mode=mode, level=level, **kw)
     try:
@@ -3615,53 +3623,6 @@ def crc_generic(msg, width, poly, init, xorout, refin, refout):
     crc = _reflect(crc, width)
     return (crc ^ xorout) & mask

-# =========================
-# Named CRCs
-# =========================
-# CRC-16/ANSI (ARC/MODBUS family with init=0xFFFF by default)
-def crc16_ansi(msg, initial_value=0xFFFF):
-    return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)
-
-def crc16_ibm(msg, initial_value=0xFFFF):
-    return crc16_ansi(msg, initial_value)
-
-def crc16(msg):
-    return crc16_ansi(msg, 0xFFFF)
-
-def crc16_ccitt(msg, initial_value=0xFFFF):
-    # CCITT-FALSE
-    return crc_generic(msg, 16, 0x1021, initial_value & 0xFFFF, 0x0000, False, False)
-
-def crc16_x25(msg):
-    return crc_generic(msg, 16, 0x1021, 0xFFFF, 0xFFFF, True, True)
-
-def crc16_kermit(msg):
-    return crc_generic(msg, 16, 0x1021, 0x0000, 0x0000, True, True)
-
-def crc64_ecma(msg, initial_value=0x0000000000000000):
-    return crc_generic(msg, 64, 0x42F0E1EBA9EA3693,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0x0000000000000000, False, False)
-
-def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
-    return crc_generic(msg, 64, 0x000000000000001B,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0xFFFFFFFFFFFFFFFF, True, True)
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit": CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
 # --- helpers --------------------------------------------------------------

 try:
@@ -3702,206 +3663,15 @@ def _bytes_to_int(b):
         value = (value << 8) | ch
     return value

-
-# --- your existing CRCContext (unchanged) ---------------------------------
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-
-# --- hashlib-backed implementation ---------------------------------------
-
-class _HashlibCRCWrapper(object):
-    """
-    Wrap a hashlib object to present the same interface as CRCContext
-    (update, digest_int, hexdigest).
-
-    Assumes the hashlib algorithm already implements the exact CRC
-    specification (refin/refout/xorout/etc.).
-    """
-    __slots__ = ("_h", "spec", "mask", "width_hex")
-
-    def __init__(self, algo_name, spec):
-        self._h = hashlib.new(algo_name)
-        self.spec = spec
-        self.mask = (1 << spec.width) - 1
-        self.width_hex = (spec.width + 3) // 4
-
-    def update(self, data):
-        self._h.update(_coerce_bytes(data))
-        return self
-
-    def digest_int(self):
-        # Convert final digest bytes to an integer and mask to width
-        value = _bytes_to_int(self._h.digest())
-        return value & self.mask
-
-    def hexdigest(self):
-        h = self._h.hexdigest().lower()
-        # Normalize to the same number of hex digits as CRCContext
-        if len(h) < self.width_hex:
-            h = ("0" * (self.width_hex - len(h))) + h
-        elif len(h) > self.width_hex:
-            h = h[-self.width_hex:]
-        return h
-
-
-# --- public class: choose hashlib or fallback -----------------------------
-
-class CRC(object):
-    """
-    CRC wrapper that uses hashlib if available, otherwise falls back to
-    the pure-Python CRCContext.
-
-    spec.hashlib_name (preferred) or spec.name is used as the hashlib
-    algorithm name, e.g. 'crc32', 'crc32c', etc.
-    """
-
-    __slots__ = ("spec", "_impl")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-        algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
-        impl = None
-
-        if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
-            # Use hashlib-backed implementation
-            impl = _HashlibCRCWrapper(algo_name, spec)
-        else:
-            # Fallback to your pure-Python implementation
-            impl = CRCContext(spec)
-
-        self._impl = impl
-
-    def update(self, data):
-        self._impl.update(data)
-        return self
-
-    def digest_int(self):
-        return self._impl.digest_int()
-
-    def hexdigest(self):
-        return self._impl.hexdigest()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()

     delim = formatspecs.get('format_delimiter', u"\0")
     hdr_bytes = _serialize_header_fields(inlist or [], delim)
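Reviewer note: the rewritten checksum API drops the bespoke CRC dispatch entirely and leans on hashlib (note the default checksum moving from "crc32" to "md5", and CheckSumSupportAlt being replaced by a CheckSumSupport helper that is not shown in this diff). A minimal standalone sketch of the hashlib-only path; header_checksum and its delimiter handling are illustrative, not the package's exact serialization:

    import hashlib

    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)

    def header_checksum(fields, checksumtype="md5", delimiter=b"\x00"):
        # Join header fields with the format delimiter (trailing delimiter
        # included), then hash with the requested hashlib algorithm.
        algo_key = (checksumtype or "md5").lower()
        payload = delimiter.join(f.encode("utf-8") for f in fields) + delimiter
        if algo_key in hashlib_guaranteed:
            h = hashlib.new(algo_key)
            h.update(payload)
            return h.hexdigest().lower()
        return "0"

    print(header_checksum(["name", "1000", "644"]))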
@@ -3909,260 +3679,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, format
         hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)

-    if algo_key
-
-
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
+        h = hashlib.new(algo_key)
+        h.update(hdr_bytes)
+        return h.hexdigest().lower()

     return "0"

-def GetFileChecksum(
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
     - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
     - Falls back to one-shot for non-file-like inputs.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()

     # file-like streaming
-    if hasattr(
+    if hasattr(inbytes, "read"):
         # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-            h = hashlib.new(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                h.update(chunk)
-            return h.hexdigest().lower()
-
-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
-        # not known streaming algo: fallback to one-shot bytes
-        data = instr.read()
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-    else:
-        data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
-        data = bytes(data)
-
-    # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(data)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit": CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in memoryview(data).tobytes():
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in memoryview(data).tobytes():
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()

-
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
-# =========================
-# Public checksum API
-# =========================
-def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
-    or a single field) and compute the requested checksum. Returns lowercase hex.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    delim = formatspecs.get('format_delimiter', u"\0")
-    hdr_bytes = _serialize_header_fields(inlist or [], delim)
-    if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
-        hdr_bytes = _to_bytes(hdr_bytes)
-    hdr_bytes = bytes(hdr_bytes)
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, hdr_bytes)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Accepts bytes/str/file-like.
-    - Hashlib algos: streamed in 1 MiB chunks.
-    - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
-    - Falls back to one-shot for non-file-like inputs.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    # file-like streaming
-    if hasattr(instr, "read"):
-        # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+        if CheckSumSupport(algo_key, hashlib_guaranteed):
             h = hashlib.new(algo_key)
             while True:
-                chunk =
+                chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
                     break
                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
@@ -4170,49 +3710,31 @@ def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__
                 h.update(chunk)
             return h.hexdigest().lower()

-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
         # not known streaming algo: fallback to one-shot bytes
-        data =
+        data = inbytes.read()
         if not isinstance(data, (bytes, bytearray, memoryview)):
            data = bytes(bytearray(data))
     else:
-        data = _to_bytes(
+        data = _to_bytes(inbytes) if (encodedata or not isinstance(inbytes, (bytes, bytearray, memoryview))) else inbytes
         data = bytes(data)

     # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()

-    if algo_key
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
         h = hashlib.new(algo_key)
         h.update(data)
         return h.hexdigest().lower()

     return "0"

-def ValidateHeaderChecksum(inlist=None, checksumtype="
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
     return hmac.compare_digest(want, calc)

-def ValidateFileChecksum(infile, checksumtype="
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
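Reviewer note: checksum comparisons throughout this release move from == to hmac.compare_digest, which compares in constant time and so avoids leaking how many leading characters matched. A small sketch of the pattern (md5 here stands in for whatever checksumtype is configured):

    import hashlib
    import hmac

    def validate(data, expected_hex):
        calc = hashlib.md5(data).hexdigest().lower()
        want = (expected_hex or "0").strip().lower()
        if want.startswith("0x"):
            want = want[2:]
        # Constant-time comparison of the two hex digests.
        return hmac.compare_digest(want, calc)

    digest = hashlib.md5(b"payload").hexdigest()
    assert validate(b"payload", digest)
    assert not validate(b"payload", "deadbeef")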
@@ -4259,66 +3781,6 @@ def GetDataFromArrayAlt(structure, path, default=None):
    return element


-def GetHeaderChecksum(inlist=[], checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    fileheader = AppendNullBytes(inlist, formatspecs['format_delimiter']) if isinstance(
-        inlist, list) else AppendNullByte(inlist, formatspecs['format_delimiter'])
-    if encodedata and hasattr(fileheader, "encode"):
-        fileheader = fileheader.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](fileheader)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(fileheader)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    if encodedata and hasattr(instr, "encode"):
-        instr = instr.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](instr)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(instr)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    infileheadercshex = GetHeaderChecksum(
-        inlist, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == infileheadercshex
-
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    catinfilecshex = GetFileChecksum(
-        infile, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == catinfilecshex
-
-
 # ========= pushback-aware delimiter reader =========
 class _DelimiterReader(object):
     """
@@ -4651,7 +4113,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    fheaderstart = fp.tell()
     if(formatspecs['new_style']):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
@@ -4674,22 +4135,74 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         fjsonchecksumtype = HeaderOut[30]
         fjsonchecksum = HeaderOut[31]
         fjsoncontent = {}
-
-
-
-
-        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+        if(fjsontype=="json"):
+            fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            if(fjsonsize > 0):
                 try:
-
+                    fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                    fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
                 except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-
-
-
-
+                    try:
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = json.loads(fprejsoncontent)
+                    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                        fprejsoncontent = ""
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = {}
+            else:
+                fprejsoncontent = ""
+                fjsonrawcontent = fprejsoncontent
+                fjsoncontent = {}
+        elif(testyaml and fjsontype == "yaml"):
             fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            if (fjsonsize > 0):
+                try:
+                    # try base64 → utf-8 → YAML
+                    fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                    try:
+                        # fall back to treating the bytes as plain text YAML
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                    except (UnicodeDecodeError, yaml.YAMLError):
+                        # final fallback: empty
+                        fprejsoncontent = ""
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = {}
+            else:
+                fprejsoncontent = ""
+                fjsonrawcontent = fprejsoncontent
+                fjsoncontent = {}
+        elif(not testyaml and fjsontype == "yaml"):
+            fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+        elif(fjsontype=="list"):
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            flisttmp = MkTempFile()
+            flisttmp.write(fprejsoncontent.encode())
+            flisttmp.seek(0)
+            fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+            flisttmp.close()
+            fjsonrawcontent = fjsoncontent
+            if(fjsonlen==1):
+                try:
+                    fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                    fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                    fjsonlen = len(fjsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    try:
+                        fjsonrawcontent = fjsoncontent[0]
+                        fjsoncontent = json.loads(fjsoncontent[0])
+                    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                        pass
         fp.seek(len(delimiter), 1)
         jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-        if(
+        if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
             VerbosePrintOut("File JSON Data Checksum Error with file " +
                             fname + " at offset " + str(fheaderstart))
             VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
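Reviewer note: the new YAML branch mirrors the JSON one — try base64-decoding the stored text first, fall back to parsing it as plain YAML, and finally fall back to an empty dict. A standalone sketch of that decode chain (requires PyYAML; decode_metadata is illustrative):

    import base64
    import binascii

    import yaml  # PyYAML; the module gates this behind its testyaml flag

    def decode_metadata(raw_text):
        # First assume the payload was base64-encoded YAML ...
        try:
            decoded = base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8")
            return yaml.safe_load(decoded) or {}
        except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
            # ... otherwise treat it as plain-text YAML, else give up.
            try:
                return yaml.safe_load(raw_text) or {}
            except yaml.YAMLError:
                return {}

    print(decode_metadata("bmFtZTogcHlhcmNoaXZlZmlsZQ=="))  # {'name': 'pyarchivefile'}
    print(decode_metadata("name: pyarchivefile"))            # same, via the fallback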
@@ -4703,8 +4216,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
                             fname + " at offset " + str(fheaderstart))
             VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
             return False
-    fhend = fp.tell() - 1
-    fcontentstart = fp.tell()
     fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -4718,9 +4229,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4733,10 +4244,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         cfcontents = UncompressFileAlt(fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
-    fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
         if(abs(fseeknextasnum) == 0):
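Reviewer note: shutil.copyfileobj accepts an explicit buffer length as its third argument; passing __filebuff_size__ makes the copy chunk size match the module-wide buffer instead of shutil's default. Sketch:

    import io
    import shutil

    FILEBUFF_SIZE = 256 * 1024  # mirrors __filebuff_size__

    src = io.BytesIO(b"a" * (1024 * 1024))
    dst = io.BytesIO()
    # Copy using 256 KiB chunks rather than shutil's default buffer size.
    shutil.copyfileobj(src, dst, length=FILEBUFF_SIZE)
    assert len(dst.getvalue()) == 1024 * 1024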
@@ -4844,6 +4354,33 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             fprejsoncontent = ""
             fjsonrawcontent = fprejsoncontent
             fjsoncontent = {}
+        elif(testyaml and fjsontype == "yaml"):
+            fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            if (fjsonsize > 0):
+                try:
+                    # try base64 → utf-8 → YAML
+                    fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                    try:
+                        # fall back to treating the bytes as plain text YAML
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                    except (UnicodeDecodeError, yaml.YAMLError):
+                        # final fallback: empty
+                        fprejsoncontent = ""
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = {}
+            else:
+                fprejsoncontent = ""
+                fjsonrawcontent = fprejsoncontent
+                fjsoncontent = {}
+        elif(not testyaml and fjsontype == "yaml"):
+            fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
         elif(fjsontype=="list"):
             fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
             flisttmp = MkTempFile()
@@ -4866,7 +4403,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         fp.seek(len(delimiter), 1)
         fjend = fp.tell() - 1
         jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-        if(
+        if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
             VerbosePrintOut("File JSON Data Checksum Error with file " +
                             fname + " at offset " + str(fheaderstart))
             VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4898,9 +4435,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4914,7 +4451,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
@@ -5030,6 +4567,33 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fprejsoncontent = ""
             fjsonrawcontent = fprejsoncontent
             fjsoncontent = {}
+        elif(testyaml and fjsontype == "yaml"):
+            fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            if (fjsonsize > 0):
+                try:
+                    # try base64 → utf-8 → YAML
+                    fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                    try:
+                        # fall back to treating the bytes as plain text YAML
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                    except (UnicodeDecodeError, yaml.YAMLError):
+                        # final fallback: empty
+                        fprejsoncontent = ""
+                        fjsonrawcontent = fprejsoncontent
+                        fjsoncontent = {}
+            else:
+                fprejsoncontent = ""
+                fjsonrawcontent = fprejsoncontent
+                fjsoncontent = {}
+        elif(not testyaml and fjsontype == "yaml"):
+            fjsoncontent = {}
+            fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
         elif(fjsontype=="list"):
             fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
             flisttmp = MkTempFile()
@@ -5051,7 +4615,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
                         pass
         fp.seek(len(delimiter), 1)
         jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-        if(
+        if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
             VerbosePrintOut("File JSON Data Checksum Error with file " +
                             fname + " at offset " + str(fheaderstart))
             VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -5083,8 +4647,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
-    if(fccs
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -5098,11 +4662,11 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
-            fcontents
+            fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5136,9 +4700,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
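Reviewer note: merging the OSError and ValueError handlers is safe because both arms fell through to the same SeekToEndOfFile(fp) fallback (unseekable or closed streams can raise either). Sketch of the pattern with a stand-in fallback:

    import io

    def seek_to_end(fp):
        # Fallback when fp.seek(0, 2) is unsupported: drain the stream instead.
        while fp.read(64 * 1024):
            pass

    fp = io.BytesIO(b"0123456789")
    try:
        fp.seek(0, 2)  # may raise OSError or ValueError on unseekable streams
    except (OSError, ValueError):
        seek_to_end(fp)
    print(fp.tell())  # 10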
@@ -5187,9 +4749,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5208,10 +4768,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[6], 16)
+    fnumextrafields = int(inheader[7], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 8
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -5230,7 +4790,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     fnumfields = int(inheader[1], 16)
     fhencoding = inheader[2]
     fostype = inheader[3]
-
+    fpythontype = inheader[4]
+    fnumfiles = int(inheader[5], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5243,7 +4804,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
|
|
|
5243
4804
|
return False
|
|
5244
4805
|
formversions = re.search('(.*?)(\\d+)', formstring).groups()
|
|
5245
4806
|
fcompresstype = ""
|
|
5246
|
-
outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
4807
|
+
outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
|
|
5247
4808
|
if (seekstart < 0) or (seekstart > fnumfiles):
|
|
5248
4809
|
seekstart = 0
|
|
5249
4810
|
if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
|
|
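These hunks imply the 0.25.0 header layout: index 4 now carries the Python implementation (`fpythontype`, exported in the result dict as `fimptype`), pushing the file count to index 5 and the extra-field counters to indexes 6 and 7, with extra fields starting at index 8; the counters are parsed as hex. A sketch of the resulting parse, with a made-up sample header:

    def parse_header_tail(inheader):
        # Field positions are taken from the hunks above; everything else
        # about the header (leading fields, trailing checksums) is assumed.
        fostype = inheader[3]
        fpythontype = inheader[4]               # new in 0.25.0
        fnumfiles = int(inheader[5], 16)
        fnumextrafieldsize = int(inheader[6], 16)
        fnumextrafields = int(inheader[7], 16)
        fextrafieldslist = inheader[8:8 + fnumextrafields]
        return fostype, fpythontype, fnumfiles, fextrafieldslist

    sample = ["?", "8", "UTF-8", "Linux", "CPython", "1", "0", "0"]
    assert parse_header_tail(sample)[2] == 1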
@@ -5271,7 +4832,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
 fp.seek(len(delimiter), 1)
 prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-if(
+if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
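Each checksum test now goes through `hmac.compare_digest` instead of a plain comparison operator. A minimal sketch, assuming hex-string digests like those produced by `GetFileChecksum`:

    import hashlib
    import hmac

    def checksum_matches(data, expected_hex, algo="md5"):
        # compare_digest runs in time independent of where the strings first
        # differ, so verification does not leak a checksum byte by byte.
        actual_hex = hashlib.new(algo, data).hexdigest()
        return hmac.compare_digest(expected_hex, actual_hex)

    payload = b"archive member contents"
    assert checksum_matches(payload, hashlib.md5(payload).hexdigest())
    assert not checksum_matches(payload, "0" * 32)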
@@ -5279,7 +4840,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prenewfcs = GetHeaderChecksum(
 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
 prefcs = preheaderdata[-2]
-if(prefcs
+if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5295,10 +4856,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prefcontents.write(fp.read(prefsize))
 prefcontents.seek(0, 0)
 prenewfccs = GetFileChecksum(
-prefcontents
+prefcontents, preheaderdata[-3].lower(), False, formatspecs)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(prefccs
+if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
@@ -5344,9 +4905,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 curloc = filestart
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 CatSize = fp.tell()
 CatSizeEnd = CatSize
@@ -5365,10 +4924,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 else:
 inheader = ReadFileHeaderDataWoSize(
 fp, formatspecs['format_delimiter'])
-fnumextrafieldsize = int(inheader[
-fnumextrafields = int(inheader[
+fnumextrafieldsize = int(inheader[6], 16)
+fnumextrafields = int(inheader[7], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 8
 extraend = extrastart + fnumextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(inheader[extrastart])
@@ -5387,7 +4946,8 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 fnumfields = int(inheader[1], 16)
 fhencoding = inheader[2]
 fostype = inheader[3]
-
+fpythontype = inheader[4]
+fnumfiles = int(inheader[5], 16)
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
 headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5433,7 +4993,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
 fp.seek(len(delimiter), 1)
 prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
-if(
+if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5441,7 +5001,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prenewfcs = GetHeaderChecksum(
 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
 prefcs = preheaderdata[-2]
-if(prefcs
+if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5462,7 +5022,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefcontents, preheaderdata[-3].lower(), False, formatspecs)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(prefccs
+if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
@@ -5503,24 +5063,17 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp = infile
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
 currentfilepos = fp.tell()
 elif(infile == "-"):
 fp = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, fp)
-else:
-shutil.copyfileobj(sys.stdin, fp)
+shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
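Reading from `-` spools stdin into a temporary file first, because stdin cannot seek and the parser needs `seek`/`tell`. A sketch of the idea; `tempfile.SpooledTemporaryFile` stands in for the library's `MkTempFile`, `buffsize` for `__filebuff_size__`, and the real code reads through its module-level `PY_STDIN_BUF` alias rather than `sys.stdin.buffer` directly:

    import shutil
    import sys
    import tempfile

    def spool_stdin(buffsize=1024 * 1024):
        # Copy the whole (non-seekable) stream into a seekable temp file,
        # then rewind so header parsing starts at byte 0.
        fp = tempfile.SpooledTemporaryFile()
        shutil.copyfileobj(sys.stdin.buffer, fp, length=buffsize)
        fp.seek(0, 0)
        return fp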
@@ -5530,9 +5083,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp.write(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5541,9 +5092,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp = download_file_from_internet_file(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5551,9 +5100,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 elif(isinstance(infile, FileLikeAdapter)):
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5563,9 +5110,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp = open(infile, "rb")
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5616,9 +5161,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 currentinfilepos = infp.tell()
 try:
 infp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(infp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(infp)
 outinfsize = infp.tell()
 infp.seek(currentinfilepos, 0)
@@ -5657,24 +5200,17 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp = infile
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
 currentfilepos = fp.tell()
 elif(infile == "-"):
 fp = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, fp)
-else:
-shutil.copyfileobj(sys.stdin, fp)
+shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5684,9 +5220,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp.write(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5695,9 +5229,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp = download_file_from_internet_file(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5705,9 +5237,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 elif(isinstance(infile, FileLikeAdapter)):
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5717,9 +5247,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp = open(infile, "rb")
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5770,9 +5298,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 currentinfilepos = infp.tell()
 try:
 infp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(infp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(infp)
 outinfsize = infp.tell()
 infp.seek(currentinfilepos, 0)
@@ -5861,7 +5387,7 @@ def AppendFileHeader(fp,
 numfiles,
 fencoding,
 extradata=None,
-checksumtype="
+checksumtype="md5",
 formatspecs=__file_format_dict__):
 """
 Build and write the archive file header.
@@ -5913,11 +5439,11 @@ def AppendFileHeader(fp,

 # Preserve your original "tmpoutlen" computation exactly
 tmpoutlist = [extrasizelen, extrafields] # you used this as a separate list
-tmpoutlen =
+tmpoutlen = 4 + len(tmpoutlist) + len(xlist) + 2
 tmpoutlenhex = _hex_lower(tmpoutlen)

 # Serialize the first group
-fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), fnumfiles_hex], delimiter)
+fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, fnumfiles_hex], delimiter)
 # Append tmpoutlist
 fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
 # Append extradata items if any
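The first header group now records the Python implementation between the OS name and the hex file count, matching the `fpythontype` field the readers pick up at index 4. A sketch of the serialization, assuming (these are assumptions, not shown in the diff) that `AppendNullBytes` writes each value followed by the format delimiter and that `py_implementation` comes from `platform.python_implementation()`:

    import platform

    def append_null_bytes(values, delimiter):
        # Hypothetical stand-in for AppendNullBytes: value, delimiter, value, ...
        return "".join(str(v) + delimiter for v in values)

    py_implementation = platform.python_implementation()  # e.g. "CPython"
    group = append_null_bytes(
        ["8", "UTF-8", platform.system(), py_implementation, "1"], "\x00")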
@@ -5972,7 +5498,7 @@ def AppendFileHeader(fp,
 return fp


-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
 if(IsNestedDict(formatspecs) and fmttype in formatspecs):
 formatspecs = formatspecs[fmttype]
 elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
@@ -5982,11 +5508,11 @@ def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc3
 return fp


-def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype="
+def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype="md5", formatspecs=__file_format_multi_dict__):
 return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_multi_dict__, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -6035,18 +5561,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
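The three separate pass-only except clauses around the flush/fsync step collapse into one tuple. The durability step is best-effort by design, as the sketch shows:

    import io
    import os

    def best_effort_sync(fp):
        # BytesIO raises io.UnsupportedOperation from fileno(), objects
        # without fileno() raise AttributeError, and fsync itself can raise
        # OSError; none of these should abort archive writing.
        try:
            fp.flush()
            if hasattr(os, "sync"):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass

    best_effort_sync(io.BytesIO())  # silently skips fsync: no file descriptor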
@@ -6065,11 +5584,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 return True


-def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="
+def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="md5", formatspecs=__file_format_dict__, returnfp=False):
 return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)


-def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["
+def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
 if(not hasattr(fp, "write")):
 return False
 if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -6148,25 +5667,21 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 return fp

-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 advancedlist = formatspecs['use_advanced_list']
 altinode = formatspecs['use_alt_inode']
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 infilelist = []
 if(infiles == "-"):
-for line in
+for line in PY_STDIN_TEXT:
 infilelist.append(line.strip())
 infilelist = list(filter(None, infilelist))
 elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
@@ -6210,11 +5725,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 FullSizeFilesAlt = 0
 for curfname in GetDirList:
@@ -6364,7 +5875,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 curcompression = "none"
 if not followlink and ftype in data_types:
 with open(fname, "rb") as fpc:
-
+shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
 if(typechecktest is not False):
@@ -6382,7 +5893,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6398,7 +5909,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6414,7 +5925,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 return False
 flstatinfo = os.stat(flinkname)
 with open(flinkname, "rb") as fpc:
-
+shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
 if(typechecktest is not False):
@@ -6432,7 +5943,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6448,7 +5959,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6471,20 +5982,16 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 return fp

-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 curinode = 0
 curfid = 0
 inodelist = []
@@ -6493,10 +6000,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 inodetoforminode = {}
 if(infile == "-"):
 infile = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, infile)
-else:
-shutil.copyfileobj(sys.stdin, infile)
+shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
 infile.seek(0, 0)
 if(not infile):
 return False
@@ -6556,11 +6060,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
 fencoding = "UTF-8"
@@ -6646,7 +6146,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 curcompression = "none"
 if ftype in data_types:
 fpc = tarfp.extractfile(member)
-
+shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
 fpc.close()
 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
@@ -6665,7 +6165,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6681,7 +6181,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6704,21 +6204,17 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 fcontents.close()
 return fp

-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 curinode = 0
 curfid = 0
 inodelist = []
@@ -6727,10 +6223,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 inodetoforminode = {}
 if(infile == "-"):
 infile = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, infile)
-else:
-shutil.copyfileobj(sys.stdin, infile)
+shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
 infile.seek(0, 0)
 if(not infile):
 return False
@@ -6760,11 +6253,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
 fencoding = "UTF-8"
@@ -6849,24 +6338,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 fcsize = format(int(0), 'x').lower()
 try:
 fuid = format(int(os.getuid()), 'x').lower()
-except AttributeError:
-fuid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fuid = format(int(0), 'x').lower()
 try:
 fgid = format(int(os.getgid()), 'x').lower()
-except AttributeError:
-fgid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fgid = format(int(0), 'x').lower()
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
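Ownership lookups collapse their fallbacks the same way: `os.getuid`/`os.getgid` are missing on Windows (AttributeError), the `pwd`/`grp` lookups can raise KeyError, and both paths fall back to zero or an empty name. A sketch of the uid/gid half:

    import os

    def owner_ids_hex():
        # Hex-encoded like the archive's other numeric header fields; 0 is
        # the fallback on platforms without POSIX uids/gids.
        try:
            fuid = format(int(os.getuid()), 'x').lower()
        except (KeyError, AttributeError):
            fuid = format(0, 'x').lower()
        try:
            fgid = format(int(os.getgid()), 'x').lower()
        except (KeyError, AttributeError):
            fgid = format(0, 'x').lower()
        return fuid, fgid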
@@ -6876,9 +6359,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
@@ -6901,7 +6382,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6914,7 +6395,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6937,26 +6418,22 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 fcontents.close()
 return fp

 if(not rarfile_support):
-def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 return False

 if(rarfile_support):
-def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 curinode = 0
 curfid = 0
 inodelist = []
@@ -6977,21 +6454,13 @@ if(rarfile_support):
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 try:
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
 is_unix = False
@@ -7100,24 +6569,18 @@ if(rarfile_support):
 int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
 try:
 fuid = format(int(os.getuid()), 'x').lower()
-except AttributeError:
-fuid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fuid = format(int(0), 'x').lower()
 try:
 fgid = format(int(os.getgid()), 'x').lower()
-except AttributeError:
-fgid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fgid = format(int(0), 'x').lower()
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
@@ -7127,9 +6590,7 @@ if(rarfile_support):
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
@@ -7152,7 +6613,7 @@ if(rarfile_support):
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -7168,7 +6629,7 @@ if(rarfile_support):
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7191,26 +6652,22 @@ if(rarfile_support):
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 fcontents.close()
 return fp

 if(not py7zr_support):
-def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 return False

 if(py7zr_support):
-def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 formver = formatspecs['format_ver']
 fileheaderver = str(int(formver.replace(".", "")))
 curinode = 0
@@ -7233,11 +6690,7 @@ if(py7zr_support):
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 for member in sorted(szpfp.list(), key=lambda x: x.filename):
 fencoding = "UTF-8"
@@ -7287,24 +6740,18 @@ if(py7zr_support):
 int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
 try:
 fuid = format(int(os.getuid()), 'x').lower()
-except AttributeError:
-fuid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fuid = format(int(0), 'x').lower()
 try:
 fgid = format(int(os.getgid()), 'x').lower()
-except AttributeError:
-fgid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fgid = format(int(0), 'x').lower()
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
@@ -7314,9 +6761,7 @@ if(py7zr_support):
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
@@ -7342,7 +6787,7 @@ if(py7zr_support):
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -7358,7 +6803,7 @@ if(py7zr_support):
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7381,20 +6826,16 @@ if(py7zr_support):
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 fcontents.close()
 return fp

-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 GetDirList = inlist
 if(not GetDirList):
 return False
@@ -7452,12 +6893,12 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
 return fp


-def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["
+def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
 return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)


-def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["
+def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -7510,18 +6951,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
@@ -7538,7 +6972,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
 fp.close()
 return True

-def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["
+def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if not isinstance(infiles, list):
 infiles = [infiles]
 returnout = False
@@ -7553,7 +6987,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
 return True
 return returnout

-def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["
+def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -7603,18 +7037,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
@@ -7632,7 +7059,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
 fp.close()
 return True

-def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -7683,18 +7110,11 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
@@ -7712,7 +7132,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
 fp.close()
 return True

-def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if not isinstance(infiles, list):
 infiles = [infiles]
 returnout = False
@@ -7727,7 +7147,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
 return True
 return returnout

-def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -7778,18 +7198,11 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
@@ -7807,7 +7220,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
 fp.close()
 return True

-def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if not isinstance(infiles, list):
 infiles = [infiles]
 returnout = False
@@ -7823,11 +7236,11 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
 return returnout

 if(not rarfile_support):
-def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 return False

 if(rarfile_support):
-def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -7878,18 +7291,11 @@ if(rarfile_support):
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
@@ -7907,7 +7313,7 @@ if(rarfile_support):
 fp.close()
 return True

-def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if not isinstance(infiles, list):
 infiles = [infiles]
 returnout = False
@@ -7923,11 +7329,11 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
 return returnout

 if(not py7zr_support):
-def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 return False

 if(py7zr_support):
-def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -7978,18 +7384,11 @@ if(py7zr_support):
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
7991
|
-
else:
|
|
7992
|
-
shutil.copyfileobj(fp, sys.stdout)
|
|
7391
|
+
shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
|
|
7993
7392
|
elif(outfile is None):
|
|
7994
7393
|
fp.seek(0, 0)
|
|
7995
7394
|
outvar = fp.read()
|
|
@@ -8007,7 +7406,7 @@ if(py7zr_support):
|
|
|
8007
7406
|
fp.close()
|
|
8008
7407
|
return True
|
|
8009
7408
|
|
|
8010
|
-
def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7409
|
+
def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
8011
7410
|
if not isinstance(infiles, list):
|
|
8012
7411
|
infiles = [infiles]
|
|
8013
7412
|
returnout = False
|
|
@@ -8022,7 +7421,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
|
|
|
8022
7421
|
return True
|
|
8023
7422
|
return returnout
|
|
8024
7423
|
|
|
8025
|
-
def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["
|
|
7424
|
+
def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
8026
7425
|
inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
|
|
8027
7426
|
return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
|
|
8028
7427
|
|
|
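Note: the RAR and 7z entry points keep this release's guard pattern: when the optional backend is missing, the module still defines the function as a stub that returns False, so callers never hit a NameError. A minimal sketch of that pattern, assuming a hypothetical optional module named somebackend:

    try:
        import somebackend          # hypothetical optional dependency
        somebackend_support = True
    except ImportError:
        somebackend_support = False

    if not somebackend_support:
        def ExtractWithBackend(infile):
            # Stub keeps the public API importable without the extra package.
            return False

    if somebackend_support:
        def ExtractWithBackend(infile):
            return somebackend.extract(infile)  # assumed API, for illustration only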
@@ -8056,9 +7455,7 @@ def PrintPermissionString(fchmode, ftype):
             permissionstr = "w" + permissionstr
     try:
         permissionoutstr = stat.filemode(fchmode)
-    except AttributeError:
-        permissionoutstr = permissionstr
-    except KeyError:
+    except (KeyError, AttributeError):
         permissionoutstr = permissionstr
     return permissionoutstr
 
@@ -8974,7 +8371,7 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
 
 
 def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
-    filefp =
+    filefp = MkTempFile("", isbytes=False)
     outstring = UncompressString(instring, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
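Note: stat.filemode() renders a numeric st_mode as the familiar ls-style string, and PrintPermissionString now treats a missing function (very old interpreters) and an unknown mode bit the same way, falling back to the hand-built string. A short illustration; the fallback value here is invented:

    import stat

    mode = 0o100644              # regular file, rw-r--r--
    fallback = "-rw-r--r--"      # whatever the manual builder produced
    try:
        permission_string = stat.filemode(mode)
    except (KeyError, AttributeError):
        permission_string = fallback
    print(permission_string)     # prints -rw-r--r--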
@@ -8989,7 +8386,7 @@ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=
     fp.seek(filestart, 0)
     if(prechck!="zstd"):
         return UncompressFileAlt(fp, formatspecs, filestart)
-    filefp =
+    filefp = MkTempFile("", isbytes=False)
     fp.seek(filestart, 0)
     outstring = UncompressString(fp.read(), formatspecs, 0)
     filefp.write(outstring)
@@ -9061,9 +8458,7 @@ def _extract_base_fp(obj):
         try:
             f()  # probe fileno()
             return cur
-        except UnsupportedOperation:
-            pass
-        except Exception:
+        except (Exception, UnsupportedOperation):
             pass
         for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
             nxt = getattr(cur, attr, None)
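Note: _extract_base_fp probes for a real OS-level file by calling fileno() and, when the probe fails, unwraps one layer of common wrapper attributes and tries again. (The merged tuple is redundant in the strict sense, since UnsupportedOperation already derives from Exception, but it preserves the original intent.) A self-contained sketch of the unwrapping loop; the attribute list mirrors common wrappers and is illustrative, not exhaustive:

    def find_real_file(obj):
        cur = obj
        seen = set()
        while cur is not None and id(cur) not in seen:
            seen.add(id(cur))
            fileno = getattr(cur, "fileno", None)
            if callable(fileno):
                try:
                    fileno()        # raises on purely in-memory objects
                    return cur
                except Exception:
                    pass
            # Walk one wrapper layer: gzip's fileobj, TextIOWrapper's buffer, etc.
            for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
                nxt = getattr(cur, attr, None)
                if nxt is not None and nxt is not cur:
                    cur = nxt
                    break
            else:
                return None
        return None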
@@ -9455,7 +8850,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw):
 
 # ========= copy helpers =========
 
-def fast_copy(infp, outfp, bufsize=
+def fast_copy(infp, outfp, bufsize=__filebuff_size__):
     """
     Efficient copy from any readable file-like to any writable file-like.
     Uses readinto() when available to avoid extra allocations.
@@ -9499,7 +8894,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
         shutil.copyfileobj(fp, outfp, length=chunk_size)
 
 
-def copy_opaque(src, dst, bufsize=
+def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20):
     """
     Copy opaque bytes from 'src' (any readable file-like) to 'dst'
     (your mmap-backed FileLikeAdapter or any writable file-like).
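Note: fast_copy's docstring names the trick: readinto() fills a caller-owned buffer, so the copy loop reuses one allocation instead of creating a new bytes object per chunk. A minimal sketch of a readinto-based copy with a copyfileobj fallback; the buffer size here is arbitrary:

    import shutil

    def readinto_copy(infp, outfp, bufsize=1 << 20):
        readinto = getattr(infp, "readinto", None)
        if readinto is None:
            # Sources without readinto() still copy fine, just with more churn.
            shutil.copyfileobj(infp, outfp, length=bufsize)
            return
        buf = bytearray(bufsize)
        view = memoryview(buf)
        while True:
            n = readinto(buf)
            if not n:
                break
            outfp.write(view[:n])   # write only the filled prefix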
@@ -9561,11 +8956,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
 
     try:
         fp.seek(0, 0)
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
 
     if (not compression or compression == formatspecs['format_magic']
@@ -9624,11 +9015,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
 
     try:
         bytesfp.seek(0, 0)
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     out = FileLikeAdapter(bytesfp, mode="rb")  # read interface for the caller
     try:
@@ -9758,31 +9145,18 @@ def CheckSumSupport(checkfor, guaranteed=True):
         try:
             hash_list = sorted(list(hashlib.algorithms_guaranteed))
         except AttributeError:
-
-
-
-
-        except AttributeError:
-            hash_list = sorted(list(hashlib.algorithms))
-        checklistout = sorted(hash_list + ['adler32', 'crc16', 'crc16_ansi', 'crc16_ibm',
-                              'crc16_ccitt', 'crc32', 'crc64', 'crc64_ecma', 'crc64_iso', 'none'])
-        if(checkfor in checklistout):
-            return True
-        else:
-            return False
-
-
-def CheckSumSupportAlt(checkfor, guaranteed=True):
-    if(guaranteed):
-        try:
-            hash_list = sorted(list(hashlib.algorithms_guaranteed))
-        except AttributeError:
-            hash_list = sorted(list(hashlib.algorithms))
+            try:
+                hash_list = sorted(list(hashlib.algorithms))
+            except AttributeError:
+                hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
     else:
         try:
             hash_list = sorted(list(hashlib.algorithms_available))
         except AttributeError:
-
+            try:
+                hash_list = sorted(list(hashlib.algorithms))
+            except AttributeError:
+                hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
     checklistout = hash_list
     if(checkfor in checklistout):
         return True
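Note: CheckSumSupport folds its old near-duplicate (CheckSumSupportAlt) into one body and hardens the hashlib probing: algorithms_guaranteed and algorithms_available arrived relatively late (Python 2.7.9 / 3.2), while older interpreters only exposed hashlib.algorithms, hence the nested AttributeError fallbacks. A standalone sketch of that probing order:

    import hashlib

    def probe_hash_names(guaranteed=True):
        primary = "algorithms_guaranteed" if guaranteed else "algorithms_available"
        try:
            names = getattr(hashlib, primary)
        except AttributeError:
            # Very old interpreters only ship hashlib.algorithms.
            names = getattr(hashlib, "algorithms",
                            ("md5", "sha1", "sha224", "sha256", "sha384", "sha512"))
        return sorted(n.lower() for n in names)

    print("md5" in probe_hash_names())   # True on stock CPython builds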
@@ -9790,48 +9164,48 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
         return False
 
 
-def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
-def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
-def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
 
 
-def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
-def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
 if(not rarfile_support):
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
 
 if(rarfile_support):
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
 if(not py7zr_support):
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
 
 if(py7zr_support):
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
-def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
         return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
     elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -9914,7 +9288,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
                         formatspecs=__file_format_multi_dict__,  # keep default like original
                         seektoend=False, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
@@ -9941,10 +9315,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
 
     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         fp.seek(filestart, 0)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
         checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
@@ -10021,9 +9392,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
 
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
 
     CatSize = fp.tell()
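Note: reading an archive from "-" now goes through the binary side of stdin in one line instead of branching on hasattr. A minimal sketch of the idiom; make_temp is a hypothetical factory standing in for the package's MkTempFile:

    import shutil
    import sys

    def read_stdin_to_tempfile(make_temp, bufsize=1 << 20):
        # sys.stdin is text on Python 3; .buffer is its binary layer.
        # getattr keeps the same line working where stdin is already binary.
        stdin_buf = getattr(sys.stdin, "buffer", sys.stdin)
        fp = make_temp()
        shutil.copyfileobj(stdin_buf, fp, length=bufsize)
        fp.seek(0, 0)
        return fp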
@@ -10053,16 +9422,17 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
     else:
         inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
 
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
-    extrastart =
+    fnumextrafieldsize = int(inheader[6], 16)
+    fnumextrafields = int(inheader[7], 16)
+    extrastart = 8
     extraend = extrastart + fnumextrafields
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
     fhencoding = inheader[2]
     fostype = inheader[3]
-    fnumfiles = int(inheader[4], 16)
+    fpythontype = inheader[4]
+    fnumfiles = int(inheader[5], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
 
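Note: the archive header stores sizes and counts as hexadecimal strings, and this release inserts a Python-implementation field at index 4, shifting the file count to index 5. A toy header list in the same shape as inheader; every value below is invented for illustration:

    inheader = ["3f", "9", "UTF-8", "posix", "cpython",
                "2", "10", "0", "md5", "d41d8cd9..."]
    fheadsize = int(inheader[0], 16)   # "3f" -> 63 bytes
    fpythontype = inheader[4]          # "cpython"
    fnumfiles = int(inheader[5], 16)   # "2" -> 2 records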
@@ -10183,7 +9553,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
                 VerbosePrintOut(outfname)
                 VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
 
-            if(outfcs
+            if(hmac.compare_digest(outfcs, infcs)):
                 if(verbose):
                     VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
                     VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -10195,7 +9565,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
                     VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
 
             if(outfjsonsize > 0):
-                if(outfjsonchecksum
+                if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
                     if(verbose):
                         VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
                         VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -10219,7 +9589,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
                 infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
                 pyhascontents = True
 
-                if(outfccs
+                if(hmac.compare_digest(outfccs, infccs)):
                     if(verbose):
                         VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
                         VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
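Note: all three checksum comparisons in validation now use hmac.compare_digest, whose runtime does not depend on where the strings first differ, so timing cannot leak checksum prefixes. A short standalone illustration:

    import hashlib
    import hmac

    expected = hashlib.md5(b"payload").hexdigest()
    received = hashlib.md5(b"payload").hexdigest()
    # Constant-time comparison of two hex digests.
    assert hmac.compare_digest(expected, received)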
@@ -10302,9 +9672,7 @@ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=
     outstartfile = infile.tell()
     try:
         infile.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infile)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infile)
     outfsize = infile.tell()
     infile.seek(outstartfile, 0)
@@ -10380,7 +9748,7 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
     fp = PackArchiveFileFromTarFile(
-        infile, fp, "auto", True, None, compressionlistalt, "
+        infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
     listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles
 
@@ -10391,7 +9759,7 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
     fp = PackArchiveFileFromZipFile(
-        infile, fp, "auto", True, None, compressionlistalt, "
+        infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
     listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles
 
@@ -10407,7 +9775,7 @@ if(rarfile_support):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
         fp = PackArchiveFileFromRarFile(
-            infile, fp, "auto", True, None, compressionlistalt, "
+            infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
         listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
         return listarrayfiles
 
@@ -10422,7 +9790,7 @@ if(py7zr_support):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
         fp = PackArchiveFileFromSevenZipFile(
-            infile, fp, "auto", True, None, compressionlistalt, "
+            infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
         listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
         return listarrayfiles
 
@@ -10446,7 +9814,7 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
         return False
 
 
-def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["
+def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                                compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
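Note: the size probes above rely on seek(0, 2) jumping to end-of-file, with SeekToEndOfFile as the fallback for unseekable streams, which raise OSError or ValueError. A minimal sketch of that pattern under the assumption that draining the stream is an acceptable fallback:

    def file_size(fp):
        pos = fp.tell()
        try:
            fp.seek(0, 2)              # jump to EOF on seekable files
        except (OSError, ValueError):
            while fp.read(1 << 16):    # unseekable: drain to reach the end
                pass
        size = fp.tell()
        try:
            fp.seek(pos, 0)
        except (OSError, ValueError):
            pass                       # stream consumed; caller must reopen
        return size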
@@ -10577,7 +9945,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     if compressionuselist is None:
         compressionuselist = compressionlistalt
     if checksumtype is None:
-        checksumtype = ["
+        checksumtype = ["md5", "md5", "md5", "md5"]
     if extradata is None:
         extradata = []
     if jsondata is None:
@@ -10664,7 +10032,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         compression = "auto"
 
     if verbose:
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 
     # No files?
     if not listarrayfiles.get('ffilelist'):
@@ -10769,7 +10137,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             while ilmin < ilsize:
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -10787,7 +10155,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
 
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
@@ -10886,22 +10254,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         fp.flush()
         if hasattr(os, "sync"):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        if verbose:
-            logging.warning("Flush/sync unsupported on this file object.")
-    except AttributeError:
-        if verbose:
-            logging.warning("Flush/sync attributes missing on this file object.")
-    except OSError as e:
-        if verbose:
-            logging.warning("OS error during flush/sync: %s", e)
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
 
     if outfile == "-":
         fp.seek(0, 0)
-        if hasattr(sys.stdout, "buffer"):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif outfile is None:
        fp.seek(0, 0)
        outvar = fp.read()
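Note: every internal copy in RePackArchiveFile now passes an explicit buffer size. shutil.copyfileobj uses a small default chunk (commonly 64 KiB on recent Python 3), so a larger explicit length trades a little memory for fewer read/write round trips on big archive members. A one-function sketch; the 1 MiB value is arbitrary:

    import shutil

    def buffered_copy(src, dst, bufsize=1 << 20):
        # Same call the hunks above make, with the package constant replaced
        # by a local value for illustration.
        shutil.copyfileobj(src, dst, length=bufsize)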
@@ -10940,14 +10298,14 @@ def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="aut
             return True
     return returnout
 
-def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["
+def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
     listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
                                        checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles
 
 
-def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["
+def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                                compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10960,7 +10318,7 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
     if(outdir is not None):
         outdir = RemoveWindowsPath(outdir)
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfiles = infile
     else:
@@ -11010,16 +10368,12 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
                     listarrayfiles['ffilelist'][lcfi]['fcontents'])
                 listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
                 shutil.copyfileobj(
-                    listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
+                    listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc, length=__filebuff_size__)
                 try:
                     fpc.flush()
                     if(hasattr(os, "sync")):
                         os.fsync(fpc.fileno())
-                except io.UnsupportedOperation:
-                    pass
-                except AttributeError:
-                    pass
-                except OSError:
+                except (io.UnsupportedOperation, AttributeError, OSError):
                     pass
             if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
                 os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
@@ -11061,16 +10415,12 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
                         flinkinfo['fcontents'] = MkTempFile(
                             flinkinfo['fcontents'])
                         flinkinfo['fcontents'].seek(0, 0)
-                        shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+                        shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
                         try:
                             fpc.flush()
                             if(hasattr(os, "sync")):
                                 os.fsync(fpc.fileno())
-                        except io.UnsupportedOperation:
-                            pass
-                        except AttributeError:
-                            pass
-                        except OSError:
+                        except (io.UnsupportedOperation, AttributeError, OSError):
                             pass
                     if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
                         os.chown(PrependPath(
@@ -11140,16 +10490,12 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
                         flinkinfo['fcontents'] = MkTempFile(
                             flinkinfo['fcontents'])
                         flinkinfo['fcontents'].seek(0, 0)
-                        shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+                        shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
                         try:
                             fpc.flush()
                             if(hasattr(os, "sync")):
                                 os.fsync(fpc.fileno())
-                        except io.UnsupportedOperation:
-                            pass
-                        except AttributeError:
-                            pass
-                        except OSError:
+                        except (io.UnsupportedOperation, AttributeError, OSError):
                             pass
                     if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
                         os.chown(PrependPath(
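Note: every verbose path in this family now configures logging the same way, pointing the root handler at the text-mode stdout stream. A tiny sketch of the behavior; keep in mind that basicConfig is a no-op once any handler is installed, so the first verbose call wins:

    import logging
    import sys

    logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
    logging.info("File Checksum Passed")   # plain message, no level prefix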
@@ -11236,7 +10582,7 @@ def ftype_to_str(ftype):
 
 def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfileslist = [infile]
     if(isinstance(infile, list)):
@@ -11317,9 +10663,7 @@ def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0
     outstartfile = infile.tell()
     try:
         infile.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infile)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infile)
     outfsize = infile.tell()
     infile.seek(outstartfile, 0)
@@ -11349,13 +10693,10 @@ def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipc
 
 def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
@@ -11474,13 +10815,10 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 
 def ZipFileListFiles(infile, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
@@ -11557,24 +10895,18 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
         printfname = member.filename
         try:
             fuid = int(os.getuid())
-        except AttributeError:
-            fuid = int(0)
-        except KeyError:
+        except (KeyError, AttributeError):
            fuid = int(0)
         try:
             fgid = int(os.getgid())
-        except AttributeError:
-            fgid = int(0)
-        except KeyError:
+        except (KeyError, AttributeError):
            fgid = int(0)
         try:
             import pwd
             try:
                 userinfo = pwd.getpwuid(os.getuid())
                 funame = userinfo.pw_name
-            except KeyError:
-                funame = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 funame = ""
         except ImportError:
             funame = ""
@@ -11584,9 +10916,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
             try:
                 groupinfo = grp.getgrgid(os.getgid())
                 fgname = groupinfo.gr_name
-            except KeyError:
-                fgname = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 fgname = ""
         except ImportError:
             fgname = ""
@@ -11612,7 +10942,7 @@ if(not rarfile_support):
 if(rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
         if(verbose):
-            logging.basicConfig(format="%(message)s", stream=
+            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11695,24 +11025,18 @@ if(rarfile_support):
             printfname = member.filename
             try:
                 fuid = int(os.getuid())
-            except AttributeError:
-                fuid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                fuid = int(0)
             try:
                 fgid = int(os.getgid())
-            except AttributeError:
-                fgid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                fgid = int(0)
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -11722,9 +11046,7 @@ if(rarfile_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -11749,7 +11071,7 @@ if(not py7zr_support):
 if(py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
         if(verbose):
-            logging.basicConfig(format="%(message)s", stream=
+            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         lcfi = 0
@@ -11802,24 +11124,18 @@ if(py7zr_support):
             file_content[member.filename].close()
             try:
                 fuid = int(os.getuid())
-            except AttributeError:
-                fuid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                fuid = int(0)
             try:
                 fgid = int(os.getgid())
-            except AttributeError:
-                fgid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                fgid = int(0)
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -11829,9 +11145,7 @@ if(py7zr_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -11852,7 +11166,7 @@ if(py7zr_support):
 
 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
@@ -11871,7 +11185,7 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
     return False
 
 
-def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["
+def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
                                compressionlevel, followlink, checksumtype, formatspecs, False, True)
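Note: the owner-lookup blocks repeated in the zip, rar, and 7z listers all collapse to the same two-exception handlers. pwd and grp are POSIX-only modules and os.getuid is absent on Windows, so ImportError, KeyError, and AttributeError all map to empty or zero defaults. A compact sketch of the whole lookup:

    import os

    def owner_names():
        # pwd/grp are POSIX-only; a missing uid entry raises KeyError,
        # a missing os.getuid raises AttributeError.
        try:
            import pwd
            import grp
            funame = pwd.getpwuid(os.getuid()).pw_name
            fgname = grp.getgrgid(os.getgid()).gr_name
        except (ImportError, KeyError, AttributeError):
            funame = fgname = ""
        return funame, fgname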
@@ -11883,19 +11197,19 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
 PyNeoFile compatibility layer
 """
 
-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
 
-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
     return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
 
-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
     return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
 
-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
     return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
 
-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
     return PackArchiveFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
 
 def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
@@ -11904,7 +11218,7 @@ def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonl
 def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
     return UnPackArchiveFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
 
-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["
+def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
     return RePackArchiveFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
 
 def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
@@ -11913,7 +11227,7 @@ def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False,
 def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
     return ArchiveFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
 
-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
     intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
     return RePackArchiveFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
 
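Note: the PyNeoFile wrappers above now default every checksum slot to md5. A hypothetical round trip using only the signatures shown; the file and directory names are invented and the return values follow the wrappers above:

    archive_fp = pack_neo(["./project/README.txt"], "backup.neo",
                          checksumtypes=["md5", "md5", "md5", "md5"], returnfp=True)
    unpack_neo("backup.neo", outdir="./restored")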
@@ -11955,10 +11269,7 @@ def download_file_from_ftp_file(url):
         ftp_port = 21
     try:
         ftp.connect(urlparts.hostname, ftp_port)
-    except socket.gaierror:
-        log.info("Error With URL "+url)
-        return False
-    except socket.timeout:
+    except (socket.gaierror, socket.timeout):
         log.info("Error With URL "+url)
         return False
     if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12046,10 +11357,7 @@ def upload_file_to_ftp_file(ftpfile, url):
         ftp_port = 21
     try:
         ftp.connect(urlparts.hostname, ftp_port)
-    except socket.gaierror:
-        log.info("Error With URL "+url)
-        return False
-    except socket.timeout:
+    except (socket.gaierror, socket.timeout):
         log.info("Error With URL "+url)
         return False
     if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12160,7 +11468,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             response = requests.get(rebuilt_url, headers=headers, timeout=(5, 30), stream=True)
         response.raw.decode_content = True
-        shutil.copyfileobj(response.raw, httpfile)
+        shutil.copyfileobj(response.raw, httpfile, length=__filebuff_size__)
 
     # 2) HTTPX branch
     elif usehttp == 'httpx' and havehttpx:
@@ -12172,7 +11480,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             response = client.get(rebuilt_url, headers=headers)
         raw_wrapper = RawIteratorWrapper(response.iter_bytes())
-        shutil.copyfileobj(raw_wrapper, httpfile)
+        shutil.copyfileobj(raw_wrapper, httpfile, length=__filebuff_size__)
 
     # 3) Mechanize branch
     elif usehttp == 'mechanize' and havemechanize:
@@ -12191,7 +11499,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 
         # Open the URL and copy the response to httpfile
         response = br.open(rebuilt_url)
-        shutil.copyfileobj(response, httpfile)
+        shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
 
     # 4) Fallback to urllib
     else:
@@ -12204,7 +11512,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             opener = build_opener()
         response = opener.open(request)
-        shutil.copyfileobj(response, httpfile)
+        shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
 
     # Reset file pointer to the start before returning
     httpfile.seek(0, 0)
@@ -12337,7 +11645,7 @@ def upload_file_to_http_file(
         fileobj.seek(0)
     except Exception:
         pass
-    shutil.copyfileobj(fileobj, buf)
+    shutil.copyfileobj(fileobj, buf, length=__filebuff_size__)
 
     _w('\r\n')
     _w('--' + boundary + '--\r\n')
@@ -12426,10 +11734,7 @@ if(haveparamiko):
                                 username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
@@ -12483,10 +11788,7 @@ if(haveparamiko):
                                 username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
@@ -12537,10 +11839,7 @@ if(havepysftp):
                                 username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftpfile = MkTempFile()
@@ -12590,10 +11889,7 @@ if(havepysftp):
                                 username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftpfile.seek(0, 0)
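Note: the FTP and SFTP helpers all merge DNS failure (socket.gaierror) and timeout (socket.timeout) into one handler, since both get identical treatment: log and bail out. A minimal sketch of the connect half of that pattern; host and port are caller-supplied, error handling mirrors the hunks above:

    import socket
    from ftplib import FTP

    def connect_ftp(host, port=21, timeout=30):
        ftp = FTP()
        try:
            ftp.connect(host, port, timeout)
        except (socket.gaierror, socket.timeout):
            # Unresolvable host and unreachable host share one exit path.
            return None
        return ftp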