PyCatFile 0.24.6__py3-none-any.whl → 0.25.2__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- {pycatfile-0.24.6.data → pycatfile-0.25.2.data}/scripts/catfile.py +12 -12
- {pycatfile-0.24.6.dist-info → pycatfile-0.25.2.dist-info}/METADATA +2 -2
- pycatfile-0.25.2.dist-info/RECORD +10 -0
- pycatfile.py +668 -1176
- pycatfile-0.24.6.dist-info/RECORD +0 -10
- {pycatfile-0.24.6.data → pycatfile-0.25.2.data}/scripts/catneofile.py +0 -0
- {pycatfile-0.24.6.data → pycatfile-0.25.2.data}/scripts/neocatfile.py +0 -0
- {pycatfile-0.24.6.dist-info → pycatfile-0.25.2.dist-info}/WHEEL +0 -0
- {pycatfile-0.24.6.dist-info → pycatfile-0.25.2.dist-info}/licenses/LICENSE +0 -0
- {pycatfile-0.24.6.dist-info → pycatfile-0.25.2.dist-info}/top_level.txt +0 -0
- {pycatfile-0.24.6.dist-info → pycatfile-0.25.2.dist-info}/zip-safe +0 -0
pycatfile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pycatfile.py - Last Update: 11/
+$FileInfo: pycatfile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -73,6 +73,17 @@ except ImportError:
 except ImportError:
     import json

+testyaml = False
+try:
+    import oyaml as yaml
+    testyaml = True
+except ImportError:
+    try:
+        import yaml
+        testyaml = True
+    except ImportError:
+        testyaml = False
+
 try:
     import configparser
 except ImportError:
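Note on the hunk above: it follows the usual optional-dependency fallback, preferring oyaml (an ordered drop-in for PyYAML) and recording availability in a flag. A minimal sketch of how callers are then expected to gate on that flag (the document only shows the import side; the usage below is an assumption):

    if testyaml:
        # yaml is bound to whichever backend imported successfully
        data = yaml.safe_load("a: 1\nb: 2")
    else:
        data = {}  # YAML support unavailable; fall back to an empty mapping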
@@ -115,6 +126,16 @@ else:
     bytes_type = bytes
     text_type = str

+# Text streams (as provided by Python)
+PY_STDIN_TEXT = sys.stdin
+PY_STDOUT_TEXT = sys.stdout
+PY_STDERR_TEXT = sys.stderr
+
+# Binary-friendly streams (use .buffer on Py3, fall back on Py2)
+PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
+PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
+PY_STDERR_BUF = getattr(sys.stderr, "buffer", sys.stderr)
+
 # Text vs bytes tuples you can use with isinstance()
 TEXT_TYPES = (basestring,)  # "str or unicode" on Py2, "str" on Py3
 BINARY_TYPES = (bytes,) if not PY2 else (str,)  # bytes on Py3, str on Py2
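The PY_*_BUF bindings above exist because Python 3 text streams reject bytes; the getattr picks the underlying binary buffer when present and falls back to the stream itself on Python 2. A self-contained sketch of the same trick (the payload is illustrative):

    import sys

    PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)

    # Writes raw bytes on Py3 (via sys.stdout.buffer) and on Py2
    # (where sys.stdout accepts byte strings directly).
    PY_STDOUT_BUF.write(b"raw bytes, no text encoding applied\n")
    PY_STDOUT_BUF.flush()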
@@ -211,12 +232,6 @@ if sys.version_info[0] == 2:
     except (NameError, AttributeError):
         pass

-# CRC32 import
-try:
-    from zlib import crc32
-except ImportError:
-    from binascii import crc32
-
 # Define FileNotFoundError for Python 2
 try:
     FileNotFoundError
@@ -251,9 +266,7 @@ py7zr_support = False
 try:
     import py7zr
     py7zr_support = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # TAR file checking
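This hunk, and the paramiko, pysftp, mechanize, requests, and httpx hunks that follow, collapse two identical handlers into a single tuple clause; behavior is unchanged since both branches only did pass. A generic sketch of the pattern for any optional dependency (the module name is hypothetical):

    have_featurelib = False
    try:
        import featurelib  # hypothetical optional dependency
        have_featurelib = True
    except (ImportError, OSError):
        # ImportError: not installed; OSError: broken native libraries
        pass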
@@ -279,9 +292,7 @@ haveparamiko = False
 try:
     import paramiko
     haveparamiko = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # PySFTP support
@@ -289,9 +300,7 @@ havepysftp = False
 try:
     import pysftp
     havepysftp = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # Add the mechanize import check
@@ -299,9 +308,7 @@ havemechanize = False
 try:
     import mechanize
     havemechanize = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # Requests support
@@ -311,9 +318,7 @@ try:
     haverequests = True
     import urllib3
     logging.getLogger("urllib3").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # HTTPX support
@@ -323,9 +328,7 @@ try:
     havehttpx = True
     logging.getLogger("httpx").setLevel(logging.WARNING)
     logging.getLogger("httpcore").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # HTTP and URL parsing
@@ -416,9 +419,14 @@ __include_defaults__ = True
 __use_inmemfile__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
-
-
+BYTES_PER_KiB = 1024
+BYTES_PER_MiB = 1024 * BYTES_PER_KiB
+# Spool: not tiny, but won’t blow up RAM if many are in use
+DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB  # 4 MiB per spooled temp file
 __spoolfile_size__ = DEFAULT_SPOOL_MAX
+# Buffer: bigger than stdlib default (16 KiB), but still modest
+DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # 256 KiB copy buffer
+__filebuff_size__ = DEFAULT_BUFFER_MAX
 __program_name__ = "Py"+__file_format_default__
 __use_env_file__ = True
 __use_ini_file__ = True
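The new constants centralize the spool and copy-buffer sizes that later hunks reference as __filebuff_size__. A small sketch of how such sizes typically feed tempfile.SpooledTemporaryFile and shutil.copyfileobj (the values are copied from the diff; the input file name is illustrative):

    import shutil
    import tempfile

    BYTES_PER_KiB = 1024
    BYTES_PER_MiB = 1024 * BYTES_PER_KiB
    DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB     # spill to disk past 4 MiB
    DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # copy in 256 KiB chunks

    spool = tempfile.SpooledTemporaryFile(max_size=DEFAULT_SPOOL_MAX)
    with open("example.bin", "rb") as src:  # illustrative input
        shutil.copyfileobj(src, spool, length=DEFAULT_BUFFER_MAX)
    spool.seek(0)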
@@ -641,12 +649,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyCatFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 25, 2, "RC 1", 1)
+__version_date_info__ = (2025, 11, 6, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: d2f5e2d130062be70121a9e3633ff5962335321c $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
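For reference, the __version_date__ expression kept in this hunk zero-pads the month and day from the tuple; checking what it yields for the new values:

    __version_date_info__ = (2025, 11, 6, "RC 1", 1)
    __version_date__ = str(__version_date_info__[0]) + "." + str(
        __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
    print(__version_date__)  # -> 2025.11.06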
@@ -797,7 +805,7 @@ geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prour
 if(platform.python_implementation() != ""):
     py_implementation = platform.python_implementation()
 if(platform.python_implementation() == ""):
-    py_implementation = "
+    py_implementation = "CPython"
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -2385,7 +2393,7 @@ def GetTotalSize(file_list):
         try:
             total_size += os.path.getsize(item)
         except OSError:
-
+            PY_STDERR_TEXT.write("Error accessing file {}: {}\n".format(item, e))
     return total_size


@@ -2622,7 +2630,7 @@ class ZlibFile(object):
         scanned_leading = 0  # for tolerant header scan

         while True:
-            data = self.file.read(
+            data = self.file.read(__filebuff_size__)  # 1 MiB blocks
             if not data:
                 if d is not None:
                     self._spool.write(d.flush())
@@ -2780,7 +2788,7 @@ class ZlibFile(object):

         # Buffer and compress in chunks to limit memory
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # 1 MiB threshold
             chunk = self._compressor.compress(bytes(self._write_buf))
             if chunk:
                 self.file.write(chunk)
@@ -2890,7 +2898,7 @@ class ZlibFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)

     # compatibility aliases for unwrapping utilities
@@ -2926,7 +2934,7 @@ def compress_bytes(payload, level=6, wbits=15, text=False, **kw):
         out = compress_bytes(b"hello")
         out = compress_bytes(u"hello\n", text=True, encoding="utf-8", newline="\n")
     """
-    bio =
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     f = ZlibFile(fileobj=bio, mode=mode, level=level, wbits=wbits, **kw)
     try:
@@ -3085,7 +3093,7 @@ class GzipFile(object):

         self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)

-        CHUNK =
+        CHUNK = __filebuff_size__
         pending = b""
         d = None
         absolute_offset = 0
@@ -3248,7 +3256,7 @@ class GzipFile(object):

         # Stage and compress in chunks
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # 1 MiB threshold
             out = self._compressor.compress(bytes(self._write_buf))
             if out:
                 self.file.write(out)
@@ -3348,7 +3356,7 @@ class GzipFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)

     # compatibility aliases for unwrapping utilities
@@ -3390,7 +3398,7 @@ def gzip_compress_bytes(payload, level=6, text=False, **kw):
     - text=False: 'payload' must be bytes-like; written via GzipFile('wb')
     You can pass newline/encoding/errors to control text encoding.
     """
-    bio =
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     gf = GzipFile(fileobj=bio, mode=mode, level=level, **kw)
     try:
@@ -3622,280 +3630,6 @@ def crc_generic(msg, width, poly, init, xorout, refin, refout):
         crc = _reflect(crc, width)
     return (crc ^ xorout) & mask

-# =========================
-# Named CRCs
-# =========================
-# CRC-16/ANSI (ARC/MODBUS family with init=0xFFFF by default)
-def crc16_ansi(msg, initial_value=0xFFFF):
-    return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)
-
-def crc16_ibm(msg, initial_value=0xFFFF):
-    return crc16_ansi(msg, initial_value)
-
-def crc16(msg):
-    return crc16_ansi(msg, 0xFFFF)
-
-def crc16_ccitt(msg, initial_value=0xFFFF):
-    # CCITT-FALSE
-    return crc_generic(msg, 16, 0x1021, initial_value & 0xFFFF, 0x0000, False, False)
-
-def crc16_x25(msg):
-    return crc_generic(msg, 16, 0x1021, 0xFFFF, 0xFFFF, True, True)
-
-def crc16_kermit(msg):
-    return crc_generic(msg, 16, 0x1021, 0x0000, 0x0000, True, True)
-
-def crc64_ecma(msg, initial_value=0x0000000000000000):
-    return crc_generic(msg, 64, 0x42F0E1EBA9EA3693,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0x0000000000000000, False, False)
-
-def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
-    return crc_generic(msg, 64, 0x000000000000001B,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0xFFFFFFFFFFFFFFFF, True, True)
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
-# =========================
-# Public checksum API
-# =========================
-def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
-    or a single field) and compute the requested checksum. Returns lowercase hex.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    delim = formatspecs.get('format_delimiter', u"\0")
-    hdr_bytes = _serialize_header_fields(inlist or [], delim)
-    if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
-        hdr_bytes = _to_bytes(hdr_bytes)
-    hdr_bytes = bytes(hdr_bytes)
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, hdr_bytes)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Accepts bytes/str/file-like.
-    - Hashlib algos: streamed in 1 MiB chunks.
-    - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
-    - Falls back to one-shot for non-file-like inputs.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    # file-like streaming
-    if hasattr(instr, "read"):
-        # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-            h = hashlib.new(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                h.update(chunk)
-            return h.hexdigest().lower()
-
-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
-        # not known streaming algo: fallback to one-shot bytes
-        data = instr.read()
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-    else:
-        data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
-    data = bytes(data)

-    # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(data)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
 # --- helpers --------------------------------------------------------------

 try:
@@ -3936,206 +3670,15 @@ def _bytes_to_int(b):
         value = (value << 8) | ch
     return value

-
-# --- your existing CRCContext (unchanged) ---------------------------------
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-
-# --- hashlib-backed implementation ---------------------------------------
-
-class _HashlibCRCWrapper(object):
-    """
-    Wrap a hashlib object to present the same interface as CRCContext
-    (update, digest_int, hexdigest).
-
-    Assumes the hashlib algorithm already implements the exact CRC
-    specification (refin/refout/xorout/etc.).
-    """
-    __slots__ = ("_h", "spec", "mask", "width_hex")
-
-    def __init__(self, algo_name, spec):
-        self._h = hashlib.new(algo_name)
-        self.spec = spec
-        self.mask = (1 << spec.width) - 1
-        self.width_hex = (spec.width + 3) // 4
-
-    def update(self, data):
-        self._h.update(_coerce_bytes(data))
-        return self
-
-    def digest_int(self):
-        # Convert final digest bytes to an integer and mask to width
-        value = _bytes_to_int(self._h.digest())
-        return value & self.mask
-
-    def hexdigest(self):
-        h = self._h.hexdigest().lower()
-        # Normalize to the same number of hex digits as CRCContext
-        if len(h) < self.width_hex:
-            h = ("0" * (self.width_hex - len(h))) + h
-        elif len(h) > self.width_hex:
-            h = h[-self.width_hex:]
-        return h
-
-
-# --- public class: choose hashlib or fallback -----------------------------
-
-class CRC(object):
-    """
-    CRC wrapper that uses hashlib if available, otherwise falls back to
-    the pure-Python CRCContext.
-
-    spec.hashlib_name (preferred) or spec.name is used as the hashlib
-    algorithm name, e.g. 'crc32', 'crc32c', etc.
-    """
-
-    __slots__ = ("spec", "_impl")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-        algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
-        impl = None
-
-        if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
-            # Use hashlib-backed implementation
-            impl = _HashlibCRCWrapper(algo_name, spec)
-        else:
-            # Fallback to your pure-Python implementation
-            impl = CRCContext(spec)
-
-        self._impl = impl
-
-    def update(self, data):
-        self._impl.update(data)
-        return self
-
-    def digest_int(self):
-        return self._impl.digest_int()
-
-    def hexdigest(self):
-        return self._impl.hexdigest()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()

     delim = formatspecs.get('format_delimiter', u"\0")
     hdr_bytes = _serialize_header_fields(inlist or [], delim)
@@ -4143,34 +3686,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, format
         hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)

-    if algo_key
-
-
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
+        h = hashlib.new(algo_key)
+        h.update(hdr_bytes)
+        return h.hexdigest().lower()

     return "0"

-def GetFileChecksum(
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
     - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
     - Falls back to one-shot for non-file-like inputs.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()

     # file-like streaming
-    if hasattr(
+    if hasattr(inbytes, "read"):
         # hashlib
-
+
+        if CheckSumSupport(algo_key, hashlib_guaranteed):
             h = hashlib.new(algo_key)
             while True:
-                chunk =
+                chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
                     break
                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
@@ -4178,49 +3717,31 @@ def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__
                 h.update(chunk)
             return h.hexdigest().lower()

-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
         # not known streaming algo: fallback to one-shot bytes
-        data =
+        data = inbytes.read()
         if not isinstance(data, (bytes, bytearray, memoryview)):
            data = bytes(bytearray(data))
     else:
-        data = _to_bytes(
+        data = _to_bytes(inbytes) if (encodedata or not isinstance(inbytes, (bytes, bytearray, memoryview))) else inbytes
     data = bytes(data)

     # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()

-    if algo_key
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
         h = hashlib.new(algo_key)
         h.update(data)
         return h.hexdigest().lower()

     return "0"

-def ValidateHeaderChecksum(inlist=None, checksumtype="
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
     return hmac.compare_digest(want, calc)

-def ValidateFileChecksum(infile, checksumtype="
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
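The validators above compare hex digests with hmac.compare_digest rather than ==, which runs in constant time and so avoids leaking how many leading characters of the expected digest matched. A minimal standalone sketch of the same check (the helper name is hypothetical; the digest shown is the well-known MD5 of b"abc"):

    import hashlib
    import hmac

    def validate_hex_digest(data, want_hex):
        # Compute the digest, normalize the expected value, compare in constant time.
        calc = hashlib.new("md5", data).hexdigest().lower()
        want = want_hex.strip().lower()
        if want.startswith("0x"):
            want = want[2:]
        return hmac.compare_digest(want, calc)

    assert validate_hex_digest(b"abc", "900150983cd24fb0d6963f7d28e17f72")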
@@ -4267,66 +3788,6 @@ def GetDataFromArrayAlt(structure, path, default=None):
     return element

-def GetHeaderChecksum(inlist=[], checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    fileheader = AppendNullBytes(inlist, formatspecs['format_delimiter']) if isinstance(
-        inlist, list) else AppendNullByte(inlist, formatspecs['format_delimiter'])
-    if encodedata and hasattr(fileheader, "encode"):
-        fileheader = fileheader.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](fileheader)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(fileheader)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    if encodedata and hasattr(instr, "encode"):
-        instr = instr.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](instr)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(instr)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    infileheadercshex = GetHeaderChecksum(
-        inlist, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == infileheadercshex
-
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    catinfilecshex = GetFileChecksum(
-        infile, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == catinfilecshex
-
-
 # ========= pushback-aware delimiter reader =========
 class _DelimiterReader(object):
     """
@@ -4659,7 +4120,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    fheaderstart = fp.tell()
     if(formatspecs['new_style']):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
@@ -4682,22 +4142,74 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     fjsonchecksumtype = HeaderOut[30]
     fjsonchecksum = HeaderOut[31]
     fjsoncontent = {}
-
-
-
-
-    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
             try:
-
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-
-
-
-
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
         fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
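The JSON branch in the hunk above tries a base64-wrapped payload first, falls back to parsing the raw text, and ends with an empty dict if both fail. A condensed, self-contained sketch of that decode chain (the helper name is hypothetical):

    import base64
    import binascii
    import json

    def load_json_field(text):
        # Hypothetical helper mirroring the fallback order in the diff.
        try:
            raw = base64.b64decode(text.encode("UTF-8")).decode("UTF-8")
            return json.loads(raw)
        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
            try:
                return json.loads(text)
            except (json.decoder.JSONDecodeError, UnicodeDecodeError):
                return {}

    print(load_json_field('{"a": 1}'))      # plain JSON    -> {'a': 1}
    print(load_json_field('eyJhIjogMX0='))  # base64 of it  -> {'a': 1}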
@@ -4711,8 +4223,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
         return False
-    fhend = fp.tell() - 1
-    fcontentstart = fp.tell()
     fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -4726,9 +4236,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4741,10 +4251,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
         cfcontents = UncompressFileAlt(fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
-    fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
         if(abs(fseeknextasnum) == 0):
@@ -4852,6 +4361,33 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             fprejsoncontent = ""
             fjsonrawcontent = fprejsoncontent
             fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         flisttmp = MkTempFile()
@@ -4874,7 +4410,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     fp.seek(len(delimiter), 1)
     fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4906,9 +4442,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4922,11 +4458,11 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
-            fcontents
+            fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5038,6 +4574,33 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fprejsoncontent = ""
             fjsonrawcontent = fprejsoncontent
             fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         flisttmp = MkTempFile()
@@ -5059,7 +4622,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
                 pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -5091,8 +4654,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
-    if(fccs
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -5106,11 +4669,11 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
-            fcontents
+            fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5144,9 +4707,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5175,7 +4736,30 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
-    fnumfiles = int(inheader[
+    fnumfiles = int(inheader[6], 16)
+    outfseeknextfile = inheaderdata[7]
+    fjsonsize = int(inheaderdata[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fp.read(fjsonsize)
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
     countnum = 0
     flist = []
     while(countnum < fnumfiles):
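The added block above interprets the header's "seek next file" directive: a leading "+" seeks forward relative to the current position, a leading "-" also seeks relatively (int() keeps the sign), and a bare number is an absolute offset. A standalone sketch of that dispatch (the helper name is hypothetical):

    import io
    import re

    def seek_next(fp, directive):
        # Hypothetical helper following the diff's +N / -N / N convention.
        if re.findall(r"^\+([0-9]+)", directive):
            fp.seek(int(directive.replace("+", "")), 1)  # relative, forward
        elif re.findall(r"^\-([0-9]+)", directive):
            fp.seek(int(directive), 1)                   # relative, backward
        elif re.findall(r"^([0-9]+)", directive):
            fp.seek(int(directive), 0)                   # absolute
        else:
            raise ValueError("bad seek directive: " + directive)
        return fp.tell()

    fp = io.BytesIO(b"0123456789")
    print(seek_next(fp, "4"))   # -> 4
    print(seek_next(fp, "+2"))  # -> 6
    print(seek_next(fp, "-3"))  # -> 3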
@@ -5195,9 +4779,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5216,10 +4798,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -5238,7 +4820,106 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
|
|
|
5238
4820
|
fnumfields = int(inheader[1], 16)
|
|
5239
4821
|
fhencoding = inheader[2]
|
|
5240
4822
|
fostype = inheader[3]
|
|
5241
|
-
|
|
4823
|
+
fpythontype = inheader[4]
|
|
4824
|
+
fprojectname = inheader[4]
|
|
4825
|
+
fnumfiles = int(inheader[6], 16)
|
|
4826
|
+
fseeknextfile = inheader[7]
|
|
4827
|
+
fjsontype = inheader[8]
|
|
4828
|
+
fjsonlen = int(inheader[9], 16)
|
|
4829
|
+
fjsonsize = int(inheader[10], 16)
|
|
4830
|
+
fjsonchecksumtype = inheader[11]
|
|
4831
|
+
fjsonchecksum = inheader[12]
|
|
4832
|
+
fjsoncontent = {}
|
|
4833
|
+
fjstart = fp.tell()
|
|
4834
|
+
if(fjsontype=="json"):
|
|
4835
|
+
fjsoncontent = {}
|
|
4836
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4837
|
+
if(fjsonsize > 0):
|
|
4838
|
+
try:
|
|
4839
|
+
fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
|
|
4840
|
+
fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
|
|
4841
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4842
|
+
try:
|
|
4843
|
+
fjsonrawcontent = fprejsoncontent
|
|
4844
|
+
fjsoncontent = json.loads(fprejsoncontent)
|
|
4845
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4846
|
+
fprejsoncontent = ""
|
|
4847
|
+
fjsonrawcontent = fprejsoncontent
|
|
4848
|
+
fjsoncontent = {}
|
|
4849
|
+
else:
|
|
4850
|
+
fprejsoncontent = ""
|
|
4851
|
+
fjsonrawcontent = fprejsoncontent
|
|
4852
|
+
fjsoncontent = {}
|
|
4853
|
+
elif(testyaml and fjsontype == "yaml"):
|
|
4854
|
+
fjsoncontent = {}
|
|
4855
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4856
|
+
if (fjsonsize > 0):
|
|
4857
|
+
try:
|
|
4858
|
+
# try base64 → utf-8 → YAML
|
|
4859
|
+
fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
|
|
4860
|
+
fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
|
|
4861
|
+
except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
|
|
4862
|
+
try:
|
|
4863
|
+
# fall back to treating the bytes as plain text YAML
|
|
4864
|
+
fjsonrawcontent = fprejsoncontent
|
|
4865
|
+
fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
|
|
4866
|
+
except (UnicodeDecodeError, yaml.YAMLError):
|
|
4867
|
+
# final fallback: empty
|
|
4868
|
+
fprejsoncontent = ""
|
|
4869
|
+
fjsonrawcontent = fprejsoncontent
|
|
4870
|
+
fjsoncontent = {}
|
|
4871
|
+
else:
|
|
4872
|
+
fprejsoncontent = ""
|
|
4873
|
+
fjsonrawcontent = fprejsoncontent
|
|
4874
|
+
fjsoncontent = {}
|
|
4875
|
+
elif(not testyaml and fjsontype == "yaml"):
|
|
4876
|
+
fjsoncontent = {}
|
|
4877
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4878
|
+
fprejsoncontent = ""
|
|
4879
|
+
fjsonrawcontent = fprejsoncontent
|
|
4880
|
+
elif(fjsontype=="list"):
|
|
4881
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4882
|
+
flisttmp = MkTempFile()
|
|
4883
|
+
flisttmp.write(fprejsoncontent.encode())
|
|
4884
|
+
flisttmp.seek(0)
|
|
4885
|
+
fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
|
|
4886
|
+
flisttmp.close()
|
|
4887
|
+
fjsonrawcontent = fjsoncontent
|
|
4888
|
+
if(fjsonlen==1):
|
|
4889
|
+
try:
|
|
4890
|
+
fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
|
|
4891
|
+
fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
|
|
4892
|
+
fjsonlen = len(fjsoncontent)
|
|
4893
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4894
|
+
try:
|
|
4895
|
+
fjsonrawcontent = fjsoncontent[0]
|
|
4896
|
+
fjsoncontent = json.loads(fjsoncontent[0])
|
|
4897
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4898
|
+
pass
|
|
4899
|
+
fjend = fp.tell()
|
|
4900
|
+
if(re.findall("^\\+([0-9]+)", fseeknextfile)):
|
|
4901
|
+
fseeknextasnum = int(fseeknextfile.replace("+", ""))
|
|
4902
|
+
if(abs(fseeknextasnum) == 0):
|
|
4903
|
+
pass
|
|
4904
|
+
fp.seek(fseeknextasnum, 1)
|
|
4905
|
+
elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
|
|
4906
|
+
fseeknextasnum = int(fseeknextfile)
|
|
4907
|
+
if(abs(fseeknextasnum) == 0):
|
|
4908
|
+
pass
|
|
4909
|
+
fp.seek(fseeknextasnum, 1)
|
|
4910
|
+
elif(re.findall("^([0-9]+)", fseeknextfile)):
|
|
4911
|
+
fseeknextasnum = int(fseeknextfile)
|
|
4912
|
+
if(abs(fseeknextasnum) == 0):
|
|
4913
|
+
pass
|
|
4914
|
+
fp.seek(fseeknextasnum, 0)
|
|
4915
|
+
else:
|
|
4916
|
+
return False
|
|
4917
|
+
jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
|
|
4918
|
+
if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
|
|
4919
|
+
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
4920
|
+
fname + " at offset " + str(fheaderstart))
|
|
4921
|
+
VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
|
|
4922
|
+
return False
|
|
5242
4923
|
fprechecksumtype = inheader[-2]
|
|
5243
4924
|
fprechecksum = inheader[-1]
|
|
5244
4925
|
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
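
The decode ladder added in this hunk tries base64-wrapped JSON first, falls back to plain JSON, and only then gives up with an empty dict (the YAML branch mirrors it when oyaml/yaml imported successfully). A compact standalone sketch of that fallback order, without the archive plumbing:

    import base64
    import binascii
    import json

    def decode_json_block(raw_text):
        # 1) base64-wrapped JSON, 2) plain JSON, 3) empty dict.
        try:
            return json.loads(base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, ValueError, UnicodeDecodeError):
            try:
                return json.loads(raw_text)  # JSONDecodeError subclasses ValueError
            except ValueError:
                return {}

    wrapped = base64.b64encode(json.dumps({"a": 1}).encode()).decode()
    print(decode_json_block(wrapped))     # {'a': 1}
    print(decode_json_block('{"b": 2}'))  # {'b': 2}
    print(decode_json_block("not json"))  # {}
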
@@ -5251,7 +4932,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         return False
     formversions = re.search('(.*?)(\\d+)', formstring).groups()
     fcompresstype = ""
-    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
     if (seekstart < 0) or (seekstart > fnumfiles):
         seekstart = 0
     if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -5279,7 +4960,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
         fp.seek(len(delimiter), 1)
         prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-        if(
+        if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
             VerbosePrintOut("File JSON Data Checksum Error with file " +
                             prefname + " at offset " + str(prefhstart))
             VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5287,7 +4968,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         prenewfcs = GetHeaderChecksum(
             preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
         prefcs = preheaderdata[-2]
-        if(prefcs
+        if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
             VerbosePrintOut("File Header Checksum Error with file " +
                             prefname + " at offset " + str(prefhstart))
             VerbosePrintOut("'" + prefcs + "' != " +
@@ -5303,10 +4984,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
             prefcontents.write(fp.read(prefsize))
             prefcontents.seek(0, 0)
             prenewfccs = GetFileChecksum(
-                prefcontents
+                prefcontents, preheaderdata[-3].lower(), False, formatspecs)
             prefccs = preheaderdata[-1]
             pyhascontents = True
-            if(prefccs
+            if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
                 VerbosePrintOut("File Content Checksum Error with file " +
                                 prefname + " at offset " + str(prefcontentstart))
                 VerbosePrintOut("'" + prefccs +
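
Every checksum comparison in these hunks now goes through hmac.compare_digest, which runs in constant time instead of short-circuiting at the first differing byte. A minimal sketch of the check (md5 is just the example digest here):

    import hashlib
    import hmac

    def verify_checksum(data, expected_hex, skipchecksum=False):
        actual_hex = hashlib.md5(data).hexdigest()
        # Constant-time comparison avoids leaking the mismatch position.
        if not hmac.compare_digest(expected_hex, actual_hex) and not skipchecksum:
            return False
        return True

    payload = b"example payload"
    good = hashlib.md5(payload).hexdigest()
    print(verify_checksum(payload, good))      # True
    print(verify_checksum(payload, "0" * 32))  # False
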
@@ -5352,9 +5033,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5373,10 +5052,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -5393,9 +5072,40 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-
-
+    fnumfiles = int(inheader[6], 16)
+    fseeknextfile = inheaderdata[7]
+    fjsontype = int(inheader[8], 16)
+    fjsonlen = int(inheader[9], 16)
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fjsoncontent = {}
+    fjstart = fp.tell()
+    fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+    fjend = fp.tell()
+    if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5441,7 +5151,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
         prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
         fp.seek(len(delimiter), 1)
         prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
-        if(
+        if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
             VerbosePrintOut("File JSON Data Checksum Error with file " +
                             prefname + " at offset " + str(prefhstart))
             VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5449,7 +5159,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
         prenewfcs = GetHeaderChecksum(
             preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
         prefcs = preheaderdata[-2]
-        if(prefcs
+        if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
             VerbosePrintOut("File Header Checksum Error with file " +
                             prefname + " at offset " + str(prefhstart))
             VerbosePrintOut("'" + prefcs + "' != " +
@@ -5470,7 +5180,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
                 prefcontents, preheaderdata[-3].lower(), False, formatspecs)
             prefccs = preheaderdata[-1]
             pyhascontents = True
-            if(prefccs
+            if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
                 VerbosePrintOut("File Content Checksum Error with file " +
                                 prefname + " at offset " + str(prefcontentstart))
                 VerbosePrintOut("'" + prefccs +
@@ -5511,24 +5221,17 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp = infile
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
         currentfilepos = fp.tell()
     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
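
The removed four-line hasattr(sys.stdin, "buffer") branch is now a single copy through the module-level PY_STDIN_BUF alias, and copyfileobj gets an explicit chunk size. The same shim in isolation; the 64 KiB constant is an assumed stand-in for this module's __filebuff_size__:

    import io
    import shutil
    import sys

    # Py3 text stdin wraps a binary .buffer; Py2 stdin is already bytes.
    STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
    FILEBUFF_SIZE = 65536  # assumed stand-in for __filebuff_size__

    def slurp_stdin_to_temp():
        fp = io.BytesIO()
        # length= overrides copyfileobj's default chunk size.
        shutil.copyfileobj(STDIN_BUF, fp, length=FILEBUFF_SIZE)
        fp.seek(0)
        return fp
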
@@ -5538,9 +5241,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp.write(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5549,9 +5250,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp = download_file_from_internet_file(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5559,9 +5258,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
     elif(isinstance(infile, FileLikeAdapter)):
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5571,9 +5268,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp = open(infile, "rb")
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5624,9 +5319,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         currentinfilepos = infp.tell()
         try:
             infp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(infp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(infp)
         outinfsize = infp.tell()
         infp.seek(currentinfilepos, 0)
@@ -5665,24 +5358,17 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp = infile
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
         currentfilepos = fp.tell()
     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5692,9 +5378,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp.write(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5703,9 +5387,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp = download_file_from_internet_file(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5713,9 +5395,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
     elif(isinstance(infile, FileLikeAdapter)):
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5725,9 +5405,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp = open(infile, "rb")
        try:
            fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
            SeekToEndOfFile(fp)
        outfsize = fp.tell()
        fp.seek(filestart, 0)
@@ -5778,9 +5456,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         currentinfilepos = infp.tell()
         try:
             infp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(infp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(infp)
         outinfsize = infp.tell()
         infp.seek(currentinfilepos, 0)
@@ -5865,12 +5541,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
     return format(int(n), 'x').lower()
 
-def AppendFileHeader(fp,
-                     numfiles,
-                     fencoding,
-                     extradata=None,
-                     checksumtype="crc32",
-                     formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
     """
     Build and write the archive file header.
     Returns the same file-like 'fp' on success, or False on failure.
@@ -5918,24 +5589,44 @@ def AppendFileHeader(fp,
     # 4) core header fields before checksum:
     #    tmpoutlenhex, fencoding, platform.system(), fnumfiles
     fnumfiles_hex = _hex_lower(numfiles)
-
+    fjsontype = "json"
+    if(len(jsondata) > 0):
+        try:
+            fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+            fjsoncontent = "".encode("UTF-8")
+    else:
+        fjsoncontent = "".encode("UTF-8")
+    fjsonsize = format(len(fjsoncontent), 'x').lower()
+    fjsonlen = format(len(jsondata), 'x').lower()
+    tmpoutlist = []
+    tmpoutlist.append(fjsontype)
+    tmpoutlist.append(fjsonlen)
+    tmpoutlist.append(fjsonsize)
+    if(len(jsondata) > 0):
+        tmpoutlist.append(checksumtype[1])
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+    else:
+        tmpoutlist.append("none")
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
     # Preserve your original "tmpoutlen" computation exactly
-    tmpoutlist
-
+    tmpoutlist.append(extrasizelen)
+    tmpoutlist.append(extrafields)
+    tmpoutlen = 8 + len(tmpoutlist) + len(xlist)
     tmpoutlenhex = _hex_lower(tmpoutlen)
 
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), fnumfiles_hex], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
     if xlist:
         fnumfilesa += AppendNullBytes(xlist, delimiter)
     # Append checksum type
-    fnumfilesa += AppendNullByte(checksumtype, delimiter)
+    fnumfilesa += AppendNullByte(checksumtype[0], delimiter)
 
     # 5) inner checksum over fnumfilesa
-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
     tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)
 
     # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
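
The new header section serializes the optional JSON metadata with compact separators and records its entry count and byte size as lower-case hex, plus a checksum type and digest ("none" when the payload is empty). A self-contained sketch of just that serialization step, with hashlib standing in for the module's GetFileChecksum:

    import hashlib
    import json

    def build_json_fields(jsondata, checksum_name="md5"):
        # Compact separators keep the embedded JSON as small as possible.
        if jsondata:
            content = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
            cstype = checksum_name
            digest = hashlib.new(checksum_name, content).hexdigest()
        else:
            content = b""
            cstype = "none"
            digest = ""
        return content, [
            "json",
            format(len(jsondata), 'x').lower(),  # entry count, hex
            format(len(content), 'x').lower(),   # payload size in bytes, hex
            cstype,
            digest,
        ]

    content, fields = build_json_fields({"origin": "example"})
    print(fields)
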
@@ -5948,7 +5639,7 @@ def AppendFileHeader(fp,
         + fnumfilesa
     )
 
-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
     fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)
 
     # 8) final total size field (again per your original logic)
@@ -5956,10 +5647,11 @@ def AppendFileHeader(fp,
     formheaersizestr = AppendNullByte(formheaersize, delimiter)  # computed but not appended in original
     # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
     # Keeping that behavior for compatibility.
-
+    nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+    outfileout = fnumfilesa + fjsoncontent + nullstrecd
     # 9) write and try to sync
     try:
-        fp.write(
+        fp.write(outfileout)
     except (OSError, io.UnsupportedOperation):
         return False
 
@@ -5980,21 +5672,21 @@ def AppendFileHeader(fp,
     return fp
 
 
-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
     if(IsNestedDict(formatspecs) and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
         fmttype = __file_format_default__
         formatspecs = formatspecs[fmttype]
-    AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
     return fp
 
 
-def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype="
+def MakeEmptyCatFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
 
 
-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
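
Across these signatures checksumtype changes from one string to a list, so the archive-header digest and the JSON-block digest can be chosen independently; the per-file writers below take five entries. The index layout is inferred from the call sites in this diff, not stated anywhere in it:

    # Inferred layout, per the calls in this diff:
    #   [0] archive header, [1] archive JSON block,
    #   [2] file header, [3] file JSON block, [4] file content.
    header_checksums = ["md5", "md5"]
    perfile_checksums = ["md5", "md5", "md5", "md5", "md5"]
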
@@ -6035,7 +5727,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
     except PermissionError:
         return False
-    AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6043,18 +5735,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     if(outfile == "-"):
         fp.seek(0, 0)
-        if(hasattr(sys.stdout, "buffer")):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
     elif(outfile is None):
         fp.seek(0, 0)
         outvar = fp.read()
@@ -6073,11 +5758,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
     return True
 
 
-def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="
+def MakeEmptyCatFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
     return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
 
 
-def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["
+def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
     if(not hasattr(fp, "write")):
         return False
     if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -6156,26 +5841,21 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     return fp
 
-
-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     advancedlist = formatspecs['use_advanced_list']
     altinode = formatspecs['use_alt_inode']
     if(verbose):
         logging.basicConfig(format="%(message)s",
-                            stream=
+                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     infilelist = []
     if(infiles == "-"):
-        for line in
+        for line in PY_STDIN_TEXT:
             infilelist.append(line.strip())
         infilelist = list(filter(None, infilelist))
     elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
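
Verbose mode now points logging at the module's captured text stream instead of leaving the destination implicit, and the infiles == "-" case reads one path per line from stdin, dropping blanks. A minimal sketch of both, with a local stand-in for PY_STDOUT_TEXT:

    import logging
    import sys

    STDOUT_TEXT = sys.stdout  # stand-in for the module's PY_STDOUT_TEXT alias

    def read_file_list(verbose=False):
        if verbose:
            logging.basicConfig(format="%(message)s",
                                stream=STDOUT_TEXT, level=logging.DEBUG)
        # "-" means: one path per line on stdin; blank lines are dropped.
        names = [line.strip() for line in sys.stdin]
        return list(filter(None, names))
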
@@ -6214,16 +5894,12 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     FullSizeFilesAlt = 0
     for curfname in GetDirList:
@@ -6373,7 +6049,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             curcompression = "none"
             if not followlink and ftype in data_types:
                 with open(fname, "rb") as fpc:
-                    shutil.copyfileobj(fpc, fcontents)
+                    shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
                 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                 fcontents.seek(0, 0)
                 if(typechecktest is not False):
@@ -6391,7 +6067,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
                     while(ilmin < ilsize):
                         cfcontents = MkTempFile()
                         fcontents.seek(0, 0)
-                        shutil.copyfileobj(fcontents, cfcontents)
+                        shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                         fcontents.seek(0, 0)
                         cfcontents.seek(0, 0)
                         cfcontents = CompressOpenFileAlt(
@@ -6407,7 +6083,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
                     cfcontents = MkTempFile()
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
                         cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6423,7 +6099,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
                     return False
                 flstatinfo = os.stat(flinkname)
                 with open(flinkname, "rb") as fpc:
-                    shutil.copyfileobj(fpc, fcontents)
+                    shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
                 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                 fcontents.seek(0, 0)
                 if(typechecktest is not False):
@@ -6441,7 +6117,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
                     while(ilmin < ilsize):
                         cfcontents = MkTempFile()
                         fcontents.seek(0, 0)
-                        shutil.copyfileobj(fcontents, cfcontents)
+                        shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                         fcontents.seek(0, 0)
                         cfcontents.seek(0, 0)
                         cfcontents = CompressOpenFileAlt(
@@ -6457,7 +6133,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
                     cfcontents = MkTempFile()
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
                         cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6475,25 +6151,21 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
     return fp
 
-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
         logging.basicConfig(format="%(message)s",
-                            stream=
+                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6502,10 +6174,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     inodetoforminode = {}
     if(infile == "-"):
         infile = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
@@ -6560,16 +6229,12 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     except FileNotFoundError:
         return False
     numfiles = int(len(tarfp.getmembers()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
@@ -6655,7 +6320,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             curcompression = "none"
             if ftype in data_types:
                 fpc = tarfp.extractfile(member)
-                shutil.copyfileobj(fpc, fcontents)
+                shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
                 fpc.close()
                 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
                 fcontents.seek(0, 0)
@@ -6674,7 +6339,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
                     while(ilmin < ilsize):
                         cfcontents = MkTempFile()
                         fcontents.seek(0, 0)
-                        shutil.copyfileobj(fcontents, cfcontents)
+                        shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                         fcontents.seek(0, 0)
                         cfcontents.seek(0, 0)
                         cfcontents = CompressOpenFileAlt(
@@ -6690,7 +6355,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
                     cfcontents = MkTempFile()
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
                         cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6708,26 +6373,22 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         fcontents.close()
     return fp
 
-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
         logging.basicConfig(format="%(message)s",
-                            stream=
+                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6736,10 +6397,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     inodetoforminode = {}
     if(infile == "-"):
         infile = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
@@ -6764,16 +6422,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     if(ziptest):
         VerbosePrintOut("Bad file found!")
     numfiles = int(len(zipfp.infolist()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
@@ -6858,24 +6512,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fcsize = format(int(0), 'x').lower()
         try:
             fuid = format(int(os.getuid()), 'x').lower()
-        except AttributeError:
-            fuid = format(int(0), 'x').lower()
-        except KeyError:
+        except (KeyError, AttributeError):
            fuid = format(int(0), 'x').lower()
         try:
             fgid = format(int(os.getgid()), 'x').lower()
-        except AttributeError:
-            fgid = format(int(0), 'x').lower()
-        except KeyError:
+        except (KeyError, AttributeError):
             fgid = format(int(0), 'x').lower()
         try:
             import pwd
             try:
                 userinfo = pwd.getpwuid(os.getuid())
                 funame = userinfo.pw_name
-            except KeyError:
-                funame = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 funame = ""
         except ImportError:
             funame = ""
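
The owner-lookup fallbacks collapse the duplicated KeyError/AttributeError handlers into tuple clauses; pwd is POSIX-only, hence the separate ImportError arm. A portable sketch of the same lookup:

    import os

    def current_owner_name():
        # os.getuid is missing on Windows (AttributeError); an unknown
        # uid makes pwd.getpwuid raise KeyError.
        try:
            import pwd
            try:
                return pwd.getpwuid(os.getuid()).pw_name
            except (KeyError, AttributeError):
                return ""
        except ImportError:
            return ""

    print(current_owner_name())
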
@@ -6885,9 +6533,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
             try:
                 groupinfo = grp.getgrgid(os.getgid())
                 fgname = groupinfo.gr_name
-            except KeyError:
-                fgname = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 fgname = ""
         except ImportError:
             fgname = ""
@@ -6910,7 +6556,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
                     while(ilmin < ilsize):
                         cfcontents = MkTempFile()
                         fcontents.seek(0, 0)
-                        shutil.copyfileobj(fcontents, cfcontents)
+                        shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                         fcontents.seek(0, 0)
                         cfcontents.seek(0, 0)
                         cfcontents = CompressOpenFileAlt(
@@ -6923,7 +6569,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
                     cfcontents = MkTempFile()
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
                         cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6941,31 +6587,26 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                       fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         fcontents.close()
     return fp
 
 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         return False
-
-
-def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+else:
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         if(not hasattr(fp, "write")):
             return False
         if(verbose):
             logging.basicConfig(format="%(message)s",
-                                stream=
+                                stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         curinode = 0
         curfid = 0
         inodelist = []
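
Restructuring the stub as "if(not rarfile_support): ... else: ..." makes the two definitions mutually exclusive; previously the second, top-level definition unconditionally shadowed the stub even when the optional dependency was absent. The shape of that guard in isolation (flag name reused from this module, value assumed):

    rarfile_support = False  # normally set by the optional-import probe

    if not rarfile_support:
        def append_from_rar(infile, fp):
            # Stub: optional dependency missing, so refuse politely.
            return False
    else:
        def append_from_rar(infile, fp):
            # Real implementation would convert the RAR members here.
            return fp
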
@@ -6981,26 +6622,18 @@ if(rarfile_support):
|
|
|
6981
6622
|
if(rartest):
|
|
6982
6623
|
VerbosePrintOut("Bad file found!")
|
|
6983
6624
|
numfiles = int(len(rarfp.infolist()))
|
|
6984
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
|
|
6625
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
6985
6626
|
try:
|
|
6986
6627
|
fp.flush()
|
|
6987
6628
|
if(hasattr(os, "sync")):
|
|
6988
6629
|
os.fsync(fp.fileno())
|
|
6989
|
-
except io.UnsupportedOperation:
|
|
6990
|
-
pass
|
|
6991
|
-
except AttributeError:
|
|
6992
|
-
pass
|
|
6993
|
-
except OSError:
|
|
6630
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
6994
6631
|
pass
|
|
6995
6632
|
try:
|
|
6996
6633
|
fp.flush()
|
|
6997
6634
|
if(hasattr(os, "sync")):
|
|
6998
6635
|
os.fsync(fp.fileno())
|
|
6999
|
-
except io.UnsupportedOperation:
|
|
7000
|
-
pass
|
|
7001
|
-
except AttributeError:
|
|
7002
|
-
pass
|
|
7003
|
-
except OSError:
|
|
6636
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7004
6637
|
pass
|
|
7005
6638
|
for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
|
|
7006
6639
|
is_unix = False
|
|
@@ -7109,24 +6742,18 @@ if(rarfile_support):
|
|
|
7109
6742
|
int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
|
|
7110
6743
|
try:
|
|
7111
6744
|
fuid = format(int(os.getuid()), 'x').lower()
|
|
7112
|
-
except AttributeError:
|
|
7113
|
-
fuid = format(int(0), 'x').lower()
|
|
7114
|
-
except KeyError:
|
|
6745
|
+
except (KeyError, AttributeError):
|
|
7115
6746
|
fuid = format(int(0), 'x').lower()
|
|
7116
6747
|
try:
|
|
7117
6748
|
fgid = format(int(os.getgid()), 'x').lower()
|
|
7118
|
-
except AttributeError:
|
|
7119
|
-
fgid = format(int(0), 'x').lower()
|
|
7120
|
-
except KeyError:
|
|
6749
|
+
except (KeyError, AttributeError):
|
|
7121
6750
|
fgid = format(int(0), 'x').lower()
|
|
7122
6751
|
try:
|
|
7123
6752
|
import pwd
|
|
7124
6753
|
try:
|
|
7125
6754
|
userinfo = pwd.getpwuid(os.getuid())
|
|
7126
6755
|
funame = userinfo.pw_name
|
|
7127
|
-
except KeyError:
|
|
7128
|
-
funame = ""
|
|
7129
|
-
except AttributeError:
|
|
6756
|
+
except (KeyError, AttributeError):
|
|
7130
6757
|
funame = ""
|
|
7131
6758
|
except ImportError:
|
|
7132
6759
|
funame = ""
|
|
@@ -7136,9 +6763,7 @@ if(rarfile_support):
|
|
|
7136
6763
|
try:
|
|
7137
6764
|
groupinfo = grp.getgrgid(os.getgid())
|
|
7138
6765
|
fgname = groupinfo.gr_name
|
|
7139
|
-
except KeyError:
|
|
7140
|
-
fgname = ""
|
|
7141
|
-
except AttributeError:
|
|
6766
|
+
except (KeyError, AttributeError):
|
|
7142
6767
|
fgname = ""
|
|
7143
6768
|
except ImportError:
|
|
7144
6769
|
fgname = ""
|
|
@@ -7161,7 +6786,7 @@ if(rarfile_support):
|
|
|
7161
6786
|
while(ilmin < ilsize):
|
|
7162
6787
|
cfcontents = MkTempFile()
|
|
7163
6788
|
fcontents.seek(0, 0)
|
|
7164
|
-
shutil.copyfileobj(fcontents, cfcontents)
|
|
6789
|
+
shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
|
|
7165
6790
|
fcontents.seek(0, 0)
|
|
7166
6791
|
cfcontents.seek(0, 0)
|
|
7167
6792
|
cfcontents = CompressOpenFileAlt(
|
|
@@ -7177,7 +6802,7 @@ if(rarfile_support):
|
|
|
7177
6802
|
curcompression = compressionuselist[ilcmin]
|
|
7178
6803
|
fcontents.seek(0, 0)
|
|
7179
6804
|
cfcontents = MkTempFile()
|
|
7180
|
-
shutil.copyfileobj(fcontents, cfcontents)
|
|
6805
|
+
shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
|
|
7181
6806
|
cfcontents.seek(0, 0)
|
|
7182
6807
|
cfcontents = CompressOpenFileAlt(
|
|
7183
6808
|
cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -7195,31 +6820,26 @@ if(rarfile_support):
|
|
|
7195
6820
|
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
7196
6821
|
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
|
|
7197
6822
|
AppendFileHeaderWithContent(
|
|
7198
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
|
|
6823
|
+
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
7199
6824
|
try:
|
|
7200
6825
|
fp.flush()
|
|
7201
6826
|
if(hasattr(os, "sync")):
|
|
7202
6827
|
os.fsync(fp.fileno())
|
|
7203
|
-
except io.UnsupportedOperation:
|
|
7204
|
-
pass
|
|
7205
|
-
except AttributeError:
|
|
7206
|
-
pass
|
|
7207
|
-
except OSError:
|
|
6828
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7208
6829
|
pass
|
|
7209
6830
|
fcontents.close()
|
|
7210
6831
|
return fp
|
|
7211
6832
|
|
|
7212
6833
|
if(not py7zr_support):
|
|
7213
|
-
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
|
|
6834
|
+
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
7214
6835
|
return False
|
|
7215
|
-
|
|
7216
|
-
|
|
7217
|
-
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
|
|
6836
|
+
else:
|
|
6837
|
+
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
7218
6838
|
if(not hasattr(fp, "write")):
|
|
7219
6839
|
return False
|
|
7220
6840
|
if(verbose):
|
|
7221
6841
|
logging.basicConfig(format="%(message)s",
|
|
7222
|
-
stream=
|
|
6842
|
+
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
7223
6843
|
formver = formatspecs['format_ver']
|
|
7224
6844
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
7225
6845
|
curinode = 0
|
|
@@ -7237,16 +6857,12 @@ if(py7zr_support):
  if(sztestalt):
  VerbosePrintOut("Bad file found!")
  numfiles = int(len(szpfp.list()))
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  for member in sorted(szpfp.list(), key=lambda x: x.filename):
  fencoding = "UTF-8"
@@ -7296,24 +6912,18 @@ if(py7zr_support):
  int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
  try:
  fuid = format(int(os.getuid()), 'x').lower()
- except AttributeError:
- fuid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fuid = format(int(0), 'x').lower()
  try:
  fgid = format(int(os.getgid()), 'x').lower()
- except AttributeError:
- fgid = format(int(0), 'x').lower()
- except KeyError:
+ except (KeyError, AttributeError):
  fgid = format(int(0), 'x').lower()
  try:
  import pwd
  try:
  userinfo = pwd.getpwuid(os.getuid())
  funame = userinfo.pw_name
- except KeyError:
- funame = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  funame = ""
  except ImportError:
  funame = ""
@@ -7323,9 +6933,7 @@ if(py7zr_support):
  try:
  groupinfo = grp.getgrgid(os.getgid())
  fgname = groupinfo.gr_name
- except KeyError:
- fgname = ""
- except AttributeError:
+ except (KeyError, AttributeError):
  fgname = ""
  except ImportError:
  fgname = ""
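Note: these hunks converge on `except (KeyError, AttributeError)` for the uid/gid and pwd/grp lookups. A sketch of the combined best-effort pattern, assuming the POSIX-only modules may be absent (e.g. on Windows):

    import os

    def current_owner_names():
        """Best-effort user/group names; empty strings where unavailable."""
        funame = fgname = ""
        try:
            import pwd
            try:
                funame = pwd.getpwuid(os.getuid()).pw_name
            except (KeyError, AttributeError):
                funame = ""
        except ImportError:
            funame = ""
        try:
            import grp
            try:
                fgname = grp.getgrgid(os.getgid()).gr_name
            except (KeyError, AttributeError):
                fgname = ""
        except ImportError:
            fgname = ""
        return funame, fgname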
@@ -7351,7 +6959,7 @@ if(py7zr_support):
  while(ilmin < ilsize):
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -7367,7 +6975,7 @@ if(py7zr_support):
  curcompression = compressionuselist[ilcmin]
  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7385,25 +6993,21 @@ if(py7zr_support):
  tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
  fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
  AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+ fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
  try:
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  fcontents.close()
  return fp

- def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["
+ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  if(not hasattr(fp, "write")):
  return False
  if(verbose):
- logging.basicConfig(format="%(message)s", stream=
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  GetDirList = inlist
  if(not GetDirList):
  return False
@@ -7415,7 +7019,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  inodetoforminode = {}
  numfiles = int(len(GetDirList))
  fnumfiles = format(numfiles, 'x').lower()
- AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+ AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
  for curfname in GetDirList:
  ftype = format(curfname[0], 'x').lower()
  fencoding = curfname[1]
@@ -7457,16 +7061,16 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
  fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
  fcontents.seek(0, 0)
  AppendFileHeaderWithContent(
- fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+ fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
  return fp


- def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["
+ def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
  inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
  return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)


- def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["
+ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7519,18 +7123,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
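Note: every writer in this family shares the same output dispatch, and the branch on `sys.stdout.buffer` collapses into a single copy through the module's `PY_STDOUT_BUF` alias. A rough, self-contained sketch of the convention (function and parameter names here are illustrative, not pycatfile's API):

    import io
    import shutil

    def emit(fp, outfile, stdout_buf, bufsize=1 << 20):
        """'-' streams to stdout (pass the module's PY_STDOUT_BUF),
        None returns the raw bytes, anything else is treated as a path."""
        fp.seek(0, 0)
        if outfile == "-":
            shutil.copyfileobj(fp, stdout_buf, length=bufsize)
            return True
        if outfile is None:
            return fp.read()
        with open(outfile, "wb") as outfp:
            shutil.copyfileobj(fp, outfp, length=bufsize)
        return True

    # Example: capture what would have gone to stdout.
    buf = io.BytesIO()
    emit(io.BytesIO(b"archive bytes"), "-", buf)
    assert buf.getvalue() == b"archive bytes"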
@@ -7547,7 +7144,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
  fp.close()
  return True

- def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["
+ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7562,7 +7159,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
  return True
  return returnout

- def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["
+ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7612,18 +7209,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7641,7 +7231,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7692,18 +7282,11 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7721,7 +7304,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7736,7 +7319,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
  return True
  return returnout

- def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7787,18 +7370,11 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7816,7 +7392,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
  fp.close()
  return True

- def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7832,11 +7408,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not rarfile_support):
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  return False
-
-
- def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ else:
+ def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7887,18 +7462,11 @@ if(rarfile_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -7916,7 +7484,7 @@ if(rarfile_support):
  fp.close()
  return True

- def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -7932,11 +7500,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
  return returnout

  if(not py7zr_support):
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  return False
-
-
- def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+ else:
+ def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if(IsNestedDict(formatspecs) and fmttype=="auto" and
  (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
  get_in_ext = os.path.splitext(outfile)
@@ -7987,18 +7554,11 @@ if(py7zr_support):
  fp.flush()
  if(hasattr(os, "sync")):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(outfile == "-"):
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif(outfile is None):
  fp.seek(0, 0)
  outvar = fp.read()
@@ -8016,7 +7576,7 @@ if(py7zr_support):
  fp.close()
  return True

- def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
+ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  if not isinstance(infiles, list):
  infiles = [infiles]
  returnout = False
@@ -8031,7 +7591,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
  return True
  return returnout

- def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["
+ def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
  return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
@@ -8065,9 +7625,7 @@ def PrintPermissionString(fchmode, ftype):
  permissionstr = "w" + permissionstr
  try:
  permissionoutstr = stat.filemode(fchmode)
- except AttributeError:
- permissionoutstr = permissionstr
- except KeyError:
+ except (KeyError, AttributeError):
  permissionoutstr = permissionstr
  return permissionoutstr
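Note: `stat.filemode()` was added in Python 3.3, so the collapsed handler keeps the hand-built permission string on older interpreters. A minimal illustration (the fallback literal is illustrative, standing in for the `permissionstr` built above):

    import stat

    mode = stat.S_IFREG | 0o644
    try:
        permissionoutstr = stat.filemode(mode)
    except (KeyError, AttributeError):
        permissionoutstr = "-rw-r--r--"  # stand-in for the hand-built string
    print(permissionoutstr)  # -rw-r--r--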
@@ -8983,7 +8541,7 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0


  def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
- filefp =
+ filefp = MkTempFile("", isbytes=False)
  outstring = UncompressString(instring, formatspecs, filestart)
  filefp.write(outstring)
  filefp.seek(0, 0)
@@ -8998,7 +8556,7 @@ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=
  fp.seek(filestart, 0)
  if(prechck!="zstd"):
  return UncompressFileAlt(fp, formatspecs, filestart)
- filefp =
+ filefp = MkTempFile("", isbytes=False)
  fp.seek(filestart, 0)
  outstring = UncompressString(fp.read(), formatspecs, 0)
  filefp.write(outstring)
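Note: both `UncompressStringAlt` and `UncompressStringAltFP` now spell the temp buffer as `MkTempFile("", isbytes=False)`. `MkTempFile` itself is defined elsewhere in pycatfile; judging from these call sites, `isbytes=False` yields a text-mode spool for the uncompressed string, roughly:

    import io

    filefp = io.StringIO()  # rough stand-in for MkTempFile("", isbytes=False)
    filefp.write("uncompressed text")
    filefp.seek(0, 0)
    print(filefp.read())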
@@ -9070,9 +8628,7 @@ def _extract_base_fp(obj):
  try:
  f() # probe fileno()
  return cur
- except UnsupportedOperation:
- pass
- except Exception:
+ except (Exception, UnsupportedOperation):
  pass
  for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
  nxt = getattr(cur, attr, None)
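One nit on the merged clause in `_extract_base_fp`: `io.UnsupportedOperation` already subclasses `Exception` (it derives from both `OSError` and `ValueError`), so `except Exception:` alone would catch it and listing both is redundant, though harmless:

    import io

    assert issubclass(io.UnsupportedOperation, Exception)
    assert issubclass(io.UnsupportedOperation, (OSError, ValueError))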
@@ -9464,7 +9020,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw):

  # ========= copy helpers =========

- def fast_copy(infp, outfp, bufsize=
+ def fast_copy(infp, outfp, bufsize=__filebuff_size__):
  """
  Efficient copy from any readable file-like to any writable file-like.
  Uses readinto() when available to avoid extra allocations.
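Note: only the signature of `fast_copy` changes here; per its docstring it prefers `readinto()` to avoid extra allocations. A rough sketch of that technique (not pycatfile's actual implementation, which is not shown in this hunk):

    def fast_copy_sketch(infp, outfp, bufsize=1 << 20):
        """readinto()-style copy: one reusable buffer instead of a fresh
        bytes object per chunk; falls back to plain read() when absent."""
        buf = bytearray(bufsize)
        view = memoryview(buf)
        readinto = getattr(infp, "readinto", None)
        if readinto is None:
            while True:
                chunk = infp.read(bufsize)
                if not chunk:
                    break
                outfp.write(chunk)
            return
        while True:
            n = readinto(buf)
            if not n:
                break
            outfp.write(view[:n])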
@@ -9508,7 +9064,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
  shutil.copyfileobj(fp, outfp, length=chunk_size)


- def copy_opaque(src, dst, bufsize=
+ def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20):
  """
  Copy opaque bytes from 'src' (any readable file-like) to 'dst'
  (your mmap-backed FileLikeAdapter or any writable file-like).
@@ -9570,11 +9126,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,

  try:
  fp.seek(0, 0)
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass

  if (not compression or compression == formatspecs['format_magic']
@@ -9633,11 +9185,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,

  try:
  bytesfp.seek(0, 0)
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  out = FileLikeAdapter(bytesfp, mode="rb") # read interface for the caller
  try:
@@ -9767,31 +9315,18 @@ def CheckSumSupport(checkfor, guaranteed=True):
  try:
  hash_list = sorted(list(hashlib.algorithms_guaranteed))
  except AttributeError:
-
-
-
-
- except AttributeError:
- hash_list = sorted(list(hashlib.algorithms))
- checklistout = sorted(hash_list + ['adler32', 'crc16', 'crc16_ansi', 'crc16_ibm',
- 'crc16_ccitt', 'crc32', 'crc64', 'crc64_ecma', 'crc64_iso', 'none'])
- if(checkfor in checklistout):
- return True
- else:
- return False
-
-
- def CheckSumSupportAlt(checkfor, guaranteed=True):
- if(guaranteed):
- try:
- hash_list = sorted(list(hashlib.algorithms_guaranteed))
- except AttributeError:
- hash_list = sorted(list(hashlib.algorithms))
+ try:
+ hash_list = sorted(list(hashlib.algorithms))
+ except AttributeError:
+ hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
  else:
  try:
  hash_list = sorted(list(hashlib.algorithms_available))
  except AttributeError:
-
+ try:
+ hash_list = sorted(list(hashlib.algorithms))
+ except AttributeError:
+ hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
  checklistout = hash_list
  if(checkfor in checklistout):
  return True
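Note: the rewritten lookup degrades gracefully across interpreter vintages: `hashlib.algorithms_guaranteed` (3.2+), then `hashlib.algorithms` (2.7 only), then lower-cased `hashlib.algorithms_available`. A condensed sketch of the guaranteed branch:

    import hashlib

    def guaranteed_hashes():
        """Mirrors the new CheckSumSupport fallback chain for guaranteed=True."""
        try:
            return sorted(hashlib.algorithms_guaranteed)   # Python 3.2+
        except AttributeError:
            pass
        try:
            return sorted(hashlib.algorithms)              # Python 2.7
        except AttributeError:
            return sorted(a.lower() for a in hashlib.algorithms_available)

    print(guaranteed_hashes())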
@@ -9799,48 +9334,46 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
  return False


- def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+ def PackCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

- def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+ def PackStackedCatFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
  return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

- def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+ def PackCatFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return PackCatFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)


- def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+ def PackCatFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


- def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+ def PackCatFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


  if(not rarfile_support):
- def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+ def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return False
-
-
- def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ else:
+ def PackCatFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


  if(not py7zr_support):
- def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+ def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return False
-
-
- def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+ else:
+ def PackCatFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


- def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+ def PackCatFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
  checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
  if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
  formatspecs = formatspecs[checkcompressfile]
  if(verbose):
- logging.basicConfig(format="%(message)s", stream=
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
  return PackCatFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
  elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
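Note: across these signatures the default moves from four `"crc32"` entries to five `"md5"` entries. The call sites earlier in the diff suggest the positional convention: `checksumtype[0]`/`[1]` feed `AppendFileHeader`, and `checksumtype[2]`–`[4]` feed `AppendFileHeaderWithContent`. A hypothetical call with uniform digests, assuming pycatfile is importable and the input path exists:

    # import pycatfile
    # pycatfile.PackCatFile(["somedir"], "out.cat", checksumtype=["md5"] * 5)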
@@ -9923,7 +9456,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  formatspecs=__file_format_multi_dict__, # keep default like original
  seektoend=False, verbose=False, returnfp=False):
  if(verbose):
- logging.basicConfig(format="%(message)s", stream=
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)

  if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
  formatspecs = formatspecs[fmttype]
@@ -9950,10 +9483,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,

  elif(infile == "-"):
  fp = MkTempFile()
-
- shutil.copyfileobj(sys.stdin.buffer, fp)
- else:
- shutil.copyfileobj(sys.stdin, fp)
+ shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
  fp.seek(filestart, 0)
  fp = UncompressFileAlt(fp, formatspecs, filestart)
  checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
@@ -10030,9 +9560,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,

  try:
  fp.seek(0, 2)
- except OSError:
- SeekToEndOfFile(fp)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(fp)

  CatSize = fp.tell()
@@ -10062,18 +9590,56 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  else:
  inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

- fnumextrafieldsize = int(inheader[
- fnumextrafields = int(inheader[
- extrastart =
+ fnumextrafieldsize = int(inheader[13], 16)
+ fnumextrafields = int(inheader[14], 16)
+ extrastart = 15
  extraend = extrastart + fnumextrafields
  formversion = re.findall("([\\d]+)", formstring)
  fheadsize = int(inheader[0], 16)
  fnumfields = int(inheader[1], 16)
-
- fostype = inheader[3]
- fnumfiles = int(inheader[4], 16)
+ fnumfiles = int(inheader[6], 16)
  fprechecksumtype = inheader[-2]
  fprechecksum = inheader[-1]
+ outfseeknextfile = inheader[7]
+ fjsonsize = int(inheader[10], 16)
+ fjsonchecksumtype = inheader[11]
+ fjsonchecksum = inheader[12]
+ fprejsoncontent = fp.read(fjsonsize)
+ jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+ if(fjsonsize > 0):
+ if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
+ if(verbose):
+ VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
+ VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+ else:
+ valid_archive = False
+ invalid_archive = True
+ if(verbose):
+ VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
+ VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
+ if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+ VerbosePrintOut("File JSON Data Checksum Error with file " +
+ fname + " at offset " + str(fheaderstart))
+ VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+ return False
+ # Next seek directive
+ if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 1)
+ elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+ fseeknextasnum = int(outfseeknextfile)
+ if(abs(fseeknextasnum) == 0):
+ pass
+ fp.seek(fseeknextasnum, 0)
+ else:
+ return False

  il = 0
  headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
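Two observations on the block added above. First, the verbose branches reference `outfjstart`, `outfjsonchecksum`, and `injsonfcs`, names that are not defined among the added lines; they appear to be carried over from the per-file validation loop further down, so those messages presumably rely on definitions outside this hunk. Second, the `fseeknextfile` directive grammar is simple enough to isolate; a rough sketch (the strict `$`-anchored regexes here are an assumption, the original uses prefix matches via `re.findall`):

    import re

    def apply_seek_directive(fp, directive):
        """"+N" -> seek N forward, "-N" -> seek N backward, "N" -> absolute."""
        if re.match(r"^\+[0-9]+$", directive):
            fp.seek(int(directive.lstrip("+")), 1)
        elif re.match(r"^\-[0-9]+$", directive):
            fp.seek(int(directive), 1)
        elif re.match(r"^[0-9]+$", directive):
            fp.seek(int(directive), 0)
        else:
            raise ValueError("bad seek directive: %r" % (directive,))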
@@ -10192,7 +9758,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  VerbosePrintOut(outfname)
  VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))

- if(outfcs
+ if(hmac.compare_digest(outfcs, infcs)):
  if(verbose):
  VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
  VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -10204,7 +9770,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")

  if(outfjsonsize > 0):
- if(outfjsonchecksum
+ if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
  if(verbose):
  VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
  VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -10228,7 +9794,7 @@ def CatFileValidate(infile, fmttype="auto", filestart=0,
  infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
  pyhascontents = True

- if(outfccs
+ if(hmac.compare_digest(outfccs, infccs)):
  if(verbose):
  VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
  VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
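Note: the header, JSON, and content checksum comparisons all switch from a plain comparison (the truncated `-` lines) to `hmac.compare_digest`, which runs in time independent of where the strings first differ and so does not leak how much of a digest matched; this assumes `hmac` is imported elsewhere in the module. A self-contained illustration:

    import hashlib
    import hmac

    data = b"example archive member"
    expected = hashlib.md5(data).hexdigest()
    computed = hashlib.md5(data).hexdigest()
    # Constant-time comparison, unlike ==.
    assert hmac.compare_digest(computed, expected)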
@@ -10301,7 +9867,7 @@ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
  while True:
  if outstartfile >= outfsize: # stop when function signals False
  break
- is_valid_file =
+ is_valid_file = CatFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
  if is_valid_file is False: # stop when function signals False
  outretval.append(is_valid_file)
  break
@@ -10311,9 +9877,7 @@ def StackedCatFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
  outstartfile = infile.tell()
  try:
  infile.seek(0, 2)
- except OSError:
- SeekToEndOfFile(infile)
- except ValueError:
+ except (OSError, ValueError):
  SeekToEndOfFile(infile)
  outfsize = infile.tell()
  infile.seek(outstartfile, 0)
@@ -10389,7 +9953,7 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
  fp = PackCatFileFromTarFile(
- infile, fp, "auto", True, None, compressionlistalt, "
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
  return listarrayfiles
@@ -10400,7 +9964,7 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
  fp = PackCatFileFromZipFile(
- infile, fp, "auto", True, None, compressionlistalt, "
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
  return listarrayfiles
@@ -10416,7 +9980,7 @@ if(rarfile_support):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
  fp = PackCatFileFromRarFile(
- infile, fp, "auto", True, None, compressionlistalt, "
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
  return listarrayfiles
@@ -10431,7 +9995,7 @@ if(py7zr_support):
  formatspecs = formatspecs[checkcompressfile]
  fp = MkTempFile()
  fp = PackCatFileFromSevenZipFile(
- infile, fp, "auto", True, None, compressionlistalt, "
+ infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
  listarrayfiles = CatFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
  return listarrayfiles
@@ -10455,7 +10019,7 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
  return False


- def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["
+ def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
  outarray = MkTempFile()
  packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
  compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10586,7 +10150,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  if compressionuselist is None:
  compressionuselist = compressionlistalt
  if checksumtype is None:
- checksumtype = ["
+ checksumtype = ["md5", "md5", "md5", "md5"]
  if extradata is None:
  extradata = []
  if jsondata is None:
@@ -10673,7 +10237,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  compression = "auto"

  if verbose:
- logging.basicConfig(format="%(message)s", stream=
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)

  # No files?
  if not listarrayfiles.get('ffilelist'):
@@ -10778,7 +10342,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  while ilmin < ilsize:
  cfcontents = MkTempFile()
  fcontents.seek(0, 0)
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  fcontents.seek(0, 0)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
@@ -10796,7 +10360,7 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw

  fcontents.seek(0, 0)
  cfcontents = MkTempFile()
- shutil.copyfileobj(fcontents, cfcontents)
+ shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
  cfcontents.seek(0, 0)
  cfcontents = CompressOpenFileAlt(
  cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
@@ -10895,22 +10459,12 @@ def RePackCatFile(infile, outfile, fmttype="auto", compression="auto", compressw
  fp.flush()
  if hasattr(os, "sync"):
  os.fsync(fp.fileno())
- except io.UnsupportedOperation:
-
- logging.warning("Flush/sync unsupported on this file object.")
- except AttributeError:
- if verbose:
- logging.warning("Flush/sync attributes missing on this file object.")
- except OSError as e:
- if verbose:
- logging.warning("OS error during flush/sync: %s", e)
+ except (io.UnsupportedOperation, AttributeError, OSError):
+ pass

  if outfile == "-":
  fp.seek(0, 0)
-
- shutil.copyfileobj(fp, sys.stdout.buffer)
- else:
- shutil.copyfileobj(fp, sys.stdout)
+ shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
  elif outfile is None:
  fp.seek(0, 0)
  outvar = fp.read()
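Worth noting: unlike the other flush/sync consolidations in this diff, this one also drops the `verbose`-gated `logging.warning` diagnostics that `RePackCatFile` previously emitted, so flush/sync failures are now silently ignored here as well.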
@@ -10949,14 +10503,14 @@ def RePackMultipleCatFile(infiles, outfile, fmttype="auto", compression="auto",
  return True
  return returnout

- def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["
+ def RePackCatFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
  fp = MkTempFile(instr)
  listarrayfiles = RePackCatFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
  checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
  return listarrayfiles


- def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["
+ def PackCatFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
  outarray = MkTempFile()
  packform = PackCatFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
  compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10969,7 +10523,7 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
  if(outdir is not None):
  outdir = RemoveWindowsPath(outdir)
  if(verbose):
- logging.basicConfig(format="%(message)s", stream=
+ logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
  if(isinstance(infile, dict)):
  listarrayfiles = infile
  else:
@@ -11019,16 +10573,12 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
  listarrayfiles['ffilelist'][lcfi]['fcontents'])
  listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
  shutil.copyfileobj(
- listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
+ listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc, length=__filebuff_size__)
  try:
  fpc.flush()
  if(hasattr(os, "sync")):
  os.fsync(fpc.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
  os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
@@ -11070,16 +10620,12 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
  flinkinfo['fcontents'] = MkTempFile(
  flinkinfo['fcontents'])
  flinkinfo['fcontents'].seek(0, 0)
- shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+ shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
  try:
  fpc.flush()
  if(hasattr(os, "sync")):
  os.fsync(fpc.fileno())
- except io.UnsupportedOperation:
- pass
- except AttributeError:
- pass
- except OSError:
+ except (io.UnsupportedOperation, AttributeError, OSError):
  pass
  if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
  os.chown(PrependPath(
@@ -11149,16 +10695,12 @@ def UnPackCatFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
                         flinkinfo['fcontents'] = MkTempFile(
                             flinkinfo['fcontents'])
                         flinkinfo['fcontents'].seek(0, 0)
-                        shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+                        shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
                         try:
                             fpc.flush()
                             if(hasattr(os, "sync")):
                                 os.fsync(fpc.fileno())
-                        except io.UnsupportedOperation:
-                            pass
-                        except AttributeError:
-                            pass
-                        except OSError:
+                        except (io.UnsupportedOperation, AttributeError, OSError):
                             pass
                     if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
                         os.chown(PrependPath(
@@ -11245,7 +10787,7 @@ def ftype_to_str(ftype):
 
 def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfileslist = [infile]
     if(isinstance(infile, list)):
@@ -11253,7 +10795,7 @@ def CatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
     else:
         if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
             infile = RemoveWindowsPath(infile)
-        listarrayfileslist =
+        listarrayfileslist = CatFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
     if(not listarrayfileslist):
         return False
     for listarrayfiles in listarrayfileslist:
@@ -11326,9 +10868,7 @@ def StackedCatFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
     outstartfile = infile.tell()
     try:
         infile.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infile)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infile)
     outfsize = infile.tell()
     infile.seek(outstartfile, 0)
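
The same tuple-clause consolidation applies to the end-of-file probe: seeking to the end raises OSError or ValueError on pipe-like objects, and both cases now share one fallback. A minimal sketch of the pattern, assuming a SeekToEndOfFile-style helper that drains unseekable streams:

    def end_offset(fp):
        # seekable files can jump straight to the end; pipe-like objects
        # raise OSError or ValueError and must be drained by the caller
        try:
            fp.seek(0, 2)
        except (OSError, ValueError):
            return None  # signal: drain the stream instead
        return fp.tell()
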
@@ -11358,13 +10898,10 @@ def CatFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck
 
 def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
-
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
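
Reading "-" now funnels stdin through the PY_STDIN_BUF alias, replacing the old two-branch version sniff. A self-contained sketch of the same slurp, with a stand-in buffer size:

    import shutil
    import sys
    import tempfile

    BUFSIZE = 1024 * 1024  # stand-in for __filebuff_size__

    def slurp_stdin():
        # binary-safe on Python 3 via sys.stdin.buffer, degrading to the
        # plain stream where no .buffer attribute exists
        stdin_buf = getattr(sys.stdin, "buffer", sys.stdin)
        tmp = tempfile.TemporaryFile()
        shutil.copyfileobj(stdin_buf, tmp, length=BUFSIZE)
        tmp.seek(0, 0)
        return tmp
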
@@ -11483,13 +11020,10 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 
 def ZipFileListFiles(infile, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
-
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
@@ -11566,24 +11100,18 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
         printfname = member.filename
         try:
             fuid = int(os.getuid())
-        except AttributeError:
-            fuid = int(0)
-        except KeyError:
+        except (KeyError, AttributeError):
             fuid = int(0)
         try:
             fgid = int(os.getgid())
-        except AttributeError:
-            fgid = int(0)
-        except KeyError:
+        except (KeyError, AttributeError):
             fgid = int(0)
         try:
             import pwd
             try:
                 userinfo = pwd.getpwuid(os.getuid())
                 funame = userinfo.pw_name
-            except KeyError:
-                funame = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 funame = ""
         except ImportError:
             funame = ""
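
The owner-lookup fallbacks in the ZIP, RAR, and 7z listers all collapse the same way: one tuple clause instead of two handlers with identical bodies. A standalone sketch of the portable uid lookup, keeping the same defaults:

    import os

    def safe_uid():
        # os.getuid() does not exist on Windows, so attribute access raises
        # AttributeError; KeyError is kept for parity with the old handlers
        try:
            return int(os.getuid())
        except (KeyError, AttributeError):
            return 0
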
@@ -11593,9 +11121,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
             try:
                 groupinfo = grp.getgrgid(os.getgid())
                 fgname = groupinfo.gr_name
-            except KeyError:
-                fgname = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 fgname = ""
         except ImportError:
             fgname = ""
@@ -11621,7 +11147,7 @@ if(not rarfile_support):
 if(rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
         if(verbose):
-            logging.basicConfig(format="%(message)s", stream=
+            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11704,24 +11230,18 @@ if(rarfile_support):
             printfname = member.filename
             try:
                 fuid = int(os.getuid())
-            except AttributeError:
-                fuid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = int(0)
             try:
                 fgid = int(os.getgid())
-            except AttributeError:
-                fgid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = int(0)
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -11731,9 +11251,7 @@ if(rarfile_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -11758,7 +11276,7 @@ if(not py7zr_support):
 if(py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
         if(verbose):
-            logging.basicConfig(format="%(message)s", stream=
+            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         lcfi = 0
@@ -11811,24 +11329,18 @@ if(py7zr_support):
             file_content[member.filename].close()
             try:
                 fuid = int(os.getuid())
-            except AttributeError:
-                fuid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = int(0)
             try:
                 fgid = int(os.getgid())
-            except AttributeError:
-                fgid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = int(0)
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -11838,9 +11350,7 @@ if(py7zr_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -11861,7 +11371,7 @@ if(py7zr_support):
 
 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
@@ -11880,7 +11390,7 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
         return False
 
 
-def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["
+def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackCatFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
                            compressionlevel, followlink, checksumtype, formatspecs, False, True)
@@ -11892,19 +11402,19 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
 PyNeoFile compatibility layer
 """
 
-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
 
-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
     return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
 
-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
     return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
 
-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
     return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
 
-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
     return PackCatFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
 
 def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
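
The PyNeoFile shims likewise move to "md5" defaults across all four checksum slots. A hedged usage sketch; the paths are placeholders, and the availability of other digest names depends on what pycatfile supports at runtime:

    import pycatfile

    # pack with the new md5 defaults ("./docs" and "data.neo" are placeholders)
    pycatfile.pack_neo(["./docs"], "data.neo")
    # ...or override every slot, assuming "sha256" is an accepted name
    pycatfile.pack_neo(["./docs"], "data.neo",
                       checksumtypes=["sha256", "sha256", "sha256", "sha256"])
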
@@ -11913,7 +11423,7 @@ def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonl
 def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
     return UnPackCatFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
 
-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["
+def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
     return RePackCatFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
 
 def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
@@ -11922,7 +11432,7 @@ def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False,
 def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
     return CatFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
 
-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
     intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
     return RePackCatFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
 
@@ -11964,10 +11474,7 @@ def download_file_from_ftp_file(url):
     ftp_port = 21
     try:
         ftp.connect(urlparts.hostname, ftp_port)
-    except socket.gaierror:
-        log.info("Error With URL "+url)
-        return False
-    except socket.timeout:
+    except (socket.gaierror, socket.timeout):
         log.info("Error With URL "+url)
         return False
     if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
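
DNS failures and timeouts already shared identical bodies, so the FTP helpers now catch them together. A self-contained sketch of the connect step, assuming plain ftplib:

    import socket
    from ftplib import FTP

    def try_ftp_connect(host, port=21):
        ftp = FTP()
        try:
            ftp.connect(host, port)
        except (socket.gaierror, socket.timeout):
            # unresolvable host and unresponsive host get the same treatment
            return None
        return ftp
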
@@ -12055,10 +11562,7 @@ def upload_file_to_ftp_file(ftpfile, url):
     ftp_port = 21
     try:
         ftp.connect(urlparts.hostname, ftp_port)
-    except socket.gaierror:
-        log.info("Error With URL "+url)
-        return False
-    except socket.timeout:
+    except (socket.gaierror, socket.timeout):
         log.info("Error With URL "+url)
         return False
     if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12169,7 +11673,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             response = requests.get(rebuilt_url, headers=headers, timeout=(5, 30), stream=True)
         response.raw.decode_content = True
-        shutil.copyfileobj(response.raw, httpfile)
+        shutil.copyfileobj(response.raw, httpfile, length=__filebuff_size__)
 
     # 2) HTTPX branch
     elif usehttp == 'httpx' and havehttpx:
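
Each HTTP download branch gains the same explicit buffer size. A sketch of the requests branch in isolation; the buffer value is a placeholder:

    import shutil

    import requests  # optional dependency, as in the branch above

    def fetch(url, fileobj, bufsize=1024 * 1024):
        # stream=True keeps the body out of memory; decode_content=True
        # makes response.raw yield decompressed bytes for copyfileobj
        response = requests.get(url, timeout=(5, 30), stream=True)
        response.raw.decode_content = True
        shutil.copyfileobj(response.raw, fileobj, length=bufsize)
        fileobj.seek(0, 0)
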
@@ -12181,7 +11685,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             response = client.get(rebuilt_url, headers=headers)
         raw_wrapper = RawIteratorWrapper(response.iter_bytes())
-        shutil.copyfileobj(raw_wrapper, httpfile)
+        shutil.copyfileobj(raw_wrapper, httpfile, length=__filebuff_size__)
 
     # 3) Mechanize branch
     elif usehttp == 'mechanize' and havemechanize:
@@ -12200,7 +11704,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 
         # Open the URL and copy the response to httpfile
         response = br.open(rebuilt_url)
-        shutil.copyfileobj(response, httpfile)
+        shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
 
     # 4) Fallback to urllib
     else:
@@ -12213,7 +11717,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             opener = build_opener()
         response = opener.open(request)
-        shutil.copyfileobj(response, httpfile)
+        shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
 
     # Reset file pointer to the start before returning
     httpfile.seek(0, 0)
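
The urllib fallback gets the same treatment. A dependency-free sketch of that branch (Python 3 imports; the URL rebuilding and proxy wiring above are elided here):

    import shutil
    from urllib.request import Request, build_opener

    def fetch_with_urllib(url, fileobj, headers=None, bufsize=1024 * 1024):
        request = Request(url, headers=headers or {})
        response = build_opener().open(request)
        # same chunked copy as the other branches, one explicit buffer size
        shutil.copyfileobj(response, fileobj, length=bufsize)
        fileobj.seek(0, 0)
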
@@ -12346,7 +11850,7 @@ def upload_file_to_http_file(
             fileobj.seek(0)
         except Exception:
             pass
-        shutil.copyfileobj(fileobj, buf)
+        shutil.copyfileobj(fileobj, buf, length=__filebuff_size__)
 
         _w('\r\n')
     _w('--' + boundary + '--\r\n')
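
The multipart writer streams each payload into the body buffer the same way. A hypothetical helper mirroring that step; `append_part` and `bufsize` are names invented for the sketch:

    import shutil

    def append_part(buf, fileobj, bufsize=1024 * 1024):
        # rewind when the source allows it, then stream in large chunks
        try:
            fileobj.seek(0)
        except Exception:
            pass
        shutil.copyfileobj(fileobj, buf, length=bufsize)
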
@@ -12435,10 +11939,7 @@ if(haveparamiko):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
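
The four SFTP helpers (paramiko and pysftp, download and upload) consolidate the same pair of socket errors. A minimal paramiko-only sketch of the connect-and-open step; the host key policy and credential handling are simplified assumptions:

    import socket

    import paramiko  # optional dependency, as in the haveparamiko branch

    def open_sftp(host, port, user, password):
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(host, port=port, username=user, password=password)
        except paramiko.ssh_exception.SSHException:
            return None
        except (socket.gaierror, socket.timeout):
            return None
        return ssh.open_sftp()
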
@@ -12492,10 +11993,7 @@ if(haveparamiko):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
@@ -12546,10 +12044,7 @@ if(havepysftp):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftpfile = MkTempFile()
@@ -12599,10 +12094,7 @@ if(havepysftp):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftpfile.seek(0, 0)