PyArchiveFile 0.24.6__py3-none-any.whl → 0.25.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyarchivefile-0.24.6.data → pyarchivefile-0.25.2.data}/scripts/archivefile.py +12 -12
- {pyarchivefile-0.24.6.dist-info → pyarchivefile-0.25.2.dist-info}/METADATA +2 -2
- pyarchivefile-0.25.2.dist-info/RECORD +10 -0
- pyarchivefile.py +664 -1170
- pyarchivefile-0.24.6.dist-info/RECORD +0 -10
- {pyarchivefile-0.24.6.data → pyarchivefile-0.25.2.data}/scripts/archiveneofile.py +0 -0
- {pyarchivefile-0.24.6.data → pyarchivefile-0.25.2.data}/scripts/neoarchivefile.py +0 -0
- {pyarchivefile-0.24.6.dist-info → pyarchivefile-0.25.2.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.24.6.dist-info → pyarchivefile-0.25.2.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.24.6.dist-info → pyarchivefile-0.25.2.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.24.6.dist-info → pyarchivefile-0.25.2.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski
 
-$FileInfo: pyarchivefile.py - Last Update: 11/
+$FileInfo: pyarchivefile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''
 
 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -73,6 +73,17 @@ except ImportError:
 except ImportError:
     import json
 
+testyaml = False
+try:
+    import oyaml as yaml
+    testyaml = True
+except ImportError:
+    try:
+        import yaml
+        testyaml = True
+    except ImportError:
+        testyaml = False
+
 try:
     import configparser
 except ImportError:
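
The new YAML block follows the same probe-and-flag pattern this module already uses for py7zr, paramiko, and friends: prefer oyaml (order-preserving), fall back to PyYAML, and record availability in `testyaml`. A minimal sketch of how callers can gate on the flag; `load_header_yaml` is a hypothetical helper, not part of the package:

    def load_header_yaml(raw):
        # Gate on the module-level flag instead of re-importing yaml
        # at every call site; degrade to an empty dict when absent.
        if not testyaml:
            return {}
        return yaml.safe_load(raw) or {}
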
@@ -115,6 +126,16 @@ else:
     bytes_type = bytes
     text_type = str
 
+# Text streams (as provided by Python)
+PY_STDIN_TEXT = sys.stdin
+PY_STDOUT_TEXT = sys.stdout
+PY_STDERR_TEXT = sys.stderr
+
+# Binary-friendly streams (use .buffer on Py3, fall back on Py2)
+PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
+PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
+PY_STDERR_BUF = getattr(sys.stderr, "buffer", sys.stderr)
+
 # Text vs bytes tuples you can use with isinstance()
 TEXT_TYPES = (basestring,)  # "str or unicode" on Py2, "str" on Py3
 BINARY_TYPES = (bytes,) if not PY2 else (str,)  # bytes on Py3, str on Py2
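
The `PY_*_BUF` names capture a standard Py2/Py3 portability idiom: on Python 3, `getattr` returns the underlying binary `.buffer`; on Python 2 it falls back to the stream itself, which already accepts bytes. A minimal standalone sketch:

    import sys

    # Binary-safe stdout on both Python 2 and 3.
    out = getattr(sys.stdout, "buffer", sys.stdout)
    out.write(b"raw bytes, no implicit text encoding\n")
    out.flush()
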
@@ -211,12 +232,6 @@ if sys.version_info[0] == 2:
     except (NameError, AttributeError):
         pass
 
-# CRC32 import
-try:
-    from zlib import crc32
-except ImportError:
-    from binascii import crc32
-
 # Define FileNotFoundError for Python 2
 try:
     FileNotFoundError
@@ -251,9 +266,7 @@ py7zr_support = False
 try:
     import py7zr
     py7zr_support = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # TAR file checking
@@ -279,9 +292,7 @@ haveparamiko = False
 try:
     import paramiko
     haveparamiko = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # PySFTP support
@@ -289,9 +300,7 @@ havepysftp = False
 try:
     import pysftp
     havepysftp = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # Add the mechanize import check
@@ -299,9 +308,7 @@ havemechanize = False
 try:
     import mechanize
     havemechanize = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # Requests support
@@ -311,9 +318,7 @@ try:
     haverequests = True
     import urllib3
     logging.getLogger("urllib3").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # HTTPX support
@@ -323,9 +328,7 @@ try:
     havehttpx = True
     logging.getLogger("httpx").setLevel(logging.WARNING)
     logging.getLogger("httpcore").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass
 
 # HTTP and URL parsing
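
The six hunks above make the same mechanical change: two separate handlers that both did nothing are collapsed into one tuple clause, which is behaviorally identical. A minimal sketch of the pattern (the module name is illustrative only):

    havefoo = False
    try:
        import foo  # hypothetical optional dependency
        havefoo = True
    except (ImportError, OSError):
        # ImportError: package missing; OSError: e.g. a broken native lib
        # pulled in at import time. Either way, the feature stays off.
        pass
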
@@ -416,9 +419,14 @@ __include_defaults__ = True
 __use_inmemfile__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
-
-
+BYTES_PER_KiB = 1024
+BYTES_PER_MiB = 1024 * BYTES_PER_KiB
+# Spool: not tiny, but won’t blow up RAM if many are in use
+DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB  # 4 MiB per spooled temp file
 __spoolfile_size__ = DEFAULT_SPOOL_MAX
+# Buffer: bigger than stdlib default (16 KiB), but still modest
+DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # 256 KiB copy buffer
+__filebuff_size__ = DEFAULT_BUFFER_MAX
 __program_name__ = "Py"+__file_format_default__
 __use_env_file__ = True
 __use_ini_file__ = True
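
The 4 MiB spool threshold feeds `tempfile.SpooledTemporaryFile`, which keeps data in memory until the limit is crossed and then transparently rolls over to a real temp file on disk. A minimal sketch of that behavior:

    import tempfile

    BYTES_PER_MIB = 1024 * 1024
    spool = tempfile.SpooledTemporaryFile(max_size=4 * BYTES_PER_MIB)
    spool.write(b"x" * (5 * BYTES_PER_MIB))  # crossing 4 MiB rolls over to disk
    spool.seek(0)
    data = spool.read()  # reads back transparently either way
    spool.close()
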
@@ -634,12 +642,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 25, 2, "RC 1", 1)
+__version_date_info__ = (2025, 11, 6, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: ac98f2d8fa689bbcf7939f892030aad675a7d5fe $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
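
For reference, the `zfill(2)` calls render the date tuple as a zero-padded string, with the counter in the last slot appended when set; worked out with the new values:

    __version_date_info__ = (2025, 11, 6, "RC 1", 1)
    __version_date__ = str(__version_date_info__[0]) + "." + \
        str(__version_date_info__[1]).zfill(2) + "." + \
        str(__version_date_info__[2]).zfill(2)
    assert __version_date__ == "2025.11.06"
    # With slot [4] set, the "+rc" form becomes "2025.11.06-1".
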
@@ -790,7 +798,7 @@ geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prour
 if(platform.python_implementation() != ""):
     py_implementation = platform.python_implementation()
 if(platform.python_implementation() == ""):
-    py_implementation = "
+    py_implementation = "CPython"
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -2378,7 +2386,7 @@ def GetTotalSize(file_list):
         try:
             total_size += os.path.getsize(item)
         except OSError:
-
+            PY_STDERR_TEXT.write("Error accessing file {}: {}\n".format(item, e))
     return total_size
 
 
@@ -2615,7 +2623,7 @@ class ZlibFile(object):
         scanned_leading = 0  # for tolerant header scan
 
         while True:
-            data = self.file.read(
+            data = self.file.read(__filebuff_size__)  # 1 MiB blocks
             if not data:
                 if d is not None:
                     self._spool.write(d.flush())
@@ -2773,7 +2781,7 @@ class ZlibFile(object):
 
         # Buffer and compress in chunks to limit memory
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # 1 MiB threshold
             chunk = self._compressor.compress(bytes(self._write_buf))
             if chunk:
                 self.file.write(chunk)
@@ -2883,7 +2891,7 @@ class ZlibFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)
 
     # compatibility aliases for unwrapping utilities
@@ -2919,7 +2927,7 @@ def compress_bytes(payload, level=6, wbits=15, text=False, **kw):
         out = compress_bytes(b"hello")
         out = compress_bytes(u"hello\n", text=True, encoding="utf-8", newline="\n")
     """
-    bio =
+    bio = MkTempFile()
    mode = 'wt' if text else 'wb'
    f = ZlibFile(fileobj=bio, mode=mode, level=level, wbits=wbits, **kw)
    try:
@@ -3078,7 +3086,7 @@ class GzipFile(object):
 
         self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)
 
-        CHUNK =
+        CHUNK = __filebuff_size__
         pending = b""
         d = None
         absolute_offset = 0
@@ -3241,7 +3249,7 @@ class GzipFile(object):
 
         # Stage and compress in chunks
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # 1 MiB threshold
             out = self._compressor.compress(bytes(self._write_buf))
             if out:
                 self.file.write(out)
@@ -3341,7 +3349,7 @@ class GzipFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)
 
     # compatibility aliases for unwrapping utilities
@@ -3383,7 +3391,7 @@ def gzip_compress_bytes(payload, level=6, text=False, **kw):
     - text=False: 'payload' must be bytes-like; written via GzipFile('wb')
       You can pass newline/encoding/errors to control text encoding.
     """
-    bio =
+    bio = MkTempFile()
    mode = 'wt' if text else 'wb'
    gf = GzipFile(fileobj=bio, mode=mode, level=level, **kw)
    try:
@@ -3615,53 +3623,6 @@ def crc_generic(msg, width, poly, init, xorout, refin, refout):
         crc = _reflect(crc, width)
     return (crc ^ xorout) & mask
 
-# =========================
-# Named CRCs
-# =========================
-# CRC-16/ANSI (ARC/MODBUS family with init=0xFFFF by default)
-def crc16_ansi(msg, initial_value=0xFFFF):
-    return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)
-
-def crc16_ibm(msg, initial_value=0xFFFF):
-    return crc16_ansi(msg, initial_value)
-
-def crc16(msg):
-    return crc16_ansi(msg, 0xFFFF)
-
-def crc16_ccitt(msg, initial_value=0xFFFF):
-    # CCITT-FALSE
-    return crc_generic(msg, 16, 0x1021, initial_value & 0xFFFF, 0x0000, False, False)
-
-def crc16_x25(msg):
-    return crc_generic(msg, 16, 0x1021, 0xFFFF, 0xFFFF, True, True)
-
-def crc16_kermit(msg):
-    return crc_generic(msg, 16, 0x1021, 0x0000, 0x0000, True, True)
-
-def crc64_ecma(msg, initial_value=0x0000000000000000):
-    return crc_generic(msg, 64, 0x42F0E1EBA9EA3693,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0x0000000000000000, False, False)
-
-def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
-    return crc_generic(msg, 64, 0x000000000000001B,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0xFFFFFFFFFFFFFFFF, True, True)
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
 # --- helpers --------------------------------------------------------------
 
 try:
@@ -3702,206 +3663,15 @@ def _bytes_to_int(b):
         value = (value << 8) | ch
     return value
 
-
-# --- your existing CRCContext (unchanged) ---------------------------------
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-
-# --- hashlib-backed implementation ---------------------------------------
-
-class _HashlibCRCWrapper(object):
-    """
-    Wrap a hashlib object to present the same interface as CRCContext
-    (update, digest_int, hexdigest).
-
-    Assumes the hashlib algorithm already implements the exact CRC
-    specification (refin/refout/xorout/etc.).
-    """
-    __slots__ = ("_h", "spec", "mask", "width_hex")
-
-    def __init__(self, algo_name, spec):
-        self._h = hashlib.new(algo_name)
-        self.spec = spec
-        self.mask = (1 << spec.width) - 1
-        self.width_hex = (spec.width + 3) // 4
-
-    def update(self, data):
-        self._h.update(_coerce_bytes(data))
-        return self
-
-    def digest_int(self):
-        # Convert final digest bytes to an integer and mask to width
-        value = _bytes_to_int(self._h.digest())
-        return value & self.mask
-
-    def hexdigest(self):
-        h = self._h.hexdigest().lower()
-        # Normalize to the same number of hex digits as CRCContext
-        if len(h) < self.width_hex:
-            h = ("0" * (self.width_hex - len(h))) + h
-        elif len(h) > self.width_hex:
-            h = h[-self.width_hex:]
-        return h
-
-
-# --- public class: choose hashlib or fallback -----------------------------
-
-class CRC(object):
-    """
-    CRC wrapper that uses hashlib if available, otherwise falls back to
-    the pure-Python CRCContext.
-
-    spec.hashlib_name (preferred) or spec.name is used as the hashlib
-    algorithm name, e.g. 'crc32', 'crc32c', etc.
-    """
-
-    __slots__ = ("spec", "_impl")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-        algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
-        impl = None
-
-        if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
-            # Use hashlib-backed implementation
-            impl = _HashlibCRCWrapper(algo_name, spec)
-        else:
-            # Fallback to your pure-Python implementation
-            impl = CRCContext(spec)
-
-        self._impl = impl
-
-    def update(self, data):
-        self._impl.update(data)
-        return self
-
-    def digest_int(self):
-        return self._impl.digest_int()
-
-    def hexdigest(self):
-        return self._impl.hexdigest()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()
 
     delim = formatspecs.get('format_delimiter', u"\0")
     hdr_bytes = _serialize_header_fields(inlist or [], delim)
@@ -3909,260 +3679,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, format
         hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)
 
-    if algo_key
-
-
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
+        h = hashlib.new(algo_key)
+        h.update(hdr_bytes)
+        return h.hexdigest().lower()
 
     return "0"
 
-def GetFileChecksum(
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
     - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
     - Falls back to one-shot for non-file-like inputs.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()
 
     # file-like streaming
-    if hasattr(
+    if hasattr(inbytes, "read"):
         # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-            h = hashlib.new(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                h.update(chunk)
-            return h.hexdigest().lower()
 
-
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
-        # not known streaming algo: fallback to one-shot bytes
-        data = instr.read()
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-    else:
-        data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
-        data = bytes(data)
-
-    # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(data)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in memoryview(data).tobytes():
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in memoryview(data).tobytes():
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
-# =========================
-# Public checksum API
-# =========================
-def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
-    or a single field) and compute the requested checksum. Returns lowercase hex.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    delim = formatspecs.get('format_delimiter', u"\0")
-    hdr_bytes = _serialize_header_fields(inlist or [], delim)
-    if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
-        hdr_bytes = _to_bytes(hdr_bytes)
-    hdr_bytes = bytes(hdr_bytes)
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, hdr_bytes)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Accepts bytes/str/file-like.
-    - Hashlib algos: streamed in 1 MiB chunks.
-    - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
-    - Falls back to one-shot for non-file-like inputs.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    # file-like streaming
-    if hasattr(instr, "read"):
-        # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
+        if CheckSumSupport(algo_key, hashlib_guaranteed):
             h = hashlib.new(algo_key)
             while True:
-                chunk =
+                chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
                     break
                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
@@ -4170,49 +3710,31 @@ def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__
                 h.update(chunk)
             return h.hexdigest().lower()
 
-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
         # not known streaming algo: fallback to one-shot bytes
-        data =
+        data = inbytes.read()
         if not isinstance(data, (bytes, bytearray, memoryview)):
            data = bytes(bytearray(data))
     else:
-        data = _to_bytes(
+        data = _to_bytes(inbytes) if (encodedata or not isinstance(inbytes, (bytes, bytearray, memoryview))) else inbytes
         data = bytes(data)
 
     # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
 
-    if
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
         h = hashlib.new(algo_key)
         h.update(data)
         return h.hexdigest().lower()
 
     return "0"
 
-def ValidateHeaderChecksum(inlist=None, checksumtype="
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
     return hmac.compare_digest(want, calc)
 
-def ValidateFileChecksum(infile, checksumtype="
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
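
The rewritten checksum path drops the CRC dispatch tables entirely: everything now routes through hashlib, streamed in `__filebuff_size__` chunks, and verification compares digests with `hmac.compare_digest` so it runs in constant time. A self-contained sketch of the same flow, under the assumption that md5 stands in for the format's configurable algorithm:

    import hashlib
    import hmac

    CHUNK = 256 * 1024  # stands in for __filebuff_size__

    def stream_checksum(fp, algo="md5"):
        # Mirrors the streaming branch of GetFileChecksum (sketch).
        h = hashlib.new(algo)
        while True:
            chunk = fp.read(CHUNK)
            if not chunk:
                break
            h.update(chunk)
        return h.hexdigest().lower()

    def matches(stored_hex, fp, algo="md5"):
        # Constant-time comparison, as the Validate* helpers now do.
        return hmac.compare_digest(stored_hex.lower(), stream_checksum(fp, algo))
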
@@ -4259,66 +3781,6 @@ def GetDataFromArrayAlt(structure, path, default=None):
     return element
 
 
-def GetHeaderChecksum(inlist=[], checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    fileheader = AppendNullBytes(inlist, formatspecs['format_delimiter']) if isinstance(
-        inlist, list) else AppendNullByte(inlist, formatspecs['format_delimiter'])
-    if encodedata and hasattr(fileheader, "encode"):
-        fileheader = fileheader.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](fileheader)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(fileheader)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    if encodedata and hasattr(instr, "encode"):
-        instr = instr.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](instr)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(instr)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    infileheadercshex = GetHeaderChecksum(
-        inlist, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == infileheadercshex
-
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    catinfilecshex = GetFileChecksum(
-        infile, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == catinfilecshex
-
-
 # ========= pushback-aware delimiter reader =========
 class _DelimiterReader(object):
     """
@@ -4651,7 +4113,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    fheaderstart = fp.tell()
     if(formatspecs['new_style']):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
@@ -4674,22 +4135,74 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     fjsonchecksumtype = HeaderOut[30]
     fjsonchecksum = HeaderOut[31]
     fjsoncontent = {}
-
-
-
-
-    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
             try:
-
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-
-
-
-
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
         fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
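
The header-metadata reader now tries base64-wrapped JSON first, then plain JSON, then gives up with an empty dict; the new YAML branch mirrors the same ladder with `yaml.safe_load`. A condensed sketch of the JSON ladder; `decode_header_json` is a hypothetical standalone helper, not the package's API:

    import base64
    import binascii
    import json

    def decode_header_json(raw_text):
        try:
            return json.loads(base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, ValueError, UnicodeDecodeError):
            try:
                return json.loads(raw_text)  # maybe it was never base64-wrapped
            except ValueError:               # JSONDecodeError subclasses ValueError
                return {}
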
@@ -4703,8 +4216,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
                        fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
         return False
-    fhend = fp.tell() - 1
-    fcontentstart = fp.tell()
     fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -4718,9 +4229,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4733,10 +4244,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             cfcontents = UncompressFileAlt(fcontents, formatspecs)
             cfcontents.seek(0, 0)
             fcontents = MkTempFile()
-            shutil.copyfileobj(cfcontents, fcontents)
+            shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
             cfcontents.close()
             fcontents.seek(0, 0)
-    fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
         if(abs(fseeknextasnum) == 0):
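
The `length` argument to `shutil.copyfileobj` sets the per-read copy buffer size, so the decompression copies above now move data in 256 KiB reads instead of the small stdlib default. A minimal sketch with a literal standing in for `__filebuff_size__`:

    import shutil

    def copy_stream(src, dst, bufsize=256 * 1024):
        # Same call the diff makes, with the buffer size made explicit.
        shutil.copyfileobj(src, dst, length=bufsize)
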
@@ -4844,6 +4354,33 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         fprejsoncontent = ""
         fjsonrawcontent = fprejsoncontent
         fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         flisttmp = MkTempFile()
@@ -4866,7 +4403,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     fp.seek(len(delimiter), 1)
     fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4898,9 +4435,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4914,7 +4451,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
@@ -5030,6 +4567,33 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         fprejsoncontent = ""
         fjsonrawcontent = fprejsoncontent
         fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         flisttmp = MkTempFile()
@@ -5051,7 +4615,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
                pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -5083,8 +4647,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
-    if(fccs
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -5098,11 +4662,11 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
-            fcontents
+            fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5136,9 +4700,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5167,7 +4729,30 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
-    fnumfiles = int(inheader[
+    fnumfiles = int(inheader[6], 16)
+    outfseeknextfile = inheaderdata[7]
+    fjsonsize = int(inheaderdata[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fp.read(fjsonsize)
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
     countnum = 0
     flist = []
     while(countnum < fnumfiles):
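
The new inline block interprets the archive's "seek next file" directive: a leading "+" or "-" means a relative seek from the current position, a bare number an absolute offset, and anything else is treated as a corrupt header. An equivalent standalone sketch; `apply_seek_directive` is a hypothetical name:

    import re

    def apply_seek_directive(fp, directive):
        if re.match(r"^\+[0-9]+$", directive):
            fp.seek(int(directive), 1)   # relative, forward
        elif re.match(r"^-[0-9]+$", directive):
            fp.seek(int(directive), 1)   # relative, backward
        elif re.match(r"^[0-9]+$", directive):
            fp.seek(int(directive), 0)   # absolute offset
        else:
            return False                 # unrecognized: corrupt header
        return True
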
@@ -5187,9 +4772,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5208,10 +4791,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -5230,7 +4813,106 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
|
|
|
5230
4813
|
fnumfields = int(inheader[1], 16)
|
|
5231
4814
|
fhencoding = inheader[2]
|
|
5232
4815
|
fostype = inheader[3]
|
|
5233
|
-
|
|
4816
|
+
fpythontype = inheader[4]
|
|
4817
|
+
fprojectname = inheader[4]
|
|
4818
|
+
fnumfiles = int(inheader[6], 16)
|
|
4819
|
+
fseeknextfile = inheader[7]
|
|
4820
|
+
fjsontype = inheader[8]
|
|
4821
|
+
fjsonlen = int(inheader[9], 16)
|
|
4822
|
+
fjsonsize = int(inheader[10], 16)
|
|
4823
|
+
fjsonchecksumtype = inheader[11]
|
|
4824
|
+
fjsonchecksum = inheader[12]
|
|
4825
|
+
fjsoncontent = {}
|
|
4826
|
+
fjstart = fp.tell()
|
|
4827
|
+
if(fjsontype=="json"):
|
|
4828
|
+
fjsoncontent = {}
|
|
4829
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4830
|
+
if(fjsonsize > 0):
|
|
4831
|
+
try:
|
|
4832
|
+
fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
|
|
4833
|
+
fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
|
|
4834
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4835
|
+
try:
|
|
4836
|
+
fjsonrawcontent = fprejsoncontent
|
|
4837
|
+
fjsoncontent = json.loads(fprejsoncontent)
|
|
4838
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4839
|
+
fprejsoncontent = ""
|
|
4840
|
+
fjsonrawcontent = fprejsoncontent
|
|
4841
|
+
fjsoncontent = {}
|
|
4842
|
+
else:
|
|
4843
|
+
fprejsoncontent = ""
|
|
4844
|
+
fjsonrawcontent = fprejsoncontent
|
|
4845
|
+
fjsoncontent = {}
|
|
4846
|
+
elif(testyaml and fjsontype == "yaml"):
|
|
4847
|
+
fjsoncontent = {}
|
|
4848
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4849
|
+
if (fjsonsize > 0):
|
|
4850
|
+
try:
|
|
4851
|
+
# try base64 → utf-8 → YAML
|
|
4852
|
+
fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
|
|
4853
|
+
fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
|
|
4854
|
+
except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
|
|
4855
|
+
try:
|
|
4856
|
+
# fall back to treating the bytes as plain text YAML
|
|
4857
|
+
fjsonrawcontent = fprejsoncontent
|
|
4858
|
+
fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
|
|
4859
|
+
except (UnicodeDecodeError, yaml.YAMLError):
|
|
4860
|
+
# final fallback: empty
|
|
4861
|
+
fprejsoncontent = ""
|
|
4862
|
+
fjsonrawcontent = fprejsoncontent
|
|
4863
|
+
fjsoncontent = {}
|
|
4864
|
+
else:
|
|
4865
|
+
fprejsoncontent = ""
|
|
4866
|
+
fjsonrawcontent = fprejsoncontent
|
|
4867
|
+
fjsoncontent = {}
|
|
4868
|
+
elif(not testyaml and fjsontype == "yaml"):
|
|
4869
|
+
fjsoncontent = {}
|
|
4870
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4871
|
+
fprejsoncontent = ""
|
|
4872
|
+
fjsonrawcontent = fprejsoncontent
|
|
4873
|
+
elif(fjsontype=="list"):
|
|
4874
|
+
fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
|
|
4875
|
+
flisttmp = MkTempFile()
|
|
4876
|
+
flisttmp.write(fprejsoncontent.encode())
|
|
4877
|
+
flisttmp.seek(0)
|
|
4878
|
+
fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
|
|
4879
|
+
flisttmp.close()
|
|
4880
|
+
fjsonrawcontent = fjsoncontent
|
|
4881
|
+
if(fjsonlen==1):
|
|
4882
|
+
try:
|
|
4883
|
+
fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
|
|
4884
|
+
fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
|
|
4885
|
+
fjsonlen = len(fjsoncontent)
|
|
4886
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4887
|
+
try:
|
|
4888
|
+
fjsonrawcontent = fjsoncontent[0]
|
|
4889
|
+
fjsoncontent = json.loads(fjsoncontent[0])
|
|
4890
|
+
except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
|
|
4891
|
+
pass
|
|
4892
|
+
fjend = fp.tell()
|
|
4893
|
+
if(re.findall("^\\+([0-9]+)", fseeknextfile)):
|
|
4894
|
+
fseeknextasnum = int(fseeknextfile.replace("+", ""))
|
|
4895
|
+
if(abs(fseeknextasnum) == 0):
|
|
4896
|
+
pass
|
|
4897
|
+
fp.seek(fseeknextasnum, 1)
|
|
4898
|
+
elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
|
|
4899
|
+
fseeknextasnum = int(fseeknextfile)
|
|
4900
|
+
if(abs(fseeknextasnum) == 0):
|
|
4901
|
+
pass
|
|
4902
|
+
fp.seek(fseeknextasnum, 1)
|
|
4903
|
+
elif(re.findall("^([0-9]+)", fseeknextfile)):
|
|
4904
|
+
fseeknextasnum = int(fseeknextfile)
|
|
4905
|
+
if(abs(fseeknextasnum) == 0):
|
|
4906
|
+
pass
|
|
4907
|
+
fp.seek(fseeknextasnum, 0)
|
|
4908
|
+
else:
|
|
4909
|
+
return False
|
|
4910
|
+
jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
|
|
4911
|
+
if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
|
|
4912
|
+
VerbosePrintOut("File JSON Data Checksum Error with file " +
|
|
4913
|
+
fname + " at offset " + str(fheaderstart))
|
|
4914
|
+
VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
|
|
4915
|
+
return False
|
|
5234
4916
|
fprechecksumtype = inheader[-2]
|
|
5235
4917
|
fprechecksum = inheader[-1]
|
|
5236
4918
|
headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
|
|
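The new JSON metadata block is decoded optimistically: base64-wrapped JSON first, plain JSON second, and an empty dict if both fail; the YAML branch follows the same shape when `oyaml`/`yaml` is importable. A compact sketch of the JSON fallback chain (the function name is illustrative):

```python
import base64
import binascii
import json

def decode_json_block(raw_text):
    # First try base64-wrapped JSON, then plain JSON; fall back to an
    # empty dict, mirroring the tolerant decode order used above.
    try:
        return json.loads(base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8"))
    except (binascii.Error, ValueError, UnicodeDecodeError):
        try:
            return json.loads(raw_text)
        except ValueError:
            return {}
```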
@@ -5243,7 +4925,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 return False
 formversions = re.search('(.*?)(\\d+)', formstring).groups()
 fcompresstype = ""
-outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
 if (seekstart < 0) or (seekstart > fnumfiles):
 seekstart = 0
 if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -5271,7 +4953,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
 fp.seek(len(delimiter), 1)
 prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-if(
+if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5279,7 +4961,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prenewfcs = GetHeaderChecksum(
 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
 prefcs = preheaderdata[-2]
-if(prefcs
+if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5295,10 +4977,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
 prefcontents.write(fp.read(prefsize))
 prefcontents.seek(0, 0)
 prenewfccs = GetFileChecksum(
-prefcontents
+prefcontents, preheaderdata[-3].lower(), False, formatspecs)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(prefccs
+if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
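All of the checksum comparisons above move from `==` to `hmac.compare_digest`, which runs in constant time and therefore does not leak how much of a digest matched. A minimal illustration:

```python
import hashlib
import hmac

expected = hashlib.md5(b"payload").hexdigest()
actual = hashlib.md5(b"payload").hexdigest()

# hmac.compare_digest runs in time independent of where the inputs
# first differ, unlike the short-circuiting "==" it replaces here.
if not hmac.compare_digest(expected, actual):
    raise ValueError("checksum mismatch")
```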
@@ -5344,9 +5026,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 curloc = filestart
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 CatSize = fp.tell()
 CatSizeEnd = CatSize
@@ -5365,10 +5045,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 else:
 inheader = ReadFileHeaderDataWoSize(
 fp, formatspecs['format_delimiter'])
-fnumextrafieldsize = int(inheader[
-fnumextrafields = int(inheader[
+fnumextrafieldsize = int(inheader[13], 16)
+fnumextrafields = int(inheader[14], 16)
 fextrafieldslist = []
-extrastart =
+extrastart = 15
 extraend = extrastart + fnumextrafields
 while(extrastart < extraend):
 fextrafieldslist.append(inheader[extrastart])
@@ -5385,9 +5065,40 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 formversion = re.findall("([\\d]+)", formstring)
 fheadsize = int(inheader[0], 16)
 fnumfields = int(inheader[1], 16)
-
-
-
+fnumfiles = int(inheader[6], 16)
+fseeknextfile = inheaderdata[7]
+fjsontype = int(inheader[8], 16)
+fjsonlen = int(inheader[9], 16)
+fjsonsize = int(inheader[10], 16)
+fjsonchecksumtype = inheader[11]
+fjsonchecksum = inheader[12]
+fjsoncontent = {}
+fjstart = fp.tell()
+fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+fjend = fp.tell()
+if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile.replace("+", ""))
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 1)
+elif(re.findall("^([0-9]+)", fseeknextfile)):
+fseeknextasnum = int(fseeknextfile)
+if(abs(fseeknextasnum) == 0):
+pass
+fp.seek(fseeknextasnum, 0)
+else:
+return False
+jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+VerbosePrintOut("File JSON Data Checksum Error with file " +
+fname + " at offset " + str(fheaderstart))
+VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+return False
 fprechecksumtype = inheader[-2]
 fprechecksum = inheader[-1]
 headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5433,7 +5144,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
 fp.seek(len(delimiter), 1)
 prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
-if(
+if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
 VerbosePrintOut("File JSON Data Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5441,7 +5152,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prenewfcs = GetHeaderChecksum(
 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
 prefcs = preheaderdata[-2]
-if(prefcs
+if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
 VerbosePrintOut("File Header Checksum Error with file " +
 prefname + " at offset " + str(prefhstart))
 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5462,7 +5173,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
 prefcontents, preheaderdata[-3].lower(), False, formatspecs)
 prefccs = preheaderdata[-1]
 pyhascontents = True
-if(prefccs
+if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
 VerbosePrintOut("File Content Checksum Error with file " +
 prefname + " at offset " + str(prefcontentstart))
 VerbosePrintOut("'" + prefccs +
@@ -5503,24 +5214,17 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp = infile
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
 currentfilepos = fp.tell()
 elif(infile == "-"):
 fp = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, fp)
-else:
-shutil.copyfileobj(sys.stdin, fp)
+shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
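Reading an archive from "-" now goes through the PY_STDIN_BUF alias instead of branching on `sys.stdin.buffer` at each call site. The same pattern in isolation (SpooledTemporaryFile stands in for the module's MkTempFile helper, and the buffer size is an assumption):

```python
import shutil
import sys
import tempfile

# Binary-safe stdin: .buffer on Python 3, the raw stream on Python 2.
stdin_buf = getattr(sys.stdin, "buffer", sys.stdin)

def slurp_stdin(bufsize=131072):
    # Spool stdin into a seekable temporary file so the archive parser
    # can seek around even when the input is a pipe.
    fp = tempfile.SpooledTemporaryFile()
    shutil.copyfileobj(stdin_buf, fp, length=bufsize)
    fp.seek(0, 0)
    return fp
```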
@@ -5530,9 +5234,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp.write(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5541,9 +5243,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp = download_file_from_internet_file(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5551,9 +5251,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 elif(isinstance(infile, FileLikeAdapter)):
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5563,9 +5261,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 fp = open(infile, "rb")
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5616,9 +5312,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
 currentinfilepos = infp.tell()
 try:
 infp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(infp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(infp)
 outinfsize = infp.tell()
 infp.seek(currentinfilepos, 0)
@@ -5657,24 +5351,17 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp = infile
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
 currentfilepos = fp.tell()
 elif(infile == "-"):
 fp = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, fp)
-else:
-shutil.copyfileobj(sys.stdin, fp)
+shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5684,9 +5371,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp.write(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5695,9 +5380,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp = download_file_from_internet_file(infile)
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5705,9 +5388,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 elif(isinstance(infile, FileLikeAdapter)):
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5717,9 +5398,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 fp = open(infile, "rb")
 try:
 fp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(fp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(fp)
 outfsize = fp.tell()
 fp.seek(filestart, 0)
@@ -5770,9 +5449,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
 currentinfilepos = infp.tell()
 try:
 infp.seek(0, 2)
-except OSError:
-SeekToEndOfFile(infp)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(infp)
 outinfsize = infp.tell()
 infp.seek(currentinfilepos, 0)
@@ -5857,12 +5534,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
 return format(int(n), 'x').lower()

-def AppendFileHeader(fp,
-numfiles,
-fencoding,
-extradata=None,
-checksumtype="crc32",
-formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
 """
 Build and write the archive file header.
 Returns the same file-like 'fp' on success, or False on failure.
@@ -5910,24 +5582,44 @@ def AppendFileHeader(fp,
 # 4) core header fields before checksum:
 # tmpoutlenhex, fencoding, platform.system(), fnumfiles
 fnumfiles_hex = _hex_lower(numfiles)
-
+fjsontype = "json"
+if(len(jsondata) > 0):
+try:
+fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+fjsoncontent = "".encode("UTF-8")
+else:
+fjsoncontent = "".encode("UTF-8")
+fjsonsize = format(len(fjsoncontent), 'x').lower()
+fjsonlen = format(len(jsondata), 'x').lower()
+tmpoutlist = []
+tmpoutlist.append(fjsontype)
+tmpoutlist.append(fjsonlen)
+tmpoutlist.append(fjsonsize)
+if(len(jsondata) > 0):
+tmpoutlist.append(checksumtype[1])
+tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+else:
+tmpoutlist.append("none")
+tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
 # Preserve your original "tmpoutlen" computation exactly
-tmpoutlist
-
+tmpoutlist.append(extrasizelen)
+tmpoutlist.append(extrafields)
+tmpoutlen = 8 + len(tmpoutlist) + len(xlist)
 tmpoutlenhex = _hex_lower(tmpoutlen)

 # Serialize the first group
-fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), fnumfiles_hex], delimiter)
+fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
 # Append tmpoutlist
 fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
 # Append extradata items if any
 if xlist:
 fnumfilesa += AppendNullBytes(xlist, delimiter)
 # Append checksum type
-fnumfilesa += AppendNullByte(checksumtype, delimiter)
+fnumfilesa += AppendNullByte(checksumtype[0], delimiter)

 # 5) inner checksum over fnumfilesa
-outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
 tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)

 # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
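AppendFileHeader now embeds a JSON metadata block in the archive header: the dict is serialized with compact separators, its entry count and byte size are recorded as lowercase hex, and a checksum type/value pair follows, with "none" used for empty blocks. A sketch of those fields outside the module (hashlib.new stands in for the module's GetFileChecksum helper, and the empty-block digest value is an assumption):

```python
import hashlib
import json

def build_json_fields(jsondata, algo="md5"):
    # Compact separators keep the serialized block as small as possible.
    payload = json.dumps(jsondata, separators=(",", ":")).encode("UTF-8") if jsondata else b""
    fields = ["json", format(len(jsondata), "x"), format(len(payload), "x")]
    if jsondata:
        # hashlib.new stands in for GetFileChecksum here.
        fields += [algo, hashlib.new(algo, payload).hexdigest()]
    else:
        # The module records a "none" checksum type for empty blocks;
        # the placeholder digest value here is an assumption.
        fields += ["none", ""]
    return fields, payload
```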
@@ -5940,7 +5632,7 @@ def AppendFileHeader(fp,
 + fnumfilesa
 )

-outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
 fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)

 # 8) final total size field (again per your original logic)
@@ -5948,10 +5640,11 @@ def AppendFileHeader(fp,
 formheaersizestr = AppendNullByte(formheaersize, delimiter) # computed but not appended in original
 # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
 # Keeping that behavior for compatibility.
-
+nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+outfileout = fnumfilesa + fjsoncontent + nullstrecd
 # 9) write and try to sync
 try:
-fp.write(
+fp.write(outfileout)
 except (OSError, io.UnsupportedOperation):
 return False

@@ -5972,21 +5665,21 @@ def AppendFileHeader(fp,
 return fp


-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
 if(IsNestedDict(formatspecs) and fmttype in formatspecs):
 formatspecs = formatspecs[fmttype]
 elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
 fmttype = __file_format_default__
 formatspecs = formatspecs[fmttype]
-AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
 return fp


-def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype="
+def MakeEmptyArchiveFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
 return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)


-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
 if(IsNestedDict(formatspecs) and fmttype=="auto" and
 (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
 get_in_ext = os.path.splitext(outfile)
@@ -6027,7 +5720,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
 except PermissionError:
 return False
-AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
 if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
 fp = CompressOpenFileAlt(
 fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6035,18 +5728,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(outfile == "-"):
 fp.seek(0, 0)
-
-shutil.copyfileobj(fp, sys.stdout.buffer)
-else:
-shutil.copyfileobj(fp, sys.stdout)
+shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
 elif(outfile is None):
 fp.seek(0, 0)
 outvar = fp.read()
@@ -6065,11 +5751,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
 return True


-def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="
+def MakeEmptyArchiveFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
 return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)


-def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["
+def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
 if(not hasattr(fp, "write")):
 return False
 if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -6148,25 +5834,21 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 return fp

-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 advancedlist = formatspecs['use_advanced_list']
 altinode = formatspecs['use_alt_inode']
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 infilelist = []
 if(infiles == "-"):
-for line in
+for line in PY_STDIN_TEXT:
 infilelist.append(line.strip())
 infilelist = list(filter(None, infilelist))
 elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
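From this point on, `checksumtype` is a list rather than a single string. Judging from the call sites in this diff, the first two slots cover the archive header and its JSON block, and the last three are forwarded per file. A small sketch of that assumed mapping:

```python
# Assumed mapping of the five checksum slots, as read from this diff.
checksums = ["md5", "md5", "sha256", "sha256", "sha256"]
slot_roles = {
    0: "archive header",
    1: "archive header JSON block",
    2: "per-file header",
    3: "per-file JSON block",
    4: "per-file content",
}
for index, role in sorted(slot_roles.items()):
    print(index, role, "->", checksums[index])
```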
@@ -6205,16 +5887,12 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 inodetoforminode = {}
 numfiles = int(len(GetDirList))
 fnumfiles = format(numfiles, 'x').lower()
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 FullSizeFilesAlt = 0
 for curfname in GetDirList:
@@ -6364,7 +6042,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 curcompression = "none"
 if not followlink and ftype in data_types:
 with open(fname, "rb") as fpc:
-
+shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
 if(typechecktest is not False):
@@ -6382,7 +6060,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6398,7 +6076,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6414,7 +6092,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 return False
 flstatinfo = os.stat(flinkname)
 with open(flinkname, "rb") as fpc:
-
+shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
 if(typechecktest is not False):
@@ -6432,7 +6110,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6448,7 +6126,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6466,25 +6144,21 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 return fp

-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 curinode = 0
 curfid = 0
 inodelist = []
@@ -6493,10 +6167,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 inodetoforminode = {}
 if(infile == "-"):
 infile = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, infile)
-else:
-shutil.copyfileobj(sys.stdin, infile)
+shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
 infile.seek(0, 0)
 if(not infile):
 return False
@@ -6551,16 +6222,12 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 except FileNotFoundError:
 return False
 numfiles = int(len(tarfp.getmembers()))
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
 fencoding = "UTF-8"
@@ -6646,7 +6313,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 curcompression = "none"
 if ftype in data_types:
 fpc = tarfp.extractfile(member)
-
+shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
 fpc.close()
 typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
 fcontents.seek(0, 0)
@@ -6665,7 +6332,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 while(ilmin < ilsize):
 cfcontents = MkTempFile()
 fcontents.seek(0, 0)
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 fcontents.seek(0, 0)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
@@ -6681,7 +6348,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 curcompression = compressionuselist[ilcmin]
 fcontents.seek(0, 0)
 cfcontents = MkTempFile()
-shutil.copyfileobj(fcontents, cfcontents)
+shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
 cfcontents.seek(0, 0)
 cfcontents = CompressOpenFileAlt(
 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6699,26 +6366,22 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
 tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
 fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
 AppendFileHeaderWithContent(
-fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 fcontents.close()
 return fp

-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
 if(not hasattr(fp, "write")):
 return False
 if(verbose):
 logging.basicConfig(format="%(message)s",
-stream=
+stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 curinode = 0
 curfid = 0
 inodelist = []
@@ -6727,10 +6390,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 inodetoforminode = {}
 if(infile == "-"):
 infile = MkTempFile()
-
-shutil.copyfileobj(sys.stdin.buffer, infile)
-else:
-shutil.copyfileobj(sys.stdin, infile)
+shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
 infile.seek(0, 0)
 if(not infile):
 return False
@@ -6755,16 +6415,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 if(ziptest):
 VerbosePrintOut("Bad file found!")
 numfiles = int(len(zipfp.infolist()))
-AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
 try:
 fp.flush()
 if(hasattr(os, "sync")):
 os.fsync(fp.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
 fencoding = "UTF-8"
@@ -6849,24 +6505,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 fcsize = format(int(0), 'x').lower()
 try:
 fuid = format(int(os.getuid()), 'x').lower()
-except AttributeError:
-fuid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fuid = format(int(0), 'x').lower()
 try:
 fgid = format(int(os.getgid()), 'x').lower()
-except AttributeError:
-fgid = format(int(0), 'x').lower()
-except KeyError:
+except (KeyError, AttributeError):
 fgid = format(int(0), 'x').lower()
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
@@ -6876,9 +6526,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
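The owner/group lookups consolidated above all follow one defensive pattern: import pwd or grp lazily and fall back to an empty name when the module is missing (non-POSIX platforms) or the id has no entry. A standalone sketch:

```python
def lookup_user_name():
    # Returns "" when pwd is unavailable (e.g. Windows) or the current
    # uid has no passwd entry, matching the fallbacks consolidated above.
    try:
        import os
        import pwd
        return pwd.getpwuid(os.getuid()).pw_name
    except (ImportError, KeyError, AttributeError):
        return ""
```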
@@ -6901,7 +6549,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6901
6549
|
while(ilmin < ilsize):
|
|
6902
6550
|
cfcontents = MkTempFile()
|
|
6903
6551
|
fcontents.seek(0, 0)
|
|
6904
|
-
shutil.copyfileobj(fcontents, cfcontents)
|
|
6552
|
+
shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
|
|
6905
6553
|
fcontents.seek(0, 0)
|
|
6906
6554
|
cfcontents.seek(0, 0)
|
|
6907
6555
|
cfcontents = CompressOpenFileAlt(
|
|
@@ -6914,7 +6562,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6914
6562
|
curcompression = compressionuselist[ilcmin]
|
|
6915
6563
|
fcontents.seek(0, 0)
|
|
6916
6564
|
cfcontents = MkTempFile()
|
|
6917
|
-
shutil.copyfileobj(fcontents, cfcontents)
|
|
6565
|
+
shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
|
|
6918
6566
|
cfcontents.seek(0, 0)
|
|
6919
6567
|
cfcontents = CompressOpenFileAlt(
|
|
6920
6568
|
cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -6932,31 +6580,26 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
|
|
|
6932
6580
|
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
6933
6581
|
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
|
|
6934
6582
|
AppendFileHeaderWithContent(
|
|
6935
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
|
|
6583
|
+
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
6936
6584
|
try:
|
|
6937
6585
|
fp.flush()
|
|
6938
6586
|
if(hasattr(os, "sync")):
|
|
6939
6587
|
os.fsync(fp.fileno())
|
|
6940
|
-
except io.UnsupportedOperation:
|
|
6941
|
-
pass
|
|
6942
|
-
except AttributeError:
|
|
6943
|
-
pass
|
|
6944
|
-
except OSError:
|
|
6588
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
6945
6589
|
pass
|
|
6946
6590
|
fcontents.close()
|
|
6947
6591
|
return fp
|
|
6948
6592
|
|
|
6949
6593
|
if(not rarfile_support):
|
|
6950
|
-
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
|
|
6594
|
+
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
6951
6595
|
return False
|
|
6952
|
-
|
|
6953
|
-
|
|
6954
|
-
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
|
|
6596
|
+
else:
|
|
6597
|
+
def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
6955
6598
|
if(not hasattr(fp, "write")):
|
|
6956
6599
|
return False
|
|
6957
6600
|
if(verbose):
|
|
6958
6601
|
logging.basicConfig(format="%(message)s",
|
|
6959
|
-
stream=
|
|
6602
|
+
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
6960
6603
|
curinode = 0
|
|
6961
6604
|
curfid = 0
|
|
6962
6605
|
inodelist = []
|
|
@@ -6972,26 +6615,18 @@ if(rarfile_support):
|
|
|
6972
6615
|
if(rartest):
|
|
6973
6616
|
VerbosePrintOut("Bad file found!")
|
|
6974
6617
|
numfiles = int(len(rarfp.infolist()))
|
|
6975
|
-
AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
|
|
6618
|
+
AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
|
|
6976
6619
|
try:
|
|
6977
6620
|
fp.flush()
|
|
6978
6621
|
if(hasattr(os, "sync")):
|
|
6979
6622
|
os.fsync(fp.fileno())
|
|
6980
|
-
except io.UnsupportedOperation:
|
|
6981
|
-
pass
|
|
6982
|
-
except AttributeError:
|
|
6983
|
-
pass
|
|
6984
|
-
except OSError:
|
|
6623
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
6985
6624
|
pass
|
|
6986
6625
|
try:
|
|
6987
6626
|
fp.flush()
|
|
6988
6627
|
if(hasattr(os, "sync")):
|
|
6989
6628
|
os.fsync(fp.fileno())
|
|
6990
|
-
except io.UnsupportedOperation:
|
|
6991
|
-
pass
|
|
6992
|
-
except AttributeError:
|
|
6993
|
-
pass
|
|
6994
|
-
except OSError:
|
|
6629
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
6995
6630
|
pass
|
|
6996
6631
|
for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
|
|
6997
6632
|
is_unix = False
|
|
@@ -7100,24 +6735,18 @@ if(rarfile_support):
|
|
|
7100
6735
|
int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
|
|
7101
6736
|
try:
|
|
7102
6737
|
fuid = format(int(os.getuid()), 'x').lower()
|
|
7103
|
-
except AttributeError:
|
|
7104
|
-
fuid = format(int(0), 'x').lower()
|
|
7105
|
-
except KeyError:
|
|
6738
|
+
except (KeyError, AttributeError):
|
|
7106
6739
|
fuid = format(int(0), 'x').lower()
|
|
7107
6740
|
try:
|
|
7108
6741
|
fgid = format(int(os.getgid()), 'x').lower()
|
|
7109
|
-
except AttributeError:
|
|
7110
|
-
fgid = format(int(0), 'x').lower()
|
|
7111
|
-
except KeyError:
|
|
6742
|
+
except (KeyError, AttributeError):
|
|
7112
6743
|
fgid = format(int(0), 'x').lower()
|
|
7113
6744
|
try:
|
|
7114
6745
|
import pwd
|
|
7115
6746
|
try:
|
|
7116
6747
|
userinfo = pwd.getpwuid(os.getuid())
|
|
7117
6748
|
funame = userinfo.pw_name
|
|
7118
|
-
except KeyError:
|
|
7119
|
-
funame = ""
|
|
7120
|
-
except AttributeError:
|
|
6749
|
+
except (KeyError, AttributeError):
|
|
7121
6750
|
funame = ""
|
|
7122
6751
|
except ImportError:
|
|
7123
6752
|
funame = ""
|
|
@@ -7127,9 +6756,7 @@ if(rarfile_support):
|
|
|
7127
6756
|
try:
|
|
7128
6757
|
groupinfo = grp.getgrgid(os.getgid())
|
|
7129
6758
|
fgname = groupinfo.gr_name
|
|
7130
|
-
except KeyError:
|
|
7131
|
-
fgname = ""
|
|
7132
|
-
except AttributeError:
|
|
6759
|
+
except (KeyError, AttributeError):
|
|
7133
6760
|
fgname = ""
|
|
7134
6761
|
except ImportError:
|
|
7135
6762
|
fgname = ""
|
|
@@ -7152,7 +6779,7 @@ if(rarfile_support):
|
|
|
7152
6779
|
while(ilmin < ilsize):
|
|
7153
6780
|
cfcontents = MkTempFile()
|
|
7154
6781
|
fcontents.seek(0, 0)
|
|
7155
|
-
shutil.copyfileobj(fcontents, cfcontents)
|
|
6782
|
+
shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
|
|
7156
6783
|
fcontents.seek(0, 0)
|
|
7157
6784
|
cfcontents.seek(0, 0)
|
|
7158
6785
|
cfcontents = CompressOpenFileAlt(
|
|
@@ -7168,7 +6795,7 @@ if(rarfile_support):
|
|
|
7168
6795
|
curcompression = compressionuselist[ilcmin]
|
|
7169
6796
|
fcontents.seek(0, 0)
|
|
7170
6797
|
cfcontents = MkTempFile()
|
|
7171
|
-
shutil.copyfileobj(fcontents, cfcontents)
|
|
6798
|
+
shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
|
|
7172
6799
|
cfcontents.seek(0, 0)
|
|
7173
6800
|
cfcontents = CompressOpenFileAlt(
|
|
7174
6801
|
cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
|
|
@@ -7186,31 +6813,26 @@ if(rarfile_support):
|
|
|
7186
6813
|
tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
|
|
7187
6814
|
fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
|
|
7188
6815
|
AppendFileHeaderWithContent(
|
|
7189
|
-
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
|
|
6816
|
+
fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
|
|
7190
6817
|
try:
|
|
7191
6818
|
fp.flush()
|
|
7192
6819
|
if(hasattr(os, "sync")):
|
|
7193
6820
|
os.fsync(fp.fileno())
|
|
7194
|
-
except io.UnsupportedOperation:
|
|
7195
|
-
pass
|
|
7196
|
-
except AttributeError:
|
|
7197
|
-
pass
|
|
7198
|
-
except OSError:
|
|
6821
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7199
6822
|
pass
|
|
7200
6823
|
fcontents.close()
|
|
7201
6824
|
return fp
|
|
7202
6825
|
|
|
7203
6826
|
if(not py7zr_support):
|
|
7204
|
-
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["
|
|
6827
|
+
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
7205
6828
|
return False
|
|
7206
|
-
|
|
7207
|
-
|
|
7208
|
-
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
|
|
6829
|
+
else:
|
|
6830
|
+
def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
|
|
7209
6831
|
if(not hasattr(fp, "write")):
|
|
7210
6832
|
return False
|
|
7211
6833
|
if(verbose):
|
|
7212
6834
|
logging.basicConfig(format="%(message)s",
|
|
7213
|
-
stream=
|
|
6835
|
+
stream=PY_STDOUT_TEXT, level=logging.DEBUG)
|
|
7214
6836
|
formver = formatspecs['format_ver']
|
|
7215
6837
|
fileheaderver = str(int(formver.replace(".", "")))
|
|
7216
6838
|
curinode = 0
|
|
@@ -7228,16 +6850,12 @@ if(py7zr_support):
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
         numfiles = int(len(szpfp.list()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
@@ -7287,24 +6905,18 @@ if(py7zr_support):
                 int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
             try:
                 fuid = format(int(os.getuid()), 'x').lower()
-            except AttributeError:
-                fuid = format(int(0), 'x').lower()
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = format(int(0), 'x').lower()
             try:
                 fgid = format(int(os.getgid()), 'x').lower()
-            except AttributeError:
-                fgid = format(int(0), 'x').lower()
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = format(int(0), 'x').lower()
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -7314,9 +6926,7 @@ if(py7zr_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -7342,7 +6952,7 @@ if(py7zr_support):
                 while(ilmin < ilsize):
                     cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     fcontents.seek(0, 0)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
@@ -7358,7 +6968,7 @@ if(py7zr_support):
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
                     cfcontents = MkTempFile()
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
                         cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7376,25 +6986,21 @@ if(py7zr_support):
             tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
             AppendFileHeaderWithContent(
-                fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+                fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
             try:
                 fp.flush()
                 if(hasattr(os, "sync")):
                     os.fsync(fp.fileno())
-            except io.UnsupportedOperation:
-                pass
-            except AttributeError:
-                pass
-            except OSError:
+            except (io.UnsupportedOperation, AttributeError, OSError):
                 pass
             fcontents.close()
         return fp

-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     GetDirList = inlist
     if(not GetDirList):
         return False
@@ -7406,7 +7012,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
     for curfname in GetDirList:
         ftype = format(curfname[0], 'x').lower()
         fencoding = curfname[1]
@@ -7448,16 +7054,16 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
                       fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
         fcontents.seek(0, 0)
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
     return fp


-def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
     return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)


-def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7510,18 +7116,11 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
    if(outfile == "-"):
        fp.seek(0, 0)
-        if(hasattr(sys.stdout, "buffer")):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        fp.seek(0, 0)
        outvar = fp.read()
@@ -7538,7 +7137,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
     fp.close()
     return True

-def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7553,7 +7152,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
         return True
     return returnout

-def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7603,18 +7202,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
    if(outfile == "-"):
        fp.seek(0, 0)
-        if(hasattr(sys.stdout, "buffer")):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        fp.seek(0, 0)
        outvar = fp.read()
@@ -7632,7 +7224,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
     fp.close()
     return True

-def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7683,18 +7275,11 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
    if(outfile == "-"):
        fp.seek(0, 0)
-        if(hasattr(sys.stdout, "buffer")):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        fp.seek(0, 0)
        outvar = fp.read()
@@ -7712,7 +7297,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
     fp.close()
     return True

-def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7727,7 +7312,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
         return True
     return returnout

-def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -7778,18 +7363,11 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
    if(outfile == "-"):
        fp.seek(0, 0)
-        if(hasattr(sys.stdout, "buffer")):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif(outfile is None):
        fp.seek(0, 0)
        outvar = fp.read()
@@ -7807,7 +7385,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
     fp.close()
     return True

-def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7823,11 +7401,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
-
-if(rarfile_support):
-    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+else:
+    def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7878,18 +7455,11 @@ if(rarfile_support):
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
        if(outfile == "-"):
            fp.seek(0, 0)
-            if(hasattr(sys.stdout, "buffer")):
-                shutil.copyfileobj(fp, sys.stdout.buffer)
-            else:
-                shutil.copyfileobj(fp, sys.stdout)
+            shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
        elif(outfile is None):
            fp.seek(0, 0)
            outvar = fp.read()
@@ -7907,7 +7477,7 @@ if(rarfile_support):
         fp.close()
         return True

-def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -7923,11 +7493,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout

 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
-
-if(py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+else:
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7978,18 +7547,11 @@ if(py7zr_support):
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
            pass
        if(outfile == "-"):
            fp.seek(0, 0)
-            if(hasattr(sys.stdout, "buffer")):
-                shutil.copyfileobj(fp, sys.stdout.buffer)
-            else:
-                shutil.copyfileobj(fp, sys.stdout)
+            shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
        elif(outfile is None):
            fp.seek(0, 0)
            outvar = fp.read()
@@ -8007,7 +7569,7 @@ if(py7zr_support):
         fp.close()
         return True

-def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -8022,7 +7584,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
         return True
     return returnout

-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
     return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

@@ -8056,9 +7618,7 @@ def PrintPermissionString(fchmode, ftype):
         permissionstr = "w" + permissionstr
     try:
         permissionoutstr = stat.filemode(fchmode)
-    except AttributeError:
-        permissionoutstr = permissionstr
-    except KeyError:
+    except (KeyError, AttributeError):
         permissionoutstr = permissionstr
     return permissionoutstr

@@ -8974,7 +8534,7 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0


 def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
-    filefp =
+    filefp = MkTempFile("", isbytes=False)
     outstring = UncompressString(instring, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
@@ -8989,7 +8549,7 @@ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=
     fp.seek(filestart, 0)
     if(prechck!="zstd"):
         return UncompressFileAlt(fp, formatspecs, filestart)
-    filefp =
+    filefp = MkTempFile("", isbytes=False)
     fp.seek(filestart, 0)
     outstring = UncompressString(fp.read(), formatspecs, 0)
     filefp.write(outstring)
@@ -9061,9 +8621,7 @@ def _extract_base_fp(obj):
         try:
             f() # probe fileno()
             return cur
-        except UnsupportedOperation:
-            pass
-        except Exception:
+        except (Exception, UnsupportedOperation):
             pass
     for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
         nxt = getattr(cur, attr, None)
@@ -9455,7 +9013,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw):

 # ========= copy helpers =========

-def fast_copy(infp, outfp, bufsize=
+def fast_copy(infp, outfp, bufsize=__filebuff_size__):
     """
     Efficient copy from any readable file-like to any writable file-like.
     Uses readinto() when available to avoid extra allocations.
@@ -9499,7 +9057,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
             shutil.copyfileobj(fp, outfp, length=chunk_size)


-def copy_opaque(src, dst, bufsize=
+def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20):
     """
     Copy opaque bytes from 'src' (any readable file-like) to 'dst'
     (your mmap-backed FileLikeAdapter or any writable file-like).
@@ -9561,11 +9119,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,

     try:
         fp.seek(0, 0)
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass

     if (not compression or compression == formatspecs['format_magic']
@@ -9624,11 +9178,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,

     try:
         bytesfp.seek(0, 0)
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     out = FileLikeAdapter(bytesfp, mode="rb") # read interface for the caller
     try:
@@ -9758,31 +9308,18 @@ def CheckSumSupport(checkfor, guaranteed=True):
         try:
             hash_list = sorted(list(hashlib.algorithms_guaranteed))
         except AttributeError:
-
-
-
-
-            except AttributeError:
-                hash_list = sorted(list(hashlib.algorithms))
-    checklistout = sorted(hash_list + ['adler32', 'crc16', 'crc16_ansi', 'crc16_ibm',
-                          'crc16_ccitt', 'crc32', 'crc64', 'crc64_ecma', 'crc64_iso', 'none'])
-    if(checkfor in checklistout):
-        return True
-    else:
-        return False
-
-
-def CheckSumSupportAlt(checkfor, guaranteed=True):
-    if(guaranteed):
-        try:
-            hash_list = sorted(list(hashlib.algorithms_guaranteed))
-        except AttributeError:
-            hash_list = sorted(list(hashlib.algorithms))
+            try:
+                hash_list = sorted(list(hashlib.algorithms))
+            except AttributeError:
+                hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
     else:
         try:
             hash_list = sorted(list(hashlib.algorithms_available))
         except AttributeError:
-
+            try:
+                hash_list = sorted(list(hashlib.algorithms))
+            except AttributeError:
+                hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
     checklistout = hash_list
     if(checkfor in checklistout):
         return True
@@ -9790,48 +9327,46 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
         return False


-def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

-def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackStackedArchiveFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)

-def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return PackArchiveFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)


-def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


-def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


 if(not rarfile_support):
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-if(rarfile_support):
-    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackArchiveFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


 if(not py7zr_support):
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-if(py7zr_support):
-    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackArchiveFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)


-def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackArchiveFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
         return PackArchiveFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
     elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -9914,7 +9449,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
                         formatspecs=__file_format_multi_dict__, # keep default like original
                         seektoend=False, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)

     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
@@ -9941,10 +9476,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,

     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         fp.seek(filestart, 0)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
         checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
@@ -10021,9 +9553,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,

     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)

     CatSize = fp.tell()
@@ -10053,18 +9583,56 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
     else:
         inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])

-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
-    extrastart =
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-    fostype = inheader[3]
-    fnumfiles = int(inheader[4], 16)
+    fnumfiles = int(inheader[6], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
+    outfseeknextfile = inheader[7]
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fprejsoncontent = fp.read(fjsonsize)
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(fjsonsize > 0):
+        if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+        else:
+            valid_archive = False
+            invalid_archive = True
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False

     il = 0
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -10183,7 +9751,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             VerbosePrintOut(outfname)
             VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))

-        if(outfcs == infcs):
+        if(hmac.compare_digest(outfcs, infcs)):
             if(verbose):
                 VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
                 VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -10195,7 +9763,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
                 VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")

         if(outfjsonsize > 0):
-            if(outfjsonchecksum == injsonfcs):
+            if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
                 if(verbose):
                     VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
                     VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -10219,7 +9787,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0,
             infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
             pyhascontents = True

-        if(outfccs == infccs):
+        if(hmac.compare_digest(outfccs, infccs)):
             if(verbose):
                 VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
                 VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
@@ -10302,9 +9870,7 @@ def StackedArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=
     outstartfile = infile.tell()
     try:
         infile.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infile)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infile)
     outfsize = infile.tell()
     infile.seek(outstartfile, 0)
@@ -10380,7 +9946,7 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
     fp = PackArchiveFileFromTarFile(
-        infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+        infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
     listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles

@@ -10391,7 +9957,7 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
     fp = PackArchiveFileFromZipFile(
-        infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+        infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
     listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles

@@ -10407,7 +9973,7 @@ if(rarfile_support):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
         fp = PackArchiveFileFromRarFile(
-            infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+            infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
         listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
         return listarrayfiles

@@ -10422,7 +9988,7 @@ if(py7zr_support):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
         fp = PackArchiveFileFromSevenZipFile(
-            infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+            infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
         listarrayfiles = ArchiveFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
         return listarrayfiles

@@ -10446,7 +10012,7 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
         return False


-def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
+def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                                compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10577,7 +10143,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
     if compressionuselist is None:
         compressionuselist = compressionlistalt
     if checksumtype is None:
-        checksumtype = ["crc32", "crc32", "crc32", "crc32"]
+        checksumtype = ["md5", "md5", "md5", "md5"]
     if extradata is None:
         extradata = []
     if jsondata is None:
@@ -10664,7 +10230,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         compression = "auto"

     if verbose:
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)

     # No files?
     if not listarrayfiles.get('ffilelist'):
@@ -10769,7 +10335,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
             while ilmin < ilsize:
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -10787,7 +10353,7 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr

             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
@@ -10886,22 +10452,12 @@ def RePackArchiveFile(infile, outfile, fmttype="auto", compression="auto", compr
         fp.flush()
         if hasattr(os, "sync"):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        if verbose:
-            logging.warning("Flush/sync unsupported on this file object.")
-    except AttributeError:
-        if verbose:
-            logging.warning("Flush/sync attributes missing on this file object.")
-    except OSError as e:
-        if verbose:
-            logging.warning("OS error during flush/sync: %s", e)
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass

     if outfile == "-":
         fp.seek(0, 0)
-        if hasattr(sys.stdout, "buffer"):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
     elif outfile is None:
         fp.seek(0, 0)
         outvar = fp.read()
@@ -10940,14 +10496,14 @@ def RePackMultipleArchiveFile(infiles, outfile, fmttype="auto", compression="aut
         return True
     return returnout

-def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def RePackArchiveFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
     listarrayfiles = RePackArchiveFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
                                        checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles


-def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def PackArchiveFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                                compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10960,7 +10516,7 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
     if(outdir is not None):
         outdir = RemoveWindowsPath(outdir)
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfiles = infile
     else:
@@ -11010,16 +10566,12 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
                     listarrayfiles['ffilelist'][lcfi]['fcontents'])
                 listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
                 shutil.copyfileobj(
-                    listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
+                    listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc, length=__filebuff_size__)
                 try:
                     fpc.flush()
                     if(hasattr(os, "sync")):
                         os.fsync(fpc.fileno())
-                except io.UnsupportedOperation:
-                    pass
-                except AttributeError:
-                    pass
-                except OSError:
+                except (io.UnsupportedOperation, AttributeError, OSError):
                     pass
             if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
                 os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
@@ -11061,16 +10613,12 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
                         flinkinfo['fcontents'] = MkTempFile(
                             flinkinfo['fcontents'])
                         flinkinfo['fcontents'].seek(0, 0)
-                        shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+                        shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
                         try:
                             fpc.flush()
                             if(hasattr(os, "sync")):
                                 os.fsync(fpc.fileno())
-                        except io.UnsupportedOperation:
-                            pass
-                        except AttributeError:
-                            pass
-                        except OSError:
+                        except (io.UnsupportedOperation, AttributeError, OSError):
                             pass
                     if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
                         os.chown(PrependPath(
@@ -11140,16 +10688,12 @@ def UnPackArchiveFile(infile, outdir=None, followlink=False, filestart=0, seekst
|
|
|
11140
10688
|
flinkinfo['fcontents'] = MkTempFile(
|
|
11141
10689
|
flinkinfo['fcontents'])
|
|
11142
10690
|
flinkinfo['fcontents'].seek(0, 0)
|
|
11143
|
-
shutil.copyfileobj(flinkinfo['fcontents'], fpc)
|
|
10691
|
+
shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
|
|
11144
10692
|
try:
|
|
11145
10693
|
fpc.flush()
|
|
11146
10694
|
if(hasattr(os, "sync")):
|
|
11147
10695
|
os.fsync(fpc.fileno())
|
|
11148
|
-
except io.UnsupportedOperation:
|
|
11149
|
-
pass
|
|
11150
|
-
except AttributeError:
|
|
11151
|
-
pass
|
|
11152
|
-
except OSError:
|
|
10696
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
11153
10697
|
pass
|
|
11154
10698
|
if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
|
|
11155
10699
|
os.chown(PrependPath(
|
|
@@ -11236,7 +10780,7 @@ def ftype_to_str(ftype):
 
 def ArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfileslist = [infile]
     if(isinstance(infile, list)):

@@ -11317,9 +10861,7 @@ def StackedArchiveFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0
     outstartfile = infile.tell()
     try:
         infile.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infile)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infile)
     outfsize = infile.tell()
     infile.seek(outstartfile, 0)
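Note: seek(0, 2) can raise OSError on unseekable streams and ValueError on closed or detached ones; both now share the SeekToEndOfFile fallback. A sketch of the idea, with seek_to_end_by_reading as a hypothetical stand-in for that helper:

    def seek_to_end_by_reading(fp, chunksize=65536):
        # Drain the stream when seeking to the end is unsupported.
        while fp.read(chunksize):
            pass

    def stream_size(fp):
        start = fp.tell()
        try:
            fp.seek(0, 2)
        except (OSError, ValueError):
            seek_to_end_by_reading(fp)
        size = fp.tell()
        fp.seek(start, 0)
        return size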
@@ -11349,13 +10891,10 @@ def ArchiveFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipc
 
 def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
-
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False
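Note: reading "-" now funnels through one binary-capable stdin handle instead of branching on sys.stdin.buffer. The alias amounts to a getattr fallback; a sketch, with tempfile.SpooledTemporaryFile assumed in place of MkTempFile:

    import shutil
    import sys
    import tempfile

    # Binary stdin on Python 3, plain stdin where .buffer does not exist.
    stdin_buf = getattr(sys.stdin, "buffer", sys.stdin)

    def slurp_stdin(buffsize=1048576):
        tmp = tempfile.SpooledTemporaryFile()
        shutil.copyfileobj(stdin_buf, tmp, length=buffsize)
        tmp.seek(0, 0)
        return tmp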
@@ -11474,13 +11013,10 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
 
 def ZipFileListFiles(infile, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(infile == "-"):
         infile = MkTempFile()
-
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
     if(not infile):
         return False

@@ -11557,24 +11093,18 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
         printfname = member.filename
         try:
             fuid = int(os.getuid())
-        except AttributeError:
-            fuid = int(0)
-        except KeyError:
+        except (KeyError, AttributeError):
             fuid = int(0)
         try:
             fgid = int(os.getgid())
-        except AttributeError:
-            fgid = int(0)
-        except KeyError:
+        except (KeyError, AttributeError):
             fgid = int(0)
         try:
             import pwd
             try:
                 userinfo = pwd.getpwuid(os.getuid())
                 funame = userinfo.pw_name
-            except KeyError:
-                funame = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 funame = ""
         except ImportError:
             funame = ""
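Note: the owner lookups tolerate platforms without os.getuid (AttributeError, e.g. Windows) and ids with no passwd/group entry (KeyError); the duplicated handlers are merged. The same portable lookup, condensed into one sketch:

    import os

    def current_owner():
        try:
            uid, gid = int(os.getuid()), int(os.getgid())
        except (KeyError, AttributeError):
            uid = gid = 0  # no POSIX ids on this platform
        uname = gname = ""
        try:
            import pwd
            uname = pwd.getpwuid(os.getuid()).pw_name
        except (ImportError, KeyError, AttributeError):
            pass  # no pwd module, unknown uid, or no getuid
        try:
            import grp
            gname = grp.getgrgid(os.getgid()).gr_name
        except (ImportError, KeyError, AttributeError):
            pass
        return uid, gid, uname, gname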
@@ -11584,9 +11114,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
             try:
                 groupinfo = grp.getgrgid(os.getgid())
                 fgname = groupinfo.gr_name
-            except KeyError:
-                fgname = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 fgname = ""
         except ImportError:
             fgname = ""

@@ -11612,7 +11140,7 @@ if(not rarfile_support):
 if(rarfile_support):
     def RarFileListFiles(infile, verbose=False, returnfp=False):
         if(verbose):
-            logging.basicConfig(format="%(message)s", stream=
+            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):

@@ -11695,24 +11223,18 @@ if(rarfile_support):
             printfname = member.filename
             try:
                 fuid = int(os.getuid())
-            except AttributeError:
-                fuid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = int(0)
             try:
                 fgid = int(os.getgid())
-            except AttributeError:
-                fgid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = int(0)
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""

@@ -11722,9 +11244,7 @@ if(rarfile_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -11749,7 +11269,7 @@ if(not py7zr_support):
 if(py7zr_support):
     def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
         if(verbose):
-            logging.basicConfig(format="%(message)s", stream=
+            logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         lcfi = 0

@@ -11802,24 +11322,18 @@ if(py7zr_support):
             file_content[member.filename].close()
             try:
                 fuid = int(os.getuid())
-            except AttributeError:
-                fuid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = int(0)
             try:
                 fgid = int(os.getgid())
-            except AttributeError:
-                fgid = int(0)
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = int(0)
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""

@@ -11829,9 +11343,7 @@ if(py7zr_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -11852,7 +11364,7 @@ if(py7zr_support):
 
 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]

@@ -11871,7 +11383,7 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
         return False
 
 
-def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["
+def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackArchiveFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
                                compressionlevel, followlink, checksumtype, formatspecs, False, True)
@@ -11883,19 +11395,19 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
 PyNeoFile compatibility layer
 """
 
-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
 
-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
     return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)
 
-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
     return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)
 
-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
     return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)
 
-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
     return PackArchiveFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)
 
 def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
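Note: the PyNeoFile compatibility wrappers now default every checksum slot to "md5" as well. A minimal usage sketch, assuming the module imports as pyarchivefile and that input.txt exists in the working directory:

    import pyarchivefile as paf

    # Pack one file, list the result, then unpack it elsewhere.
    paf.pack_neo(["input.txt"], "out.neo")
    paf.listfiles_neo("out.neo")
    paf.unpack_neo("out.neo", outdir="./extracted")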
@@ -11904,7 +11416,7 @@ def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonl
 def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
     return UnPackArchiveFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)
 
-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["
+def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
     return RePackArchiveFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
 
 def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):

@@ -11913,7 +11425,7 @@ def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False,
 def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
     return ArchiveFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)
 
-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
     intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
     return RePackArchiveFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)
 
@@ -11955,10 +11467,7 @@ def download_file_from_ftp_file(url):
         ftp_port = 21
     try:
         ftp.connect(urlparts.hostname, ftp_port)
-    except socket.gaierror:
-        log.info("Error With URL "+url)
-        return False
-    except socket.timeout:
+    except (socket.gaierror, socket.timeout):
         log.info("Error With URL "+url)
         return False
     if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
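Note: name-resolution failures (socket.gaierror) and timeouts (socket.timeout) around ftp.connect now share a single handler. The shape of the pattern with plain ftplib, host and port being placeholders:

    import socket
    from ftplib import FTP

    def try_ftp_connect(hostname, port=21, timeout=30):
        ftp = FTP(timeout=timeout)
        try:
            ftp.connect(hostname, port)
        except (socket.gaierror, socket.timeout):
            return None  # unresolvable host or no answer in time
        return ftp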
@@ -12046,10 +11555,7 @@ def upload_file_to_ftp_file(ftpfile, url):
         ftp_port = 21
     try:
         ftp.connect(urlparts.hostname, ftp_port)
-    except socket.gaierror:
-        log.info("Error With URL "+url)
-        return False
-    except socket.timeout:
+    except (socket.gaierror, socket.timeout):
         log.info("Error With URL "+url)
         return False
     if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12160,7 +11666,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             response = requests.get(rebuilt_url, headers=headers, timeout=(5, 30), stream=True)
         response.raw.decode_content = True
-        shutil.copyfileobj(response.raw, httpfile)
+        shutil.copyfileobj(response.raw, httpfile, length=__filebuff_size__)
 
     # 2) HTTPX branch
     elif usehttp == 'httpx' and havehttpx:

@@ -12172,7 +11678,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
             else:
                 response = client.get(rebuilt_url, headers=headers)
             raw_wrapper = RawIteratorWrapper(response.iter_bytes())
-            shutil.copyfileobj(raw_wrapper, httpfile)
+            shutil.copyfileobj(raw_wrapper, httpfile, length=__filebuff_size__)
 
     # 3) Mechanize branch
     elif usehttp == 'mechanize' and havemechanize:

@@ -12191,7 +11697,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 
         # Open the URL and copy the response to httpfile
        response = br.open(rebuilt_url)
-        shutil.copyfileobj(response, httpfile)
+        shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
 
     # 4) Fallback to urllib
     else:

@@ -12204,7 +11710,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
         else:
             opener = build_opener()
         response = opener.open(request)
-        shutil.copyfileobj(response, httpfile)
+        shutil.copyfileobj(response, httpfile, length=__filebuff_size__)
 
     # Reset file pointer to the start before returning
     httpfile.seek(0, 0)
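Note: all four HTTP download branches now stream through copyfileobj with the shared buffer size. A sketch of the requests branch (third-party requests package; the buffer size again stands in for __filebuff_size__):

    import shutil
    import tempfile

    import requests

    def fetch_to_tempfile(url, buffsize=1048576):
        tmp = tempfile.SpooledTemporaryFile()
        response = requests.get(url, timeout=(5, 30), stream=True)
        response.raw.decode_content = True  # let urllib3 undo gzip/deflate
        shutil.copyfileobj(response.raw, tmp, length=buffsize)
        tmp.seek(0, 0)
        return tmp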
@@ -12337,7 +11843,7 @@ def upload_file_to_http_file(
             fileobj.seek(0)
         except Exception:
             pass
-        shutil.copyfileobj(fileobj, buf)
+        shutil.copyfileobj(fileobj, buf, length=__filebuff_size__)
 
     _w('\r\n')
     _w('--' + boundary + '--\r\n')
@@ -12426,10 +11932,7 @@ if(haveparamiko):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()
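Note: the SFTP helpers get the same consolidation around ssh.connect. A sketch with paramiko (third-party), credentials being placeholders:

    import socket

    import paramiko

    def open_sftp(hostname, port=22, username=None, password=None):
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(hostname, port=port, username=username, password=password)
        except paramiko.ssh_exception.SSHException:
            return None  # handshake or auth failure
        except (socket.gaierror, socket.timeout):
            return None  # unresolvable host or connect timeout
        return ssh.open_sftp()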
@@ -12483,10 +11986,7 @@ if(haveparamiko):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftp = ssh.open_sftp()

@@ -12537,10 +12037,7 @@ if(havepysftp):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftpfile = MkTempFile()

@@ -12590,10 +12087,7 @@ if(havepysftp):
                         username=sftp_username, password=sftp_password)
         except paramiko.ssh_exception.SSHException:
             return False
-        except socket.gaierror:
-            log.info("Error With URL "+url)
-            return False
-        except socket.timeout:
+        except (socket.gaierror, socket.timeout):
             log.info("Error With URL "+url)
             return False
         sftpfile.seek(0, 0)