pyfoxfile-0.24.6-py3-none-any.whl → pyfoxfile-0.25.2-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pyfoxfile-0.24.6.data → pyfoxfile-0.25.2.data}/scripts/foxfile.py +12 -12
- {pyfoxfile-0.24.6.dist-info → pyfoxfile-0.25.2.dist-info}/METADATA +2 -2
- pyfoxfile-0.25.2.dist-info/RECORD +10 -0
- pyfoxfile.py +670 -1176
- pyfoxfile-0.24.6.dist-info/RECORD +0 -10
- {pyfoxfile-0.24.6.data → pyfoxfile-0.25.2.data}/scripts/foxneofile.py +0 -0
- {pyfoxfile-0.24.6.data → pyfoxfile-0.25.2.data}/scripts/neofoxfile.py +0 -0
- {pyfoxfile-0.24.6.dist-info → pyfoxfile-0.25.2.dist-info}/WHEEL +0 -0
- {pyfoxfile-0.24.6.dist-info → pyfoxfile-0.25.2.dist-info}/licenses/LICENSE +0 -0
- {pyfoxfile-0.24.6.dist-info → pyfoxfile-0.25.2.dist-info}/top_level.txt +0 -0
- {pyfoxfile-0.24.6.dist-info → pyfoxfile-0.25.2.dist-info}/zip-safe +0 -0
pyfoxfile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pyfoxfile.py - Last Update: 11/
+$FileInfo: pyfoxfile.py - Last Update: 11/6/2025 Ver. 0.25.2 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -73,6 +73,17 @@ except ImportError:
 except ImportError:
     import json

+testyaml = False
+try:
+    import oyaml as yaml
+    testyaml = True
+except ImportError:
+    try:
+        import yaml
+        testyaml = True
+    except ImportError:
+        testyaml = False
+
 try:
     import configparser
 except ImportError:
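The new shim prefers oyaml (an ordered-dict drop-in for PyYAML) and falls back to plain PyYAML, recording availability in testyaml rather than failing at import time. A minimal sketch of how such a flag is consumed (load_yaml_or_empty is illustrative, not a pyfoxfile function):

    def load_yaml_or_empty(text):
        # Degrade gracefully when no YAML backend is installed.
        if not testyaml:
            return {}
        return yaml.safe_load(text) or {}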
@@ -115,6 +126,16 @@ else:
     bytes_type = bytes
     text_type = str

+# Text streams (as provided by Python)
+PY_STDIN_TEXT = sys.stdin
+PY_STDOUT_TEXT = sys.stdout
+PY_STDERR_TEXT = sys.stderr
+
+# Binary-friendly streams (use .buffer on Py3, fall back on Py2)
+PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)
+PY_STDOUT_BUF = getattr(sys.stdout, "buffer", sys.stdout)
+PY_STDERR_BUF = getattr(sys.stderr, "buffer", sys.stderr)
+
 # Text vs bytes tuples you can use with isinstance()
 TEXT_TYPES = (basestring,)  # "str or unicode" on Py2, "str" on Py3
 BINARY_TYPES = (bytes,) if not PY2 else (str,)  # bytes on Py3, str on Py2
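The *_BUF names rely on getattr() with a fallback: Python 3 text streams expose a .buffer attribute that accepts bytes, while on Python 2 sys.stdout itself does, so the same name works on both. For example:

    import sys

    stdout_buf = getattr(sys.stdout, "buffer", sys.stdout)
    stdout_buf.write(b"raw bytes, no text encoding applied\n")
    stdout_buf.flush()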
@@ -211,12 +232,6 @@ if sys.version_info[0] == 2:
     except (NameError, AttributeError):
         pass

-# CRC32 import
-try:
-    from zlib import crc32
-except ImportError:
-    from binascii import crc32
-
 # Define FileNotFoundError for Python 2
 try:
     FileNotFoundError
@@ -251,9 +266,7 @@ py7zr_support = False
 try:
     import py7zr
     py7zr_support = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # TAR file checking
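This release repeatedly collapses paired handlers into except (ImportError, OSError); the OSError arm matters for packages whose native shared libraries fail to load even though the Python package itself is installed. A generic helper in the same spirit (optional_import is hypothetical, not part of pyfoxfile):

    import importlib

    def optional_import(name):
        """Return (module, available) without raising on a broken install."""
        try:
            return importlib.import_module(name), True
        except (ImportError, OSError):
            # ImportError: not installed; OSError: native library failed to load
            return None, False

    py7zr, py7zr_support = optional_import("py7zr")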
@@ -279,9 +292,7 @@ haveparamiko = False
 try:
     import paramiko
     haveparamiko = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # PySFTP support
@@ -289,9 +300,7 @@ havepysftp = False
 try:
     import pysftp
     havepysftp = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # Add the mechanize import check
@@ -299,9 +308,7 @@ havemechanize = False
 try:
     import mechanize
     havemechanize = True
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # Requests support
@@ -311,9 +318,7 @@ try:
     haverequests = True
     import urllib3
     logging.getLogger("urllib3").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # HTTPX support
@@ -323,9 +328,7 @@ try:
     havehttpx = True
     logging.getLogger("httpx").setLevel(logging.WARNING)
     logging.getLogger("httpcore").setLevel(logging.WARNING)
-except ImportError:
-    pass
-except OSError:
+except (ImportError, OSError):
     pass

 # HTTP and URL parsing
@@ -416,9 +419,14 @@ __include_defaults__ = True
 __use_inmemfile__ = True
 __use_spoolfile__ = False
 __use_spooldir__ = tempfile.gettempdir()
-
-
+BYTES_PER_KiB = 1024
+BYTES_PER_MiB = 1024 * BYTES_PER_KiB
+# Spool: not tiny, but won't blow up RAM if many are in use
+DEFAULT_SPOOL_MAX = 4 * BYTES_PER_MiB  # 4 MiB per spooled temp file
 __spoolfile_size__ = DEFAULT_SPOOL_MAX
+# Buffer: bigger than stdlib default (16 KiB), but still modest
+DEFAULT_BUFFER_MAX = 256 * BYTES_PER_KiB  # 256 KiB copy buffer
+__filebuff_size__ = DEFAULT_BUFFER_MAX
 __program_name__ = "Py"+__file_format_default__
 __use_env_file__ = True
 __use_ini_file__ = True
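The new constants feed tempfile.SpooledTemporaryFile, which buffers in memory until max_size is exceeded and then rolls over to a real temp file. A stdlib-only demonstration of the 4 MiB threshold:

    import tempfile

    BYTES_PER_MiB = 1024 * 1024
    spool = tempfile.SpooledTemporaryFile(max_size=4 * BYTES_PER_MiB)
    spool.write(b"x" * (5 * BYTES_PER_MiB))  # past the threshold: now backed by disk
    spool.seek(0)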
@@ -621,6 +629,8 @@ if __include_defaults__:
     add_format(__file_format_multi_dict__, "狐ファイル", "狐ファイル", ".狐", "KitsuneFairu")
     add_format(__file_format_multi_dict__, "狐狸文件", "狐狸文件", ".狐狸", "HúlíWénjiàn")
     add_format(__file_format_multi_dict__, "여우파일", "여우파일", ".여우", "YeouPa-il")
+    add_format(__file_format_multi_dict__, "基次内法伊鲁", "基次内法伊鲁", ".基次内", "JīCìNèiFǎYīLǔ")
+    add_format(__file_format_multi_dict__, "키츠네파일", "키츠네파일", ".키츠네", "KicheunePa-il")

 # Pick a default if current default key is not present
 if __file_format_default__ not in __file_format_multi_dict__:
@@ -640,12 +650,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyFoxFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 25, 2, "RC 1", 1)
+__version_date_info__ = (2025, 11, 6, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 30b50b3fe5848bbe7a8ffa021b798be5dd67425e $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
@@ -796,7 +806,7 @@ geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prour
 if(platform.python_implementation() != ""):
     py_implementation = platform.python_implementation()
 if(platform.python_implementation() == ""):
-    py_implementation = "
+    py_implementation = "CPython"
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
@@ -2059,7 +2069,7 @@ def MkTempFile(data=None,
                suffix="",
                use_spool=__use_spoolfile__,
                spool_max=__spoolfile_size__,
-               spool_dir=__use_spooldir__:
+               spool_dir=__use_spooldir__):
     """
     Return a file-like handle with consistent behavior on Py2.7 and Py3.x.

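With the missing closing parenthesis restored, the function is callable again. A sketch of a typical call, with behavior assumed from the docstring and the from_bytes() call sites later in this diff:

    tmp = MkTempFile(b"hello world")  # seeded handle, assumed positioned at offset 0
    assert tmp.read() == b"hello world"
    tmp.close()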
@@ -2384,7 +2394,7 @@ def GetTotalSize(file_list):
         try:
             total_size += os.path.getsize(item)
         except OSError:
-
+            PY_STDERR_TEXT.write("Error accessing file {}: {}\n".format(item, e))
     return total_size

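Note that the added message formats e, but the unchanged except OSError: clause never binds that name, so the handler would raise NameError when reached. A standalone sketch of the apparent intent, with the binding made explicit:

    import os
    import sys

    def get_total_size(file_list):
        total_size = 0
        for item in file_list:
            try:
                total_size += os.path.getsize(item)
            except OSError as e:  # bind e so the message can use it
                sys.stderr.write("Error accessing file {}: {}\n".format(item, e))
        return total_size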
@@ -2621,7 +2631,7 @@ class ZlibFile(object):
         scanned_leading = 0  # for tolerant header scan

         while True:
-            data = self.file.read(
+            data = self.file.read(__filebuff_size__)  # __filebuff_size__ blocks (256 KiB default)
             if not data:
                 if d is not None:
                     self._spool.write(d.flush())
@@ -2779,7 +2789,7 @@ class ZlibFile(object):

         # Buffer and compress in chunks to limit memory
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # flush at __filebuff_size__ (256 KiB default)
             chunk = self._compressor.compress(bytes(self._write_buf))
             if chunk:
                 self.file.write(chunk)
@@ -2889,7 +2899,7 @@ class ZlibFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)

     # compatibility aliases for unwrapping utilities
@@ -2925,7 +2935,7 @@ def compress_bytes(payload, level=6, wbits=15, text=False, **kw):
     out = compress_bytes(b"hello")
     out = compress_bytes(u"hello\n", text=True, encoding="utf-8", newline="\n")
     """
-    bio =
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     f = ZlibFile(fileobj=bio, mode=mode, level=level, wbits=wbits, **kw)
     try:
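Per the docstring examples kept above, compress_bytes() with the default level=6 and wbits=15 should produce an ordinary zlib stream; assuming that equivalence holds, a stdlib round trip looks like this:

    import zlib

    blob = zlib.compress(b"hello", 6)        # what compress_bytes(b"hello") should emit
    assert zlib.decompress(blob) == b"hello"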
@@ -3084,7 +3094,7 @@ class GzipFile(object):

         self._spool = tempfile.SpooledTemporaryFile(max_size=self.spool_threshold)

-        CHUNK =
+        CHUNK = __filebuff_size__
         pending = b""
         d = None
         absolute_offset = 0
@@ -3247,7 +3257,7 @@ class GzipFile(object):

         # Stage and compress in chunks
         self._write_buf += data
-        if len(self._write_buf) >= (
+        if len(self._write_buf) >= (__filebuff_size__):  # flush at __filebuff_size__ (256 KiB default)
             out = self._compressor.compress(bytes(self._write_buf))
             if out:
                 self.file.write(out)
@@ -3347,7 +3357,7 @@ class GzipFile(object):
         """
         if not isinstance(data, (bytes, bytearray, memoryview)):
             raise TypeError("from_bytes() expects a bytes-like object")
-        bio =
+        bio = MkTempFile(bytes(data) if not isinstance(data, bytes) else data)
         return cls(fileobj=bio, mode=mode, **kw)

     # compatibility aliases for unwrapping utilities
@@ -3389,7 +3399,7 @@ def gzip_compress_bytes(payload, level=6, text=False, **kw):
     - text=False: 'payload' must be bytes-like; written via GzipFile('wb')
     You can pass newline/encoding/errors to control text encoding.
     """
-    bio =
+    bio = MkTempFile()
     mode = 'wt' if text else 'wb'
     gf = GzipFile(fileobj=bio, mode=mode, level=level, **kw)
     try:
@@ -3621,280 +3631,6 @@ def crc_generic(msg, width, poly, init, xorout, refin, refout):
         crc = _reflect(crc, width)
     return (crc ^ xorout) & mask

-# =========================
-# Named CRCs
-# =========================
-# CRC-16/ANSI (ARC/MODBUS family with init=0xFFFF by default)
-def crc16_ansi(msg, initial_value=0xFFFF):
-    return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)
-
-def crc16_ibm(msg, initial_value=0xFFFF):
-    return crc16_ansi(msg, initial_value)
-
-def crc16(msg):
-    return crc16_ansi(msg, 0xFFFF)
-
-def crc16_ccitt(msg, initial_value=0xFFFF):
-    # CCITT-FALSE
-    return crc_generic(msg, 16, 0x1021, initial_value & 0xFFFF, 0x0000, False, False)
-
-def crc16_x25(msg):
-    return crc_generic(msg, 16, 0x1021, 0xFFFF, 0xFFFF, True, True)
-
-def crc16_kermit(msg):
-    return crc_generic(msg, 16, 0x1021, 0x0000, 0x0000, True, True)
-
-def crc64_ecma(msg, initial_value=0x0000000000000000):
-    return crc_generic(msg, 64, 0x42F0E1EBA9EA3693,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0x0000000000000000, False, False)
-
-def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
-    return crc_generic(msg, 64, 0x000000000000001B,
-                       initial_value & 0xFFFFFFFFFFFFFFFF,
-                       0xFFFFFFFFFFFFFFFF, True, True)
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
-# =========================
-# Public checksum API
-# =========================
-def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
-    or a single field) and compute the requested checksum. Returns lowercase hex.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    delim = formatspecs.get('format_delimiter', u"\0")
-    hdr_bytes = _serialize_header_fields(inlist or [], delim)
-    if encodedata and not isinstance(hdr_bytes, (bytes, bytearray, memoryview)):
-        hdr_bytes = _to_bytes(hdr_bytes)
-    hdr_bytes = bytes(hdr_bytes)
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, hdr_bytes)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    """
-    Accepts bytes/str/file-like.
-    - Hashlib algos: streamed in 1 MiB chunks.
-    - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
-    - Falls back to one-shot for non-file-like inputs.
-    """
-    checksumtype_norm = (checksumtype or "crc32").lower()
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
-
-    # file-like streaming
-    if hasattr(instr, "read"):
-        # hashlib
-        if algo_key not in _CRC_SPECS and CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-            h = hashlib.new(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                h.update(chunk)
-            return h.hexdigest().lower()
-
-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
-        # not known streaming algo: fallback to one-shot bytes
-        data = instr.read()
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-    else:
-        data = _to_bytes(instr) if (encodedata or not isinstance(instr, (bytes, bytearray, memoryview))) else instr
-        data = bytes(data)
-
-    # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(data)
-        return h.hexdigest().lower()
-
-    return "0"
-
-def ValidateHeaderChecksum(inlist=None, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
-    want = (inchecksum or "0").strip().lower()
-    if want.startswith("0x"):
-        want = want[2:]
-    return hmac.compare_digest(want, calc)
-
-
-# =========================
-# Incremental CRC context
-# =========================
-CRCSpec = namedtuple("CRCSpec", "width poly init xorout refin refout")
-
-_CRC_SPECS = {
-    "crc16_ansi": CRCSpec(16, 0x8005, 0xFFFF, 0x0000, True, True),
-    "crc16_ccitt": CRCSpec(16, 0x1021, 0xFFFF, 0x0000, False, False),
-    "crc16_x25": CRCSpec(16, 0x1021, 0xFFFF, 0xFFFF, True, True),
-    "crc16_kermit":CRCSpec(16, 0x1021, 0x0000, 0x0000, True, True),
-    "crc64_ecma": CRCSpec(64, 0x42F0E1EBA9EA3693, 0x0000000000000000, 0x0000000000000000, False, False),
-    "crc64_iso": CRCSpec(64, 0x000000000000001B, 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF, True, True),
-}
-
 # --- helpers --------------------------------------------------------------

 try:
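The deleted helpers were thin wrappers over crc_generic(), which this hunk keeps in place. Callers that relied on them can reproduce the exact behavior by inlining the parameter sets copied from the removed definitions, for example:

    # Parameter sets taken verbatim from the removed wrappers.
    def crc16_ansi(msg, initial_value=0xFFFF):
        return crc_generic(msg, 16, 0x8005, initial_value & 0xFFFF, 0x0000, True, True)

    def crc64_iso(msg, initial_value=0xFFFFFFFFFFFFFFFF):
        return crc_generic(msg, 64, 0x000000000000001B,
                           initial_value & 0xFFFFFFFFFFFFFFFF,
                           0xFFFFFFFFFFFFFFFF, True, True)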
@@ -3935,206 +3671,15 @@ def _bytes_to_int(b):
         value = (value << 8) | ch
     return value

-
-# --- your existing CRCContext (unchanged) ---------------------------------
-
-class CRCContext(object):
-    __slots__ = ("spec", "table", "mask", "shift", "crc")
-
-    def __init__(self, spec):
-        self.spec = spec
-        self.table = _build_table(spec.width, spec.poly, spec.refin)
-        self.mask = (1 << spec.width) - 1
-        self.shift = spec.width - 8
-        self.crc = spec.init & self.mask
-
-    def update(self, data):
-        if not isinstance(data, (bytes, bytearray, memoryview)):
-            data = bytes(bytearray(data))
-        buf = _mv_tobytes(memoryview(data))
-        if self.spec.refin:
-            c = self.crc
-            tbl = self.table
-            for b in buf:
-                if not isinstance(b, int):  # Py2
-                    b = ord(b)
-                c = tbl[(c ^ b) & 0xFF] ^ (c >> 8)
-            self.crc = c & self.mask
-        else:
-            c = self.crc
-            tbl = self.table
-            sh = self.shift
-            msk = self.mask
-            for b in buf:
-                if not isinstance(b, int):
-                    b = ord(b)
-                c = tbl[((c >> sh) ^ b) & 0xFF] ^ ((c << 8) & msk)
-            self.crc = c & msk
-        return self
-
-    def digest_int(self):
-        c = self.crc
-        if self.spec.refout ^ self.spec.refin:
-            c = _reflect(c, self.spec.width)
-        return (c ^ self.spec.xorout) & self.mask
-
-    def hexdigest(self):
-        width_hex = (self.spec.width + 3) // 4
-        return format(self.digest_int(), "0{}x".format(width_hex)).lower()
-
-
-# --- hashlib-backed implementation ---------------------------------------
-
-class _HashlibCRCWrapper(object):
-    """
-    Wrap a hashlib object to present the same interface as CRCContext
-    (update, digest_int, hexdigest).
-
-    Assumes the hashlib algorithm already implements the exact CRC
-    specification (refin/refout/xorout/etc.).
-    """
-    __slots__ = ("_h", "spec", "mask", "width_hex")
-
-    def __init__(self, algo_name, spec):
-        self._h = hashlib.new(algo_name)
-        self.spec = spec
-        self.mask = (1 << spec.width) - 1
-        self.width_hex = (spec.width + 3) // 4
-
-    def update(self, data):
-        self._h.update(_coerce_bytes(data))
-        return self
-
-    def digest_int(self):
-        # Convert final digest bytes to an integer and mask to width
-        value = _bytes_to_int(self._h.digest())
-        return value & self.mask
-
-    def hexdigest(self):
-        h = self._h.hexdigest().lower()
-        # Normalize to the same number of hex digits as CRCContext
-        if len(h) < self.width_hex:
-            h = ("0" * (self.width_hex - len(h))) + h
-        elif len(h) > self.width_hex:
-            h = h[-self.width_hex:]
-        return h
-
-
-# --- public class: choose hashlib or fallback -----------------------------
-
-class CRC(object):
-    """
-    CRC wrapper that uses hashlib if available, otherwise falls back to
-    the pure-Python CRCContext.
-
-    spec.hashlib_name (preferred) or spec.name is used as the hashlib
-    algorithm name, e.g. 'crc32', 'crc32c', etc.
-    """
-
-    __slots__ = ("spec", "_impl")
-
-    def __init__(self, spec):
-        self.spec = spec
-
-        algo_name = getattr(spec, "hashlib_name", None) or getattr(spec, "name", None)
-        impl = None
-
-        if algo_name and algo_name in _ALGORITHMS_AVAILABLE:
-            # Use hashlib-backed implementation
-            impl = _HashlibCRCWrapper(algo_name, spec)
-        else:
-            # Fallback to your pure-Python implementation
-            impl = CRCContext(spec)
-
-        self._impl = impl
-
-    def update(self, data):
-        self._impl.update(data)
-        return self
-
-    def digest_int(self):
-        return self._impl.digest_int()
-
-    def hexdigest(self):
-        return self._impl.hexdigest()
-
-def crc_context_from_name(name_norm):
-    spec = _CRC_SPECS.get(name_norm)
-    if spec is None:
-        raise KeyError("Unknown CRC spec: {}".format(name_norm))
-    return CRCContext(spec)
-
-# =========================
-# Dispatch helpers
-# =========================
-_CRC_ALIASES = {
-    # keep your historical behaviors
-    "crc16": "crc16_ansi",
-    "crc16_ibm": "crc16_ansi",
-    "crc16_ansi": "crc16_ansi",
-    "crc16_modbus": "crc16_ansi",
-    "crc16_ccitt": "crc16_ccitt",
-    "crc16_ccitt_false": "crc16_ccitt",
-    "crc16_x25": "crc16_x25",
-    "crc16_kermit": "crc16_kermit",
-    "crc64": "crc64_iso",
-    "crc64_iso": "crc64_iso",
-    "crc64_ecma": "crc64_ecma",
-    "adler32": "adler32",
-    "crc32": "crc32",
-}
-
-_CRC_WIDTH = {
-    "crc16_ansi": 16,
-    "crc16_ccitt": 16,
-    "crc16_x25": 16,
-    "crc16_kermit": 16,
-    "crc64_iso": 64,
-    "crc64_ecma": 64,
-    "adler32": 32,
-    "crc32": 32,
-}
-
-def _crc_compute(algo_key, data_bytes):
-    if algo_key == "crc16_ansi":
-        return crc16_ansi(data_bytes) & 0xFFFF
-    if algo_key == "crc16_ccitt":
-        return crc16_ccitt(data_bytes) & 0xFFFF
-    if algo_key == "crc16_x25":
-        return crc16_x25(data_bytes) & 0xFFFF
-    if algo_key == "crc16_kermit":
-        return crc16_kermit(data_bytes) & 0xFFFF
-    if algo_key == "crc64_iso":
-        return crc64_iso(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "crc64_ecma":
-        return crc64_ecma(data_bytes) & 0xFFFFFFFFFFFFFFFF
-    if algo_key == "adler32":
-        return zlib.adler32(data_bytes) & 0xFFFFFFFF
-    if algo_key == "crc32":
-        return zlib.crc32(data_bytes) & 0xFFFFFFFF
-    raise KeyError(algo_key)
-
-try:
-    hashlib_guaranteed
-except NameError:
-    hashlib_guaranteed = set(a.lower() for a in hashlib.algorithms_available)
-
-def CheckSumSupportAlt(name, guaranteed):
-    try:
-        return name.lower() in guaranteed
-    except Exception:
-        return False
-
 # =========================
 # Public checksum API
 # =========================
-def GetHeaderChecksum(inlist=None, checksumtype="
+def GetHeaderChecksum(inlist=None, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Serialize header fields (list/tuple => joined with delimiter + trailing delimiter;
     or a single field) and compute the requested checksum. Returns lowercase hex.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()

     delim = formatspecs.get('format_delimiter', u"\0")
     hdr_bytes = _serialize_header_fields(inlist or [], delim)
@@ -4142,34 +3687,30 @@ def GetHeaderChecksum(inlist=None, checksumtype="crc32", encodedata=True, format
     hdr_bytes = _to_bytes(hdr_bytes)
     hdr_bytes = bytes(hdr_bytes)

-    if algo_key
-
-
-
-    if CheckSumSupportAlt(algo_key, hashlib_guaranteed):
-        h = hashlib.new(algo_key)
-        h.update(hdr_bytes)
-        return h.hexdigest().lower()
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
+        h = hashlib.new(algo_key)
+        h.update(hdr_bytes)
+        return h.hexdigest().lower()

     return "0"

-def GetFileChecksum(
+def GetFileChecksum(inbytes, checksumtype="md5", encodedata=True, formatspecs=__file_format_dict__):
     """
     Accepts bytes/str/file-like.
     - Hashlib algos: streamed in 1 MiB chunks.
     - CRC algos (crc16_ansi/ccitt/x25/kermit, crc64_iso/ecma): streamed via CRCContext for file-like.
     - Falls back to one-shot for non-file-like inputs.
     """
-
-    algo_key = _CRC_ALIASES.get(checksumtype_norm, checksumtype_norm)
+    algo_key = (checksumtype or "md5").lower()

     # file-like streaming
-    if hasattr(
+    if hasattr(inbytes, "read"):
         # hashlib
-
+
+        if CheckSumSupport(algo_key, hashlib_guaranteed):
             h = hashlib.new(algo_key)
             while True:
-                chunk =
+                chunk = inbytes.read(__filebuff_size__)
                 if not chunk:
                     break
                 if not isinstance(chunk, (bytes, bytearray, memoryview)):
@@ -4177,49 +3718,31 @@ def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__
                 h.update(chunk)
             return h.hexdigest().lower()

-        # CRC streaming via context
-        if algo_key in _CRC_SPECS:
-            ctx = crc_context_from_name(algo_key)
-            while True:
-                chunk = instr.read(1 << 20)
-                if not chunk:
-                    break
-                if not isinstance(chunk, (bytes, bytearray, memoryview)):
-                    chunk = bytes(bytearray(chunk))
-                ctx.update(chunk)
-            return ctx.hexdigest()
-
         # not known streaming algo: fallback to one-shot bytes
-        data =
+        data = inbytes.read()
         if not isinstance(data, (bytes, bytearray, memoryview)):
             data = bytes(bytearray(data))
     else:
-        data = _to_bytes(
+        data = _to_bytes(inbytes) if (encodedata or not isinstance(inbytes, (bytes, bytearray, memoryview))) else inbytes
         data = bytes(data)

     # one-shot
-    if algo_key in _CRC_SPECS:
-        return crc_context_from_name(algo_key).update(data).hexdigest()
-
-    if algo_key in _CRC_WIDTH:
-        n = _crc_compute(algo_key, data)
-        return _hex_pad(n, _CRC_WIDTH[algo_key])

-    if
+    if CheckSumSupport(algo_key, hashlib_guaranteed):
         h = hashlib.new(algo_key)
         h.update(data)
         return h.hexdigest().lower()

     return "0"

-def ValidateHeaderChecksum(inlist=None, checksumtype="
+def ValidateHeaderChecksum(inlist=None, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetHeaderChecksum(inlist, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
         want = want[2:]
     return hmac.compare_digest(want, calc)

-def ValidateFileChecksum(infile, checksumtype="
+def ValidateFileChecksum(infile, checksumtype="md5", inchecksum="0", formatspecs=__file_format_dict__):
     calc = GetFileChecksum(infile, checksumtype, True, formatspecs)
     want = (inchecksum or "0").strip().lower()
     if want.startswith("0x"):
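After these removals, both checksum entry points reduce to hashlib with an "md5" default. A self-contained sketch of the streaming pattern they now share (stream_digest is illustrative; the buffer size mirrors __filebuff_size__):

    import hashlib

    def stream_digest(fp, algo="md5", bufsize=256 * 1024):
        h = hashlib.new(algo)
        while True:
            chunk = fp.read(bufsize)
            if not chunk:
                break
            h.update(chunk)
        return h.hexdigest().lower()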
@@ -4266,66 +3789,6 @@ def GetDataFromArrayAlt(structure, path, default=None):
     return element


-def GetHeaderChecksum(inlist=[], checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    fileheader = AppendNullBytes(inlist, formatspecs['format_delimiter']) if isinstance(
-        inlist, list) else AppendNullByte(inlist, formatspecs['format_delimiter'])
-    if encodedata and hasattr(fileheader, "encode"):
-        fileheader = fileheader.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](fileheader)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(fileheader)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def GetFileChecksum(instr, checksumtype="crc32", encodedata=True, formatspecs=__file_format_dict__):
-    if encodedata and hasattr(instr, "encode"):
-        instr = instr.encode('UTF-8')
-    checksum_methods = {
-        "crc16": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ansi": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ibm": lambda data: format(crc16(data) & 0xffff, '04x').lower(),
-        "crc16_ccitt": lambda data: format(crc16_ccitt(data) & 0xffff, '04x').lower(),
-        "adler32": lambda data: format(zlib.adler32(data) & 0xffffffff, '08x').lower(),
-        "crc32": lambda data: format(crc32(data) & 0xffffffff, '08x').lower(),
-        "crc64_ecma": lambda data: format(crc64_ecma(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-        "crc64_iso": lambda data: format(crc64_iso(data) & 0xffffffffffffffff, '016x').lower(),
-    }
-    if checksumtype in checksum_methods:
-        return checksum_methods[checksumtype](instr)
-    elif CheckSumSupportAlt(checksumtype, hashlib_guaranteed):
-        checksumoutstr = hashlib.new(checksumtype)
-        checksumoutstr.update(instr)
-        return checksumoutstr.hexdigest().lower()
-    return format(0, 'x').lower()
-
-
-def ValidateHeaderChecksum(inlist=[], checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    infileheadercshex = GetHeaderChecksum(
-        inlist, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == infileheadercshex
-
-
-def ValidateFileChecksum(infile, checksumtype="crc32", inchecksum="0", formatspecs=__file_format_dict__):
-    catinfilecshex = GetFileChecksum(
-        infile, checksumtype, True, formatspecs).lower()
-    return inchecksum.lower() == catinfilecshex
-
-
 # ========= pushback-aware delimiter reader =========
 class _DelimiterReader(object):
     """
@@ -4658,7 +4121,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     if(not hasattr(fp, "read")):
         return False
     delimiter = formatspecs['format_delimiter']
-    fheaderstart = fp.tell()
     if(formatspecs['new_style']):
         HeaderOut = ReadFileHeaderDataBySize(fp, delimiter)
     else:
@@ -4681,22 +4143,74 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
     fjsonchecksumtype = HeaderOut[30]
     fjsonchecksum = HeaderOut[31]
     fjsoncontent = {}
-
-
-
-
-    except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
             try:
-
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
             except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
-
-
-
-
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
         fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4710,8 +4224,6 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fcs + "' != " + "'" + newfcs + "'")
         return False
-    fhend = fp.tell() - 1
-    fcontentstart = fp.tell()
     fcontents = MkTempFile()
     if(fsize > 0 and not listonly):
         if(fcompression == "none" or fcompression == "" or fcompression == "auto"):
@@ -4725,9 +4237,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             fp.seek(fcsize, 1)
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
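Checksum comparisons now go through hmac.compare_digest, which takes time independent of how many leading characters match, instead of a short-circuiting ==. For example:

    import hmac

    expected = "d41d8cd98f00b204e9800998ecf8427e"  # MD5 of the empty string
    computed = "d41d8cd98f00b204e9800998ecf8427e"
    ok = hmac.compare_digest(expected.lower(), computed.lower())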
@@ -4740,10 +4252,9 @@ def ReadFileHeaderDataWithContent(fp, listonly=False, uncompress=True, skipcheck
             cfcontents = UncompressFileAlt(fcontents, formatspecs)
             cfcontents.seek(0, 0)
             fcontents = MkTempFile()
-            shutil.copyfileobj(cfcontents, fcontents)
+            shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
             cfcontents.close()
             fcontents.seek(0, 0)
-    fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
         if(abs(fseeknextasnum) == 0):
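shutil.copyfileobj takes the per-read buffer size as its third (length) argument; passing __filebuff_size__ keeps these copies on the module's 256 KiB buffer instead of the shutil default. A stdlib-only demonstration:

    import io
    import shutil

    src = io.BytesIO(b"x" * (1024 * 1024))
    dst = io.BytesIO()
    shutil.copyfileobj(src, dst, length=256 * 1024)  # copy in 256 KiB reads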
@@ -4851,6 +4362,33 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         fprejsoncontent = ""
         fjsonrawcontent = fprejsoncontent
         fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         flisttmp = MkTempFile()
@@ -4873,7 +4411,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
     fp.seek(len(delimiter), 1)
     fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -4905,9 +4443,9 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontents.seek(0, 0)
-    if(fccs
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -4921,7 +4459,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
@@ -5037,6 +4575,33 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         fprejsoncontent = ""
         fjsonrawcontent = fprejsoncontent
         fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
     elif(fjsontype=="list"):
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
         flisttmp = MkTempFile()
@@ -5058,7 +4623,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             pass
     fp.seek(len(delimiter), 1)
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
-    if(
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
                         fname + " at offset " + str(fheaderstart))
         VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
@@ -5090,8 +4655,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(
-        fcontents
-    if(fccs
+        fcontents, HeaderOut[-3].lower(), False, formatspecs)
+    if(not hmac.compare_digest(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
         VerbosePrintOut("'" + fccs + "' != " + "'" + newfccs + "'")
@@ -5105,11 +4670,11 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fcontents, formatspecs)
         cfcontents.seek(0, 0)
         fcontents = MkTempFile()
-        shutil.copyfileobj(cfcontents, fcontents)
+        shutil.copyfileobj(cfcontents, fcontents, length=__filebuff_size__)
         cfcontents.close()
         fcontents.seek(0, 0)
         fccs = GetFileChecksum(
-            fcontents
+            fcontents, HeaderOut[-3].lower(), False, formatspecs)
     fcontentend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5143,9 +4708,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5174,7 +4737,30 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
-    fnumfiles = int(inheader[
+    fnumfiles = int(inheader[6], 16)
+    outfseeknextfile = inheaderdata[7]
+    fjsonsize = int(inheaderdata[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fp.read(fjsonsize)
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
     countnum = 0
     flist = []
     while(countnum < fnumfiles):
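The restored branches implement the format's seek directive: "+N" and "-N" are relative seeks and a bare "N" is an absolute offset. A compact restatement of the same dispatch (apply_seek_directive is illustrative, not a pyfoxfile function):

    def apply_seek_directive(fp, directive):
        if directive.startswith("+") or directive.startswith("-"):
            fp.seek(int(directive), 1)  # relative; int() handles the sign
        else:
            fp.seek(int(directive), 0)  # absolute offset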
@@ -5194,9 +4780,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5215,10 +4799,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -5237,7 +4821,106 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     fnumfields = int(inheader[1], 16)
     fhencoding = inheader[2]
     fostype = inheader[3]
-
+    fpythontype = inheader[4]
+    fprojectname = inheader[4]
+    fnumfiles = int(inheader[6], 16)
+    fseeknextfile = inheader[7]
+    fjsontype = inheader[8]
+    fjsonlen = int(inheader[9], 16)
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fjsoncontent = {}
+    fjstart = fp.tell()
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
+    fjend = fp.tell()
+    if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
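The JSON metadata block may be stored base64-encoded or as plain text, so the reader tries the encoded form first and falls back to parsing the raw string, ending with an empty dict when neither works. A self-contained sketch of the same decode ladder (JSONDecodeError subclasses ValueError, which keeps the fallback portable):

    import base64
    import binascii
    import json

    def decode_json_field(raw_text):
        # Try base64 -> UTF-8 -> JSON first, then plain JSON, then give up.
        try:
            return json.loads(base64.b64decode(raw_text.encode("UTF-8")).decode("UTF-8"))
        except (binascii.Error, UnicodeDecodeError, ValueError):
            try:
                return json.loads(raw_text)
            except ValueError:
                return {}

    print(decode_json_field(base64.b64encode(b'{"a": 1}').decode()))  # -> {'a': 1}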
@@ -5250,7 +4933,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
         return False
     formversions = re.search('(.*?)(\\d+)', formstring).groups()
     fcompresstype = ""
-    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
+    outlist = {'fnumfiles': fnumfiles, 'ffilestart': filestart, 'fformat': formversions[0], 'fcompression': fcompresstype, 'fencoding': fhencoding, 'fversion': formversions[1], 'fostype': fostype, 'fprojectname': fprojectname, 'fimptype': fpythontype, 'fheadersize': fheadsize, 'fsize': CatSizeEnd, 'fnumfields': fnumfields + 2, 'fformatspecs': formatspecs, 'fseeknextfile': fseeknextfile, 'fchecksumtype': fprechecksumtype, 'fheaderchecksum': fprechecksum, 'fjsonchecksumtype': fjsonchecksumtype, 'fjsontype': fjsontype, 'fjsonlen': fjsonlen, 'fjsonsize': fjsonsize, 'fjsonrawdata': fjsonrawcontent, 'fjsondata': fjsoncontent, 'fjstart': fjstart, 'fjend': fjend, 'fjsonchecksum': fjsonchecksum, 'frawheader': [formstring] + inheader, 'fextrafields': fnumextrafields, 'fextrafieldsize': fnumextrafieldsize, 'fextradata': fextrafieldslist, 'ffilelist': []}
     if (seekstart < 0) or (seekstart > fnumfiles):
         seekstart = 0
     if (seekend == 0) or (seekend > fnumfiles) or (seekend < seekstart):
@@ -5278,7 +4961,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
             prejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
             fp.seek(len(delimiter), 1)
             prejsonfcs = GetFileChecksum(prejsoncontent, prefjsonchecksumtype, True, formatspecs)
-            if(prefjsonchecksum != prejsonfcs and not skipchecksum):
+            if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
                 VerbosePrintOut("File JSON Data Checksum Error with file " +
                                 prefname + " at offset " + str(prefhstart))
                 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5286,7 +4969,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
             prenewfcs = GetHeaderChecksum(
                 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
             prefcs = preheaderdata[-2]
-            if(prefcs != prenewfcs and not skipchecksum):
+            if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
                 VerbosePrintOut("File Header Checksum Error with file " +
                                 prefname + " at offset " + str(prefhstart))
                 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5302,10 +4985,10 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
                 prefcontents.write(fp.read(prefsize))
                 prefcontents.seek(0, 0)
                 prenewfccs = GetFileChecksum(
-                    prefcontents)
+                    prefcontents, preheaderdata[-3].lower(), False, formatspecs)
                 prefccs = preheaderdata[-1]
                 pyhascontents = True
-                if(prefccs != prenewfccs and not skipchecksum):
+                if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
                     VerbosePrintOut("File Content Checksum Error with file " +
                                     prefname + " at offset " + str(prefcontentstart))
                     VerbosePrintOut("'" + prefccs +
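All of the `!=` digest comparisons above are replaced with `hmac.compare_digest`, which runs in constant time regardless of how many leading characters match, so a verifier cannot be timed to recover a checksum byte by byte. A minimal sketch with hex digests, as the format stores them:

    import hashlib
    import hmac

    def digests_match(stored_hex, data, algo="md5"):
        # Recompute the digest and compare in constant time.
        computed_hex = hashlib.new(algo, data).hexdigest()
        return hmac.compare_digest(stored_hex.lower(), computed_hex.lower())

    assert digests_match(hashlib.md5(b"abc").hexdigest(), b"abc")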
@@ -5351,9 +5034,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     curloc = filestart
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
     CatSize = fp.tell()
     CatSizeEnd = CatSize
@@ -5372,10 +5053,10 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     else:
         inheader = ReadFileHeaderDataWoSize(
             fp, formatspecs['format_delimiter'])
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
     fextrafieldslist = []
-    extrastart =
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     while(extrastart < extraend):
         fextrafieldslist.append(inheader[extrastart])
@@ -5392,9 +5073,40 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-
-
+    fnumfiles = int(inheader[6], 16)
+    fseeknextfile = inheaderdata[7]
+    fjsontype = int(inheader[8], 16)
+    fjsonlen = int(inheader[9], 16)
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fjsoncontent = {}
+    fjstart = fp.tell()
+    fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+    fjend = fp.tell()
+    if(re.findall("^\\+([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^\\-([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall("^([0-9]+)", fseeknextfile)):
+        fseeknextasnum = int(fseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
@@ -5440,7 +5152,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
             prefprejsoncontent = fp.read(prefjsonsize).decode("UTF-8")
             fp.seek(len(delimiter), 1)
             prejsonfcs = GetFileChecksum(prefprejsoncontent, prefjsonchecksumtype, True, formatspecs)
-            if(prefjsonchecksum != prejsonfcs and not skipchecksum):
+            if(not hmac.compare_digest(prefjsonchecksum, prejsonfcs) and not skipchecksum):
                 VerbosePrintOut("File JSON Data Checksum Error with file " +
                                 prefname + " at offset " + str(prefhstart))
                 VerbosePrintOut("'" + prefjsonchecksum + "' != " + "'" + prejsonfcs + "'")
@@ -5448,7 +5160,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
             prenewfcs = GetHeaderChecksum(
                 preheaderdata[:-2], preheaderdata[-4].lower(), True, formatspecs)
             prefcs = preheaderdata[-2]
-            if(prefcs != prenewfcs and not skipchecksum):
+            if(not hmac.compare_digest(prefcs, prenewfcs) and not skipchecksum):
                 VerbosePrintOut("File Header Checksum Error with file " +
                                 prefname + " at offset " + str(prefhstart))
                 VerbosePrintOut("'" + prefcs + "' != " +
@@ -5469,7 +5181,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
                     prefcontents, preheaderdata[-3].lower(), False, formatspecs)
                 prefccs = preheaderdata[-1]
                 pyhascontents = True
-                if(prefccs != prenewfccs and not skipchecksum):
+                if(not hmac.compare_digest(prefccs, prenewfccs) and not skipchecksum):
                     VerbosePrintOut("File Content Checksum Error with file " +
                                     prefname + " at offset " + str(prefcontentstart))
                     VerbosePrintOut("'" + prefccs +
@@ -5510,24 +5222,17 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp = infile
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
         currentfilepos = fp.tell()
     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
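Reading an archive from "-" now drains standard input into a temporary buffer with one buffered `shutil.copyfileobj` call instead of a branching copy. A rough sketch of the helper shape, with an illustrative buffer size standing in for `__filebuff_size__`:

    import io
    import shutil

    def copy_to_rewound_buffer(src, bufsize=1024 * 1024):
        # Copy src in fixed-size chunks, then rewind for the caller.
        out = io.BytesIO()
        shutil.copyfileobj(src, out, length=bufsize)
        out.seek(0)
        return out

    buf = copy_to_rewound_buffer(io.BytesIO(b"archive bytes"))
    print(buf.read())  # -> b'archive bytes'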
@@ -5537,9 +5242,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp.write(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5548,9 +5251,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp = download_file_from_internet_file(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5558,9 +5259,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
     elif(isinstance(infile, FileLikeAdapter)):
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5570,9 +5269,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
         fp = open(infile, "rb")
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5623,9 +5320,7 @@ def ReadInFileWithContentToArray(infile, fmttype="auto", filestart=0, seekstart=
     currentinfilepos = infp.tell()
     try:
         infp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infp)
     outinfsize = infp.tell()
     infp.seek(currentinfilepos, 0)
@@ -5664,24 +5359,17 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp = infile
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
         currentfilepos = fp.tell()
     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5691,9 +5379,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp.write(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5702,9 +5388,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp = download_file_from_internet_file(infile)
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5712,9 +5396,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
     elif(isinstance(infile, FileLikeAdapter)):
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5724,9 +5406,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
         fp = open(infile, "rb")
         try:
             fp.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(fp)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(fp)
         outfsize = fp.tell()
         fp.seek(filestart, 0)
@@ -5777,9 +5457,7 @@ def ReadInFileWithContentToList(infile, fmttype="auto", filestart=0, seekstart=0
     currentinfilepos = infp.tell()
     try:
         infp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(infp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(infp)
     outinfsize = infp.tell()
     infp.seek(currentinfilepos, 0)
@@ -5864,12 +5542,7 @@ def AppendNullBytes(indata=None, delimiter=__file_format_dict__['format_delimite
 def _hex_lower(n):
     return format(int(n), 'x').lower()
 
-def AppendFileHeader(fp,
-                     numfiles,
-                     fencoding,
-                     extradata=None,
-                     checksumtype="crc32",
-                     formatspecs=__file_format_dict__):
+def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__):
     """
     Build and write the archive file header.
     Returns the same file-like 'fp' on success, or False on failure.
@@ -5917,24 +5590,44 @@ def AppendFileHeader(fp,
     # 4) core header fields before checksum:
     #    tmpoutlenhex, fencoding, platform.system(), fnumfiles
     fnumfiles_hex = _hex_lower(numfiles)
-
+    fjsontype = "json"
+    if(len(jsondata) > 0):
+        try:
+            fjsoncontent = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8")
+        except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+            fjsoncontent = "".encode("UTF-8")
+    else:
+        fjsoncontent = "".encode("UTF-8")
+    fjsonsize = format(len(fjsoncontent), 'x').lower()
+    fjsonlen = format(len(jsondata), 'x').lower()
+    tmpoutlist = []
+    tmpoutlist.append(fjsontype)
+    tmpoutlist.append(fjsonlen)
+    tmpoutlist.append(fjsonsize)
+    if(len(jsondata) > 0):
+        tmpoutlist.append(checksumtype[1])
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, checksumtype[1], True, formatspecs))
+    else:
+        tmpoutlist.append("none")
+        tmpoutlist.append(GetFileChecksum(fjsoncontent, "none", True, formatspecs))
     # Preserve your original "tmpoutlen" computation exactly
-    tmpoutlist
-
+    tmpoutlist.append(extrasizelen)
+    tmpoutlist.append(extrafields)
+    tmpoutlen = 8 + len(tmpoutlist) + len(xlist)
     tmpoutlenhex = _hex_lower(tmpoutlen)
 
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), fnumfiles_hex], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
     if xlist:
         fnumfilesa += AppendNullBytes(xlist, delimiter)
     # Append checksum type
-    fnumfilesa += AppendNullByte(checksumtype, delimiter)
+    fnumfilesa += AppendNullByte(checksumtype[0], delimiter)
 
     # 5) inner checksum over fnumfilesa
-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
     tmpfileoutstr = fnumfilesa + AppendNullByte(outfileheadercshex, delimiter)
 
     # 6) size of (tmpfileoutstr) excluding one delimiter, per your original math
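AppendFileHeader now emits a JSON metadata group ahead of the extra-data fields: a type tag, the element count and payload size as lowercase hex, and a checksum-type/checksum pair that degrades to "none" for an empty payload. A minimal sketch of assembling that field group; the hashlib call and the empty-string checksum in the "none" case are assumptions standing in for GetFileChecksum:

    import hashlib
    import json

    def build_json_field_group(jsondata, checksum_algo="md5"):
        payload = json.dumps(jsondata, separators=(',', ':')).encode("UTF-8") if jsondata else b""
        fields = ["json", format(len(jsondata), 'x'), format(len(payload), 'x')]
        if payload:
            fields += [checksum_algo, hashlib.new(checksum_algo, payload).hexdigest()]
        else:
            fields += ["none", ""]  # placeholder for the package's "none" checksum
        return fields, payload

    print(build_json_field_group({"note": "example"})[0])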
@@ -5947,7 +5640,7 @@ def AppendFileHeader(fp,
         + fnumfilesa
     )
 
-    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype, True, formatspecs)
+    outfileheadercshex = GetFileChecksum(fnumfilesa, checksumtype[0], True, formatspecs)
     fnumfilesa += AppendNullByte(outfileheadercshex, delimiter)
 
     # 8) final total size field (again per your original logic)
@@ -5955,10 +5648,11 @@ def AppendFileHeader(fp,
     formheaersizestr = AppendNullByte(formheaersize, delimiter)  # computed but not appended in original
     # Note: you computed 'formheaersizestr' but didn’t append it afterward in the original either.
    # Keeping that behavior for compatibility.
-
+    nullstrecd = formatspecs['format_delimiter'].encode('UTF-8')
+    outfileout = fnumfilesa + fjsoncontent + nullstrecd
     # 9) write and try to sync
     try:
-        fp.write(fnumfilesa)
+        fp.write(outfileout)
     except (OSError, io.UnsupportedOperation):
         return False
@@ -5979,21 +5673,21 @@ def AppendFileHeader(fp,
     return fp
 
 
-def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
+def MakeEmptyFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
     if(IsNestedDict(formatspecs) and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
     elif(IsNestedDict(formatspecs) and fmttype not in formatspecs):
         fmttype = __file_format_default__
         formatspecs = formatspecs[fmttype]
-    AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
     return fp
 
 
-def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype="crc32", formatspecs=__file_format_multi_dict__):
+def MakeEmptyFoxFilePointer(fp, fmttype=__file_format_default__, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__):
     return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)
 
 
-def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_multi_dict__, returnfp=False):
+def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_multi_dict__, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
             (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
@@ -6034,7 +5728,7 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         fp = CompressOpenFile(outfile, compresswholefile, compressionlevel)
     except PermissionError:
         return False
-    AppendFileHeader(fp, 0, "UTF-8", [], checksumtype, formatspecs)
+    AppendFileHeader(fp, 0, "UTF-8", [], {}, checksumtype, formatspecs)
     if(outfile == "-" or outfile is None or hasattr(outfile, "read") or hasattr(outfile, "write")):
         fp = CompressOpenFileAlt(
             fp, compression, compressionlevel, compressionuselist, formatspecs)
@@ -6042,18 +5736,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     if(outfile == "-"):
         fp.seek(0, 0)
-        if(hasattr(sys.stdout, "buffer")):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
     elif(outfile is None):
         fp.seek(0, 0)
         outvar = fp.read()
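The flush-and-sync epilogue repeated after every write here is best effort: in-memory or non-file streams have no usable fileno(), and syncing may simply be unsupported, so all three failure modes are swallowed together. A condensed sketch of the pattern:

    import io
    import os

    def best_effort_sync(fp):
        # Flush Python-level buffers, then ask the OS to persist them,
        # ignoring streams that cannot be synced.
        try:
            fp.flush()
            if hasattr(os, "sync"):
                os.fsync(fp.fileno())
        except (io.UnsupportedOperation, AttributeError, OSError):
            pass

    best_effort_sync(io.BytesIO())  # BytesIO.fileno() raises UnsupportedOperation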
@@ -6072,11 +5759,11 @@ def MakeEmptyFile(outfile, fmttype="auto", compression="auto", compresswholefile
     return True
 
 
-def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype="crc32", formatspecs=__file_format_dict__, returnfp=False):
+def MakeEmptyFoxFile(outfile, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5"], formatspecs=__file_format_dict__, returnfp=False):
     return MakeEmptyFile(outfile, "auto", compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, formatspecs, returnfp)
 
 
-def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["crc32", "crc32", "crc32"], formatspecs=__file_format_dict__):
+def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, filecontent="", checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__):
     if(not hasattr(fp, "write")):
         return False
     if (isinstance(extradata, dict) or IsNestedDictAlt(extradata)) and len(extradata) > 0:
@@ -6155,26 +5842,21 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     return fp
 
-
-def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     advancedlist = formatspecs['use_advanced_list']
     altinode = formatspecs['use_alt_inode']
     if(verbose):
         logging.basicConfig(format="%(message)s",
-                            stream=sys.stdout, level=logging.DEBUG)
+                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     infilelist = []
     if(infiles == "-"):
-        for line in sys.stdin:
+        for line in PY_STDIN_TEXT:
             infilelist.append(line.strip())
         infilelist = list(filter(None, infilelist))
     elif(infiles != "-" and dirlistfromtxt and os.path.exists(infiles) and (os.path.isfile(infiles) or infiles == os.devnull)):
@@ -6213,16 +5895,12 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     FullSizeFilesAlt = 0
     for curfname in GetDirList:
@@ -6372,7 +6050,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         curcompression = "none"
         if not followlink and ftype in data_types:
             with open(fname, "rb") as fpc:
-                shutil.copyfileobj(fpc, fcontents)
+                shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
@@ -6390,7 +6068,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             while(ilmin < ilsize):
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -6406,7 +6084,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             curcompression = compressionuselist[ilcmin]
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6422,7 +6100,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             return False
         flstatinfo = os.stat(flinkname)
         with open(flinkname, "rb") as fpc:
-            shutil.copyfileobj(fpc, fcontents)
+            shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
@@ -6440,7 +6118,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
            while(ilmin < ilsize):
                cfcontents = MkTempFile()
                fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                fcontents.seek(0, 0)
                cfcontents.seek(0, 0)
                cfcontents = CompressOpenFileAlt(
@@ -6456,7 +6134,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             curcompression = compressionuselist[ilcmin]
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6474,25 +6152,21 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                   fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
     AppendFileHeaderWithContent(
-        fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+        fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     return fp
 
-def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
         logging.basicConfig(format="%(message)s",
-                            stream=sys.stdout, level=logging.DEBUG)
+                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6501,10 +6175,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     inodetoforminode = {}
     if(infile == "-"):
         infile = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
         if(not infile):
             return False
@@ -6559,16 +6230,12 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     except FileNotFoundError:
         return False
     numfiles = int(len(tarfp.getmembers()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
@@ -6654,7 +6321,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         curcompression = "none"
         if ftype in data_types:
             fpc = tarfp.extractfile(member)
-            shutil.copyfileobj(fpc, fcontents)
+            shutil.copyfileobj(fpc, fcontents, length=__filebuff_size__)
             fpc.close()
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
@@ -6673,7 +6340,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             while(ilmin < ilsize):
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -6689,7 +6356,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             curcompression = compressionuselist[ilcmin]
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6707,26 +6374,22 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
     tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                   fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
     AppendFileHeaderWithContent(
-        fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+        fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     fcontents.close()
     return fp
 
-def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
         logging.basicConfig(format="%(message)s",
-                            stream=sys.stdout, level=logging.DEBUG)
+                            stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6735,10 +6398,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     inodetoforminode = {}
     if(infile == "-"):
         infile = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, infile)
-        else:
-            shutil.copyfileobj(sys.stdin, infile)
+        shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
         infile.seek(0, 0)
         if(not infile):
             return False
@@ -6763,16 +6423,12 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     if(ziptest):
         VerbosePrintOut("Bad file found!")
     numfiles = int(len(zipfp.infolist()))
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
@@ -6857,24 +6513,18 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fcsize = format(int(0), 'x').lower()
         try:
             fuid = format(int(os.getuid()), 'x').lower()
-        except AttributeError:
-            fuid = format(int(0), 'x').lower()
-        except KeyError:
+        except (KeyError, AttributeError):
            fuid = format(int(0), 'x').lower()
         try:
             fgid = format(int(os.getgid()), 'x').lower()
-        except AttributeError:
-            fgid = format(int(0), 'x').lower()
-        except KeyError:
+        except (KeyError, AttributeError):
             fgid = format(int(0), 'x').lower()
         try:
             import pwd
             try:
                 userinfo = pwd.getpwuid(os.getuid())
                 funame = userinfo.pw_name
-            except KeyError:
-                funame = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 funame = ""
         except ImportError:
             funame = ""
@@ -6884,9 +6534,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         try:
             groupinfo = grp.getgrgid(os.getgid())
             fgname = groupinfo.gr_name
-        except KeyError:
-            fgname = ""
-        except AttributeError:
+        except (KeyError, AttributeError):
             fgname = ""
         except ImportError:
             fgname = ""
@@ -6909,7 +6557,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
             while(ilmin < ilsize):
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -6922,7 +6570,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
             curcompression = compressionuselist[ilcmin]
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -6940,31 +6588,26 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                   fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
     AppendFileHeaderWithContent(
-        fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[1], checksumtype[2], checksumtype[3]], formatspecs)
+        fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     fcontents.close()
     return fp
 
 if(not rarfile_support):
-    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         return False
-
-
-def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+else:
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         if(not hasattr(fp, "write")):
             return False
         if(verbose):
             logging.basicConfig(format="%(message)s",
-                                stream=sys.stdout, level=logging.DEBUG)
+                                stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         curinode = 0
         curfid = 0
         inodelist = []
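The optional rar and 7z backends now follow a single guard shape: when the import flag is false, the module defines a stub that simply returns False, and the real implementation sits under `else:`, so callers always find the same name. A condensed sketch of the pattern (names are illustrative):

    rarfile_support = False  # pretend the optional backend failed to import

    if not rarfile_support:
        def append_from_rar(infile, fp):
            # Stub: backend unavailable, signal failure.
            return False
    else:
        def append_from_rar(infile, fp):
            # Real conversion work would go here.
            return fp

    print(append_from_rar("example.rar", None))  # -> False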
@@ -6980,26 +6623,18 @@ if(rarfile_support):
         if(rartest):
             VerbosePrintOut("Bad file found!")
         numfiles = int(len(rarfp.infolist()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
@@ -7108,24 +6743,18 @@ if(rarfile_support):
                     int(stat.S_IFMT(int(stat.S_IFDIR | 0x1ff))), 'x').lower()
             try:
                 fuid = format(int(os.getuid()), 'x').lower()
-            except AttributeError:
-                fuid = format(int(0), 'x').lower()
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = format(int(0), 'x').lower()
             try:
                 fgid = format(int(os.getgid()), 'x').lower()
-            except AttributeError:
-                fgid = format(int(0), 'x').lower()
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = format(int(0), 'x').lower()
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -7135,9 +6764,7 @@ if(rarfile_support):
             try:
                 groupinfo = grp.getgrgid(os.getgid())
                 fgname = groupinfo.gr_name
-            except KeyError:
-                fgname = ""
-            except AttributeError:
+            except (KeyError, AttributeError):
                 fgname = ""
             except ImportError:
                 fgname = ""
@@ -7160,7 +6787,7 @@ if(rarfile_support):
             while(ilmin < ilsize):
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -7176,7 +6803,7 @@ if(rarfile_support):
             curcompression = compressionuselist[ilcmin]
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7194,31 +6821,26 @@ if(rarfile_support):
             tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
             AppendFileHeaderWithContent(
-                fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+                fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
             try:
                 fp.flush()
                 if(hasattr(os, "sync")):
                     os.fsync(fp.fileno())
-            except io.UnsupportedOperation:
-                pass
-            except AttributeError:
-                pass
-            except OSError:
+            except (io.UnsupportedOperation, AttributeError, OSError):
                 pass
             fcontents.close()
         return fp
 
 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         return False
-
-
-    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+else:
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
         if(not hasattr(fp, "write")):
             return False
         if(verbose):
             logging.basicConfig(format="%(message)s",
-                                stream=sys.stdout, level=logging.DEBUG)
+                                stream=PY_STDOUT_TEXT, level=logging.DEBUG)
         formver = formatspecs['format_ver']
         fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
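Another pattern repeated throughout this release: the three separate except clauses (io.UnsupportedOperation, AttributeError, OSError), each consisting of a bare pass, collapse into one tuple clause. A sketch of the resulting flush/sync idiom, assuming all three failures are equally non-fatal, as they are in these hunks:

import io
import os

def best_effort_sync(fp):
    # Flush Python-level buffers, then ask the OS to commit bytes to disk.
    # BytesIO has no usable fileno(), some wrappers lack flush(), and pipes
    # may refuse fsync(); every such case is swallowed identically here.
    try:
        fp.flush()
        if hasattr(os, "sync"):
            os.fsync(fp.fileno())
    except (io.UnsupportedOperation, AttributeError, OSError):
        pass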
@@ -7236,16 +6858,12 @@ if(py7zr_support):
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
         numfiles = int(len(szpfp.list()))
-        AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
@@ -7295,24 +6913,18 @@ if(py7zr_support):
                 int(stat.S_IFMT(int(stat.S_IFREG | 0x1b6))), 'x').lower()
             try:
                 fuid = format(int(os.getuid()), 'x').lower()
-            except AttributeError:
-                fuid = format(int(0), 'x').lower()
-            except KeyError:
+            except (KeyError, AttributeError):
                 fuid = format(int(0), 'x').lower()
             try:
                 fgid = format(int(os.getgid()), 'x').lower()
-            except AttributeError:
-                fgid = format(int(0), 'x').lower()
-            except KeyError:
+            except (KeyError, AttributeError):
                 fgid = format(int(0), 'x').lower()
             try:
                 import pwd
                 try:
                     userinfo = pwd.getpwuid(os.getuid())
                     funame = userinfo.pw_name
-                except KeyError:
-                    funame = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     funame = ""
             except ImportError:
                 funame = ""
@@ -7322,9 +6934,7 @@ if(py7zr_support):
                 try:
                     groupinfo = grp.getgrgid(os.getgid())
                     fgname = groupinfo.gr_name
-                except KeyError:
-                    fgname = ""
-                except AttributeError:
+                except (KeyError, AttributeError):
                     fgname = ""
             except ImportError:
                 fgname = ""
@@ -7350,7 +6960,7 @@ if(py7zr_support):
                 while(ilmin < ilsize):
                     cfcontents = MkTempFile()
                     fcontents.seek(0, 0)
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     fcontents.seek(0, 0)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
@@ -7366,7 +6976,7 @@ if(py7zr_support):
                     curcompression = compressionuselist[ilcmin]
                     fcontents.seek(0, 0)
                     cfcontents = MkTempFile()
-                    shutil.copyfileobj(fcontents, cfcontents)
+                    shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                     cfcontents.seek(0, 0)
                     cfcontents = CompressOpenFileAlt(
                         cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs)
@@ -7384,25 +6994,21 @@ if(py7zr_support):
             tmpoutlist = [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, fdev_minor, fdev_major, "+"+str(len(formatspecs['format_delimiter']))]
             AppendFileHeaderWithContent(
-                fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+                fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
             try:
                 fp.flush()
                 if(hasattr(os, "sync")):
                     os.fsync(fp.fileno())
-            except io.UnsupportedOperation:
-                pass
-            except AttributeError:
-                pass
-            except OSError:
+            except (io.UnsupportedOperation, AttributeError, OSError):
                 pass
             fcontents.close()
         return fp
 
-def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     if(not hasattr(fp, "write")):
         return False
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     GetDirList = inlist
     if(not GetDirList):
         return False
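The checksumtype default also grows from four "crc32" slots to five "md5" slots across this release. From the call sites in these hunks, slots 0-1 feed AppendFileHeader and slots 2-4 feed AppendFileHeaderWithContent; the per-slot meaning is not spelled out in the diff, so the split below is inferred from the indexing alone and the names are illustrative:

# Inferred from the call sites above; not an authoritative description.
checksumtype = ["md5", "md5", "md5", "md5", "md5"]
header_checksums = [checksumtype[0], checksumtype[1]]                     # -> AppendFileHeader
content_checksums = [checksumtype[2], checksumtype[3], checksumtype[4]]  # -> AppendFileHeaderWithContent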
@@ -7414,7 +7020,7 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
     inodetoforminode = {}
     numfiles = int(len(GetDirList))
     fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], checksumtype[0], formatspecs)
+    AppendFileHeader(fp, numfiles, "UTF-8", [], [checksumtype[0], checksumtype[1]], formatspecs)
     for curfname in GetDirList:
         ftype = format(curfname[0], 'x').lower()
         fencoding = curfname[1]
@@ -7456,16 +7062,16 @@ def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsond
                       fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, fdev_minor, fdev_major, fseeknextfile]
         fcontents.seek(0, 0)
         AppendFileHeaderWithContent(
-            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[
+            fp, tmpoutlist, extradata, jsondata, fcontents.read(), [checksumtype[2], checksumtype[3], checksumtype[4]], formatspecs)
     return fp
 
 
-def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False):
+def AppendInFileWithContent(infile, fp, dirlistfromtxt=False, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
     return AppendListsWithContent(inlist, fp, dirlistfromtxt, extradata, jsondata, followlink, checksumtype, formatspecs, verbose)
 
 
-def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and fmttype=="auto" and
        (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
         get_in_ext = os.path.splitext(outfile)
|
|
|
7518
7124
|
fp.flush()
|
|
7519
7125
|
if(hasattr(os, "sync")):
|
|
7520
7126
|
os.fsync(fp.fileno())
|
|
7521
|
-
except io.UnsupportedOperation:
|
|
7522
|
-
pass
|
|
7523
|
-
except AttributeError:
|
|
7524
|
-
pass
|
|
7525
|
-
except OSError:
|
|
7127
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7526
7128
|
pass
|
|
7527
7129
|
if(outfile == "-"):
|
|
7528
7130
|
fp.seek(0, 0)
|
|
7529
|
-
|
|
7530
|
-
shutil.copyfileobj(fp, sys.stdout.buffer)
|
|
7531
|
-
else:
|
|
7532
|
-
shutil.copyfileobj(fp, sys.stdout)
|
|
7131
|
+
shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
|
|
7533
7132
|
elif(outfile is None):
|
|
7534
7133
|
fp.seek(0, 0)
|
|
7535
7134
|
outvar = fp.read()
|
|
@@ -7546,7 +7145,7 @@ def AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt=False, fmtt
|
|
|
7546
7145
|
fp.close()
|
|
7547
7146
|
return True
|
|
7548
7147
|
|
|
7549
|
-
def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["
|
|
7148
|
+
def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7550
7149
|
if not isinstance(infiles, list):
|
|
7551
7150
|
infiles = [infiles]
|
|
7552
7151
|
returnout = False
|
|
@@ -7561,7 +7160,7 @@ def AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt=Fals
|
|
|
7561
7160
|
return True
|
|
7562
7161
|
return returnout
|
|
7563
7162
|
|
|
7564
|
-
def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["
|
|
7163
|
+
def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
|
|
7565
7164
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7566
7165
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7567
7166
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7611,18 +7210,11 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
7611
7210
|
fp.flush()
|
|
7612
7211
|
if(hasattr(os, "sync")):
|
|
7613
7212
|
os.fsync(fp.fileno())
|
|
7614
|
-
except io.UnsupportedOperation:
|
|
7615
|
-
pass
|
|
7616
|
-
except AttributeError:
|
|
7617
|
-
pass
|
|
7618
|
-
except OSError:
|
|
7213
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7619
7214
|
pass
|
|
7620
7215
|
if(outfile == "-"):
|
|
7621
7216
|
fp.seek(0, 0)
|
|
7622
|
-
|
|
7623
|
-
shutil.copyfileobj(fp, sys.stdout.buffer)
|
|
7624
|
-
else:
|
|
7625
|
-
shutil.copyfileobj(fp, sys.stdout)
|
|
7217
|
+
shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
|
|
7626
7218
|
elif(outfile is None):
|
|
7627
7219
|
fp.seek(0, 0)
|
|
7628
7220
|
outvar = fp.read()
|
|
@@ -7640,7 +7232,7 @@ def AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt=False, fmtty
|
|
|
7640
7232
|
fp.close()
|
|
7641
7233
|
return True
|
|
7642
7234
|
|
|
7643
|
-
def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7235
|
+
def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7644
7236
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7645
7237
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7646
7238
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7691,18 +7283,11 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7691
7283
|
fp.flush()
|
|
7692
7284
|
if(hasattr(os, "sync")):
|
|
7693
7285
|
os.fsync(fp.fileno())
|
|
7694
|
-
except io.UnsupportedOperation:
|
|
7695
|
-
pass
|
|
7696
|
-
except AttributeError:
|
|
7697
|
-
pass
|
|
7698
|
-
except OSError:
|
|
7286
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7699
7287
|
pass
|
|
7700
7288
|
if(outfile == "-"):
|
|
7701
7289
|
fp.seek(0, 0)
|
|
7702
|
-
|
|
7703
|
-
shutil.copyfileobj(fp, sys.stdout.buffer)
|
|
7704
|
-
else:
|
|
7705
|
-
shutil.copyfileobj(fp, sys.stdout)
|
|
7290
|
+
shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
|
|
7706
7291
|
elif(outfile is None):
|
|
7707
7292
|
fp.seek(0, 0)
|
|
7708
7293
|
outvar = fp.read()
|
|
@@ -7720,7 +7305,7 @@ def AppendFilesWithContentFromTarFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7720
7305
|
fp.close()
|
|
7721
7306
|
return True
|
|
7722
7307
|
|
|
7723
|
-
def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7308
|
+
def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7724
7309
|
if not isinstance(infiles, list):
|
|
7725
7310
|
infiles = [infiles]
|
|
7726
7311
|
returnout = False
|
|
@@ -7735,7 +7320,7 @@ def AppendFilesWithContentFromTarFileToStackedOutFile(infiles, outfile, fmttype=
|
|
|
7735
7320
|
return True
|
|
7736
7321
|
return returnout
|
|
7737
7322
|
|
|
7738
|
-
def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7323
|
+
def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7739
7324
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7740
7325
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7741
7326
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7786,18 +7371,11 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7786
7371
|
fp.flush()
|
|
7787
7372
|
if(hasattr(os, "sync")):
|
|
7788
7373
|
os.fsync(fp.fileno())
|
|
7789
|
-
except io.UnsupportedOperation:
|
|
7790
|
-
pass
|
|
7791
|
-
except AttributeError:
|
|
7792
|
-
pass
|
|
7793
|
-
except OSError:
|
|
7374
|
+
except (io.UnsupportedOperation, AttributeError, OSError):
|
|
7794
7375
|
pass
|
|
7795
7376
|
if(outfile == "-"):
|
|
7796
7377
|
fp.seek(0, 0)
|
|
7797
|
-
|
|
7798
|
-
shutil.copyfileobj(fp, sys.stdout.buffer)
|
|
7799
|
-
else:
|
|
7800
|
-
shutil.copyfileobj(fp, sys.stdout)
|
|
7378
|
+
shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
|
|
7801
7379
|
elif(outfile is None):
|
|
7802
7380
|
fp.seek(0, 0)
|
|
7803
7381
|
outvar = fp.read()
|
|
@@ -7815,7 +7393,7 @@ def AppendFilesWithContentFromZipFileToOutFile(infiles, outfile, fmttype="auto",
|
|
|
7815
7393
|
fp.close()
|
|
7816
7394
|
return True
|
|
7817
7395
|
|
|
7818
|
-
def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7396
|
+
def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7819
7397
|
if not isinstance(infiles, list):
|
|
7820
7398
|
infiles = [infiles]
|
|
7821
7399
|
returnout = False
|
|
@@ -7831,11 +7409,10 @@ def AppendFilesWithContentFromZipFileToStackedOutFile(infiles, outfile, fmttype=
|
|
|
7831
7409
|
return returnout
|
|
7832
7410
|
|
|
7833
7411
|
if(not rarfile_support):
|
|
7834
|
-
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7412
|
+
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7835
7413
|
return False
|
|
7836
|
-
|
|
7837
|
-
|
|
7838
|
-
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7414
|
+
else:
|
|
7415
|
+
def AppendFilesWithContentFromRarFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7839
7416
|
if(IsNestedDict(formatspecs) and fmttype=="auto" and
|
|
7840
7417
|
(outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
|
|
7841
7418
|
get_in_ext = os.path.splitext(outfile)
|
|
@@ -7886,18 +7463,11 @@ if(rarfile_support):
            fp.flush()
            if(hasattr(os, "sync")):
                os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         if(outfile == "-"):
             fp.seek(0, 0)
-            if(hasattr(sys.stdout, "buffer")):
-                shutil.copyfileobj(fp, sys.stdout.buffer)
-            else:
-                shutil.copyfileobj(fp, sys.stdout)
+            shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
         elif(outfile is None):
             fp.seek(0, 0)
             outvar = fp.read()
|
|
|
7915
7485
|
fp.close()
|
|
7916
7486
|
return True
|
|
7917
7487
|
|
|
7918
|
-
def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["
|
|
7488
|
+
def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
|
|
7919
7489
|
if not isinstance(infiles, list):
|
|
7920
7490
|
infiles = [infiles]
|
|
7921
7491
|
returnout = False
|
|
@@ -7931,11 +7501,10 @@ def AppendFilesWithContentFromRarFileToStackedOutFile(infiles, outfile, fmttype=
     return returnout
 
 if(not py7zr_support):
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+else:
+    def AppendFilesWithContentFromSevenZipFileToOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
         if(IsNestedDict(formatspecs) and fmttype=="auto" and
            (outfile != "-" and outfile is not None and not hasattr(outfile, "read") and not hasattr(outfile, "write"))):
             get_in_ext = os.path.splitext(outfile)
@@ -7986,18 +7555,11 @@ if(py7zr_support):
            fp.flush()
            if(hasattr(os, "sync")):
                os.fsync(fp.fileno())
-        except io.UnsupportedOperation:
-            pass
-        except AttributeError:
-            pass
-        except OSError:
+        except (io.UnsupportedOperation, AttributeError, OSError):
             pass
         if(outfile == "-"):
             fp.seek(0, 0)
-            if(hasattr(sys.stdout, "buffer")):
-                shutil.copyfileobj(fp, sys.stdout.buffer)
-            else:
-                shutil.copyfileobj(fp, sys.stdout)
+            shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
         elif(outfile is None):
             fp.seek(0, 0)
             outvar = fp.read()
@@ -8015,7 +7577,7 @@ if(py7zr_support):
         fp.close()
         return True
 
-def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, extradata=[], jsondata={}, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     if not isinstance(infiles, list):
         infiles = [infiles]
     returnout = False
@@ -8030,7 +7592,7 @@ def AppendFilesWithContentFromSevenZipFileToStackedOutFile(infiles, outfile, fmt
         return True
     return returnout
 
-def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def AppendInFileWithContentToOutFile(infile, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, extradata=[], jsondata={}, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     inlist = ReadInFileWithContentToList(infile, "auto", 0, 0, False, False, True, False, formatspecs)
     return AppendListsWithContentToOutFile(inlist, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
@@ -8064,9 +7626,7 @@ def PrintPermissionString(fchmode, ftype):
         permissionstr = "w" + permissionstr
     try:
         permissionoutstr = stat.filemode(fchmode)
-    except AttributeError:
-        permissionoutstr = permissionstr
-    except KeyError:
+    except (KeyError, AttributeError):
         permissionoutstr = permissionstr
     return permissionoutstr
 
@@ -8982,7 +8542,7 @@ def UncompressString(infile, formatspecs=__file_format_multi_dict__, filestart=0
 
 
 def UncompressStringAlt(instring, formatspecs=__file_format_multi_dict__, filestart=0):
-    filefp =
+    filefp = MkTempFile("", isbytes=False)
     outstring = UncompressString(instring, formatspecs, filestart)
     filefp.write(outstring)
     filefp.seek(0, 0)
@@ -8997,7 +8557,7 @@ def UncompressStringAltFP(fp, formatspecs=__file_format_multi_dict__, filestart=
     fp.seek(filestart, 0)
     if(prechck!="zstd"):
         return UncompressFileAlt(fp, formatspecs, filestart)
-    filefp =
+    filefp = MkTempFile("", isbytes=False)
     fp.seek(filestart, 0)
     outstring = UncompressString(fp.read(), formatspecs, 0)
     filefp.write(outstring)
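Both uncompress-to-string paths now build their scratch buffer with MkTempFile("", isbytes=False), i.e. a text-mode temporary. MkTempFile's implementation is not part of this diff; the stand-in below only illustrates the calling convention visible here, and the real helper may well be a spooled on-disk temporary rather than a pure in-memory buffer:

import io

def mk_temp_file(initial="", isbytes=True):
    # Hypothetical stand-in for pyfoxfile's MkTempFile, matching the
    # (initial_value, isbytes) call shape seen in this hunk.
    if isbytes:
        data = initial.encode("utf-8") if isinstance(initial, str) else initial
        fp = io.BytesIO(data)
    else:
        fp = io.StringIO(initial)
    fp.seek(0, 0)
    return fp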
@@ -9069,9 +8629,7 @@ def _extract_base_fp(obj):
         try:
             f() # probe fileno()
             return cur
-        except UnsupportedOperation:
-            pass
-        except Exception:
+        except (Exception, UnsupportedOperation):
             pass
     for attr in ("fileobj", "fp", "_fp", "buffer", "raw"):
         nxt = getattr(cur, attr, None)
@@ -9463,7 +9021,7 @@ def ensure_filelike(infile, mode="rb", use_mmap=False, **adapter_kw):
 
 # ========= copy helpers =========
 
-def fast_copy(infp, outfp, bufsize=
+def fast_copy(infp, outfp, bufsize=__filebuff_size__):
     """
     Efficient copy from any readable file-like to any writable file-like.
     Uses readinto() when available to avoid extra allocations.
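fast_copy's docstring promises readinto()-based copying; a minimal sketch of that technique (independent of the function's actual body, which this diff does not show):

def readinto_copy(infp, outfp, bufsize=1 << 20):
    # One reusable bytearray; readinto() fills it in place, so no new
    # bytes object is allocated per chunk the way infp.read() would.
    buf = bytearray(bufsize)
    view = memoryview(buf)
    readinto = getattr(infp, "readinto", None)
    while True:
        if readinto is not None:
            n = readinto(buf)
            if not n:
                break
            outfp.write(view[:n])
        else:
            chunk = infp.read(bufsize)
            if not chunk:
                break
            outfp.write(chunk)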
@@ -9507,7 +9065,7 @@ def copy_file_to_mmap_dest(src_path, outfp, chunk_size=__spoolfile_size__):
         shutil.copyfileobj(fp, outfp, length=chunk_size)
 
 
-def copy_opaque(src, dst, bufsize=
+def copy_opaque(src, dst, bufsize=__filebuff_size__, grow_step=64 << 20):
     """
     Copy opaque bytes from 'src' (any readable file-like) to 'dst'
     (your mmap-backed FileLikeAdapter or any writable file-like).
@@ -9569,11 +9127,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
 
     try:
         fp.seek(0, 0)
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
        pass
 
     if (not compression or compression == formatspecs['format_magic']
@@ -9632,11 +9186,7 @@ def CompressOpenFileAlt(fp, compression="auto", compressionlevel=None,
 
     try:
         bytesfp.seek(0, 0)
-    except io.UnsupportedOperation:
-        pass
-    except AttributeError:
-        pass
-    except OSError:
+    except (io.UnsupportedOperation, AttributeError, OSError):
         pass
     out = FileLikeAdapter(bytesfp, mode="rb") # read interface for the caller
     try:
@@ -9766,31 +9316,18 @@ def CheckSumSupport(checkfor, guaranteed=True):
         try:
             hash_list = sorted(list(hashlib.algorithms_guaranteed))
         except AttributeError:
-
-
-
-
-        except AttributeError:
-            hash_list = sorted(list(hashlib.algorithms))
-    checklistout = sorted(hash_list + ['adler32', 'crc16', 'crc16_ansi', 'crc16_ibm',
-                                       'crc16_ccitt', 'crc32', 'crc64', 'crc64_ecma', 'crc64_iso', 'none'])
-    if(checkfor in checklistout):
-        return True
-    else:
-        return False
-
-
-def CheckSumSupportAlt(checkfor, guaranteed=True):
-    if(guaranteed):
-        try:
-            hash_list = sorted(list(hashlib.algorithms_guaranteed))
-        except AttributeError:
-            hash_list = sorted(list(hashlib.algorithms))
+            try:
+                hash_list = sorted(list(hashlib.algorithms))
+            except AttributeError:
+                hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
     else:
         try:
             hash_list = sorted(list(hashlib.algorithms_available))
         except AttributeError:
-
+            try:
+                hash_list = sorted(list(hashlib.algorithms))
+            except AttributeError:
+                hash_list = sorted(list(a.lower() for a in hashlib.algorithms_available))
     checklistout = hash_list
     if(checkfor in checklistout):
         return True
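The rewritten CheckSumSupport probes hashlib attributes in turn: algorithms_guaranteed (Python 2.7.9+/3.2+), the older algorithms tuple (2.7-2.7.8), and algorithms_available as a last resort. A compact sketch of the same probe chain, with illustrative names:

import hashlib

def digest_names(guaranteed=True):
    # Try the attributes newest-first; each may be absent on sufficiently
    # old interpreters, hence the chained fallbacks.
    primary = "algorithms_guaranteed" if guaranteed else "algorithms_available"
    names = getattr(hashlib, primary, None) or getattr(hashlib, "algorithms", None)
    if names is None:
        names = {a.lower() for a in getattr(hashlib, "algorithms_available", ())}
    return sorted(names)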
@@ -9798,48 +9335,46 @@ def CheckSumSupportAlt(checkfor, guaranteed=True):
         return False
 
 
-def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
-def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
+def PackStackedFoxFile(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_multi_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentToStackedOutFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, followlink, checksumtype, formatspecs, verbose, returnfp)
 
-def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromDirList(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return PackFoxFile(infiles, outfile, dirlistfromtxt, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, returnfp)
 
 
-def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromTarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromTarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
-def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     return AppendFilesWithContentFromZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
 if(not rarfile_support):
-    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackFoxFileFromRarFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromRarFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
 if(not py7zr_support):
-    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return False
-
-
-    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+else:
+    def PackFoxFileFromSevenZipFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
         return AppendFilesWithContentFromSevenZipFileToOutFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, extradata, jsondata, checksumtype, formatspecs, verbose, returnfp)
 
 
-def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["crc32", "crc32", "crc32", "crc32"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
+def PackFoxFileFromInFile(infile, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], extradata=[], jsondata={}, formatspecs=__file_format_dict__, verbose=False, returnfp=False):
     checkcompressfile = CheckCompressionSubType(infile, formatspecs, 0, True)
     if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
         formatspecs = formatspecs[checkcompressfile]
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(checkcompressfile == "tarfile" and TarFileCheck(infile)):
         return PackFoxFileFromTarFile(infile, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, checksumtype, extradata, jsondata, formatspecs, verbose, returnfp)
     elif(checkcompressfile == "zipfile" and zipfile.is_zipfile(infile)):
@@ -9922,7 +9457,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
                     formatspecs=__file_format_multi_dict__, # keep default like original
                     seektoend=False, verbose=False, returnfp=False):
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 
     if(IsNestedDict(formatspecs) and fmttype!="auto" and fmttype in formatspecs):
         formatspecs = formatspecs[fmttype]
@@ -9949,10 +9484,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
 
     elif(infile == "-"):
         fp = MkTempFile()
-        if(hasattr(sys.stdin, "buffer")):
-            shutil.copyfileobj(sys.stdin.buffer, fp)
-        else:
-            shutil.copyfileobj(sys.stdin, fp)
+        shutil.copyfileobj(PY_STDIN_BUF, fp, length=__filebuff_size__)
         fp.seek(filestart, 0)
         fp = UncompressFileAlt(fp, formatspecs, filestart)
         checkcompressfile = CheckCompressionSubType(fp, formatspecs, filestart, True)
@@ -10029,9 +9561,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
 
     try:
         fp.seek(0, 2)
-    except OSError:
-        SeekToEndOfFile(fp)
-    except ValueError:
+    except (OSError, ValueError):
         SeekToEndOfFile(fp)
 
     CatSize = fp.tell()
@@ -10061,18 +9591,56 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
     else:
         inheader = ReadFileHeaderDataWoSize(fp, formatspecs['format_delimiter'])
 
-    fnumextrafieldsize = int(inheader[
-    fnumextrafields = int(inheader[
-    extrastart =
+    fnumextrafieldsize = int(inheader[13], 16)
+    fnumextrafields = int(inheader[14], 16)
+    extrastart = 15
     extraend = extrastart + fnumextrafields
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-
-    fostype = inheader[3]
-    fnumfiles = int(inheader[4], 16)
+    fnumfiles = int(inheader[6], 16)
     fprechecksumtype = inheader[-2]
     fprechecksum = inheader[-1]
+    outfseeknextfile = inheader[7]
+    fjsonsize = int(inheader[10], 16)
+    fjsonchecksumtype = inheader[11]
+    fjsonchecksum = inheader[12]
+    fprejsoncontent = fp.read(fjsonsize)
+    jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs)
+    if(fjsonsize > 0):
+        if(hmac.compare_digest(jsonfcs, fjsonchecksum)):
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
+        else:
+            valid_archive = False
+            invalid_archive = True
+            if(verbose):
+                VerbosePrintOut("File JSON Data Checksum Error at offset " + str(outfjstart))
+                VerbosePrintOut("'" + outfjsonchecksum + "' != " + "'" + injsonfcs + "'")
+    if(not hmac.compare_digest(fjsonchecksum, jsonfcs) and not skipchecksum):
+        VerbosePrintOut("File JSON Data Checksum Error with file " +
+                        fname + " at offset " + str(fheaderstart))
+        VerbosePrintOut("'" + fjsonchecksum + "' != " + "'" + jsonfcs + "'")
+        return False
+    # Next seek directive
+    if(re.findall(r"^\+([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile.replace("+", ""))
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^\-([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 1)
+    elif(re.findall(r"^([0-9]+)", outfseeknextfile)):
+        fseeknextasnum = int(outfseeknextfile)
+        if(abs(fseeknextasnum) == 0):
+            pass
+        fp.seek(fseeknextasnum, 0)
+    else:
+        return False
 
     il = 0
     headercheck = ValidateHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, fprechecksum, formatspecs)
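The new validation path also honors the header's seek-next-file directive. Its grammar, as used in the hunk above: "+N" seeks N bytes forward from the current position, "-N" seeks backward, and a bare "N" is an absolute offset. A hedged sketch with illustrative names:

def apply_seek_directive(fp, directive):
    # directive comes from the archive header (inheader[7] above).
    if directive.startswith("+"):
        fp.seek(int(directive[1:]), 1)   # relative, forward
    elif directive.startswith("-"):
        fp.seek(int(directive), 1)       # relative, backward (int is negative)
    elif directive.isdigit():
        fp.seek(int(directive), 0)       # absolute offset
    else:
        raise ValueError("bad seek-next-file directive: %r" % (directive,))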
@@ -10191,7 +9759,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
             VerbosePrintOut(outfname)
             VerbosePrintOut("Record Number " + str(il) + "; File ID " + str(fid) + "; iNode Number " + str(finode))
 
-        if(outfcs == infcs):
+        if(hmac.compare_digest(outfcs, infcs)):
             if(verbose):
                 VerbosePrintOut("File Header Checksum Passed at offset " + str(outfhstart))
                 VerbosePrintOut("'" + outfcs + "' == " + "'" + infcs + "'")
@@ -10203,7 +9771,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
                 VerbosePrintOut("'" + outfcs + "' != " + "'" + infcs + "'")
 
         if(outfjsonsize > 0):
-            if(outfjsonchecksum == injsonfcs):
+            if(hmac.compare_digest(injsonfcs, outfjsonchecksum)):
                 if(verbose):
                     VerbosePrintOut("File JSON Data Checksum Passed at offset " + str(outfjstart))
                     VerbosePrintOut("'" + outfjsonchecksum + "' == " + "'" + injsonfcs + "'")
@@ -10227,7 +9795,7 @@ def FoxFileValidate(infile, fmttype="auto", filestart=0,
             infccs = GetFileChecksum(outfcontents, inheaderdata[-3].lower(), False, formatspecs)
             pyhascontents = True
 
-            if(outfccs == infccs):
+            if(hmac.compare_digest(outfccs, infccs)):
                 if(verbose):
                     VerbosePrintOut("File Content Checksum Passed at offset " + str(outfcontentstart))
                     VerbosePrintOut("'" + outfccs + "' == " + "'" + infccs + "'")
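All three checksum comparisons in FoxFileValidate switch from == to hmac.compare_digest, which takes time independent of where the two strings first differ and therefore does not leak, byte by byte, how close a forged checksum is to the stored one. Usage in isolation:

import hmac

stored = "9e107d9d372bb6826bd81d3542a419d6"
computed = "9e107d9d372bb6826bd81d3542a419d6"
# Both arguments must be the same type (ASCII str or bytes).
print("checksum ok" if hmac.compare_digest(stored, computed) else "mismatch")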
@@ -10300,7 +9868,7 @@ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
     while True:
         if outstartfile >= outfsize: # stop when function signals False
             break
-        is_valid_file =
+        is_valid_file = FoxFileValidate(infile, fmttype, outstartfile, formatspecs, seektoend, verbose, True)
         if is_valid_file is False: # stop when function signals False
             outretval.append(is_valid_file)
             break
@@ -10310,9 +9878,7 @@ def StackedFoxFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__fi
         outstartfile = infile.tell()
         try:
             infile.seek(0, 2)
-        except OSError:
-            SeekToEndOfFile(infile)
-        except ValueError:
+        except (OSError, ValueError):
             SeekToEndOfFile(infile)
         outfsize = infile.tell()
         infile.seek(outstartfile, 0)
@@ -10388,7 +9954,7 @@ def TarFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
     fp = PackFoxFileFromTarFile(
-        infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+        infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
     listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles
 
@@ -10399,7 +9965,7 @@ def ZipFileToArray(infile, seekstart=0, seekend=0, listonly=False, contentasfile
         formatspecs = formatspecs[checkcompressfile]
     fp = MkTempFile()
     fp = PackFoxFileFromZipFile(
-        infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+        infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
     listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
     return listarrayfiles
 
@@ -10415,7 +9981,7 @@ if(rarfile_support):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
         fp = PackFoxFileFromRarFile(
-            infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+            infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
         listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
         return listarrayfiles
 
@@ -10430,7 +9996,7 @@ if(py7zr_support):
             formatspecs = formatspecs[checkcompressfile]
         fp = MkTempFile()
         fp = PackFoxFileFromSevenZipFile(
-            infile, fp, "auto", True, None, compressionlistalt, "crc32", [], formatspecs, False, True)
+            infile, fp, "auto", True, None, compressionlistalt, "md5", [], formatspecs, False, True)
         listarrayfiles = FoxFileToArray(fp, "auto", 0, seekstart, seekend, listonly, contentasfile, True, skipchecksum, formatspecs, seektoend, returnfp)
         return listarrayfiles
 
@@ -10454,7 +10020,7 @@ def InFileToArray(infile, filestart=0, seekstart=0, seekend=0, listonly=False, c
     return False
 
 
-def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["crc32", "crc32", "crc32"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
+def ListDirToArray(infiles, dirlistfromtxt=False, fmttype=__file_format_default__, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, filestart=0, seekstart=0, seekend=0, listonly=False, skipchecksum=False, checksumtype=["md5", "md5", "md5"], extradata=[], formatspecs=__file_format_dict__, verbose=False, seektoend=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                            compressionlevel, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10585,7 +10151,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
     if compressionuselist is None:
         compressionuselist = compressionlistalt
     if checksumtype is None:
-        checksumtype = ["crc32", "crc32", "crc32", "crc32"]
+        checksumtype = ["md5", "md5", "md5", "md5"]
     if extradata is None:
         extradata = []
     if jsondata is None:
@@ -10672,7 +10238,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
         compression = "auto"
 
     if verbose:
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 
     # No files?
     if not listarrayfiles.get('ffilelist'):
@@ -10777,7 +10343,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
             while ilmin < ilsize:
                 cfcontents = MkTempFile()
                 fcontents.seek(0, 0)
-                shutil.copyfileobj(fcontents, cfcontents)
+                shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
                 fcontents.seek(0, 0)
                 cfcontents.seek(0, 0)
                 cfcontents = CompressOpenFileAlt(
@@ -10795,7 +10361,7 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
 
             fcontents.seek(0, 0)
             cfcontents = MkTempFile()
-            shutil.copyfileobj(fcontents, cfcontents)
+            shutil.copyfileobj(fcontents, cfcontents, length=__filebuff_size__)
             cfcontents.seek(0, 0)
             cfcontents = CompressOpenFileAlt(
                 cfcontents, curcompression, compressionlevel, compressionuselist, formatspecs
@@ -10894,22 +10460,12 @@ def RePackFoxFile(infile, outfile, fmttype="auto", compression="auto", compressw
         fp.flush()
         if hasattr(os, "sync"):
             os.fsync(fp.fileno())
-    except io.UnsupportedOperation:
-        if verbose:
-            logging.warning("Flush/sync unsupported on this file object.")
-    except AttributeError:
-        if verbose:
-            logging.warning("Flush/sync attributes missing on this file object.")
-    except OSError as e:
-        if verbose:
-            logging.warning("OS error during flush/sync: %s", e)
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
 
     if outfile == "-":
         fp.seek(0, 0)
-        if hasattr(sys.stdout, "buffer"):
-            shutil.copyfileobj(fp, sys.stdout.buffer)
-        else:
-            shutil.copyfileobj(fp, sys.stdout)
+        shutil.copyfileobj(fp, PY_STDOUT_BUF, length=__filebuff_size__)
    elif outfile is None:
        fp.seek(0, 0)
        outvar = fp.read()
@@ -10948,14 +10504,14 @@ def RePackMultipleFoxFile(infiles, outfile, fmttype="auto", compression="auto",
         return True
     return returnout
 
-def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def RePackFoxFileFromString(instr, outfile, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     fp = MkTempFile(instr)
     listarrayfiles = RePackFoxFile(fp, outfile, fmttype, compression, compresswholefile, compressionlevel, compressionuselist, followlink, filestart, seekstart, seekend,
                                    checksumtype, skipchecksum, extradata, jsondata, formatspecs, seektoend, verbose, returnfp)
     return listarrayfiles
 
 
-def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["crc32", "crc32", "crc32"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
+def PackFoxFileFromListDir(infiles, outfile, dirlistfromtxt=False, fmttype="auto", compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, filestart=0, seekstart=0, seekend=0, checksumtype=["md5", "md5", "md5"], skipchecksum=False, extradata=[], jsondata={}, formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
     outarray = MkTempFile()
     packform = PackFoxFile(infiles, outarray, dirlistfromtxt, fmttype, compression, compresswholefile,
                            compressionlevel, compressionuselist, followlink, checksumtype, extradata, formatspecs, verbose, True)
@@ -10968,7 +10524,7 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
     if(outdir is not None):
         outdir = RemoveWindowsPath(outdir)
     if(verbose):
-        logging.basicConfig(format="%(message)s", stream=sys.stdout, level=logging.DEBUG)
+        logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
     if(isinstance(infile, dict)):
         listarrayfiles = infile
     else:
@@ -11018,16 +10574,12 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
 listarrayfiles['ffilelist'][lcfi]['fcontents'])
 listarrayfiles['ffilelist'][lcfi]['fcontents'].seek(0, 0)
 shutil.copyfileobj(
-listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc)
+listarrayfiles['ffilelist'][lcfi]['fcontents'], fpc, length=__filebuff_size__)
 try:
 fpc.flush()
 if(hasattr(os, "sync")):
 os.fsync(fpc.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(hasattr(os, "chown") and funame == listarrayfiles['ffilelist'][lcfi]['funame'] and fgname == listarrayfiles['ffilelist'][lcfi]['fgname'] and preservepermissions):
 os.chown(PrependPath(outdir, listarrayfiles['ffilelist'][lcfi]['fname']),
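Two recurring edits run through UnPackFoxFile (this hunk and the two that follow): shutil.copyfileobj gains an explicit chunk size, and three equivalent except clauses collapse into one tuple handler, which behaves identically. A self-contained sketch of the combined pattern; the value given to __filebuff_size__ below is an assumption standing in for the module-level constant:

    import io
    import os
    import shutil

    __filebuff_size__ = 131072  # assumed size; the real constant is defined elsewhere

    def write_out(src, dst_path):
        with open(dst_path, "wb") as fpc:
            # Copy in fixed-size chunks rather than copyfileobj's default.
            shutil.copyfileobj(src, fpc, length=__filebuff_size__)
            try:
                fpc.flush()
                if hasattr(os, "sync"):
                    os.fsync(fpc.fileno())
            except (io.UnsupportedOperation, AttributeError, OSError):
                pass  # pipe-like or exotic targets cannot always be synced

    write_out(io.BytesIO(b"payload"), "out.bin")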
@@ -11069,16 +10621,12 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
 flinkinfo['fcontents'] = MkTempFile(
 flinkinfo['fcontents'])
 flinkinfo['fcontents'].seek(0, 0)
-shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
 try:
 fpc.flush()
 if(hasattr(os, "sync")):
 os.fsync(fpc.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
 os.chown(PrependPath(
@@ -11148,16 +10696,12 @@ def UnPackFoxFile(infile, outdir=None, followlink=False, filestart=0, seekstart=
 flinkinfo['fcontents'] = MkTempFile(
 flinkinfo['fcontents'])
 flinkinfo['fcontents'].seek(0, 0)
-shutil.copyfileobj(flinkinfo['fcontents'], fpc)
+shutil.copyfileobj(flinkinfo['fcontents'], fpc, length=__filebuff_size__)
 try:
 fpc.flush()
 if(hasattr(os, "sync")):
 os.fsync(fpc.fileno())
-except io.UnsupportedOperation:
-pass
-except AttributeError:
-pass
-except OSError:
+except (io.UnsupportedOperation, AttributeError, OSError):
 pass
 if(hasattr(os, "chown") and funame == flinkinfo['funame'] and fgname == flinkinfo['fgname'] and preservepermissions):
 os.chown(PrependPath(
@@ -11244,7 +10788,7 @@ def ftype_to_str(ftype):

 def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0, skipchecksum=False, formatspecs=__file_format_multi_dict__, seektoend=False, verbose=False, newstyle=False, returnfp=False):
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(isinstance(infile, dict)):
 listarrayfileslist = [infile]
 if(isinstance(infile, list)):
@@ -11252,7 +10796,7 @@ def FoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, seekend=0
 else:
 if(infile != "-" and not hasattr(infile, "read") and not hasattr(infile, "write") and not (sys.version_info[0] >= 3 and isinstance(infile, bytes))):
 infile = RemoveWindowsPath(infile)
-listarrayfileslist =
+listarrayfileslist = FoxFileToArray(infile, fmttype, filestart, seekstart, seekend, True, False, False, skipchecksum, formatspecs, seektoend, returnfp)
 if(not listarrayfileslist):
 return False
 for listarrayfiles in listarrayfileslist:
@@ -11325,9 +10869,7 @@ def StackedFoxFileListFiles(infile, fmttype="auto", filestart=0, seekstart=0, se
 outstartfile = infile.tell()
 try:
 infile.seek(0, 2)
-except OSError:
-SeekToEndOfFile(infile)
-except ValueError:
+except (OSError, ValueError):
 SeekToEndOfFile(infile)
 outfsize = infile.tell()
 infile.seek(outstartfile, 0)
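StackedFoxFileListFiles measures the archive by seeking to the end and then restoring the original position; the two fallback clauses are merged into one. A self-contained sketch of the idea, with a simple read loop standing in for the project's SeekToEndOfFile helper:

    import io

    def stream_size(infile, bufsize=65536):
        start = infile.tell()
        try:
            infile.seek(0, 2)  # 2 == io.SEEK_END
        except (OSError, ValueError):
            # Fallback for streams that reject end-relative seeks:
            # drain to EOF so tell() lands at the end.
            while infile.read(bufsize):
                pass
        size = infile.tell()
        infile.seek(start, 0)
        return size

    print(stream_size(io.BytesIO(b"x" * 1000)))  # -> 1000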
@@ -11357,13 +10899,10 @@ def FoxFileStringListFiles(instr, filestart=0, seekstart=0, seekend=0, skipcheck

 def TarFileListFiles(infile, verbose=False, returnfp=False):
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(infile == "-"):
 infile = MkTempFile()
-if(hasattr(sys.stdin, "buffer")):
-shutil.copyfileobj(sys.stdin.buffer, infile)
-else:
-shutil.copyfileobj(sys.stdin, infile)
+shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
 infile.seek(0, 0)
 if(not infile):
 return False
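When infile is "-", the listers now spool standard input into a temporary file through a single binary-capable alias instead of branching on sys.stdin.buffer. A sketch of the idea, assuming PY_STDIN_BUF resolves to the binary layer of sys.stdin and using io.BytesIO in place of the project's MkTempFile:

    import io
    import shutil
    import sys

    # Binary stdin where a .buffer attribute exists, plain stdin otherwise.
    PY_STDIN_BUF = getattr(sys.stdin, "buffer", sys.stdin)

    def read_stdin_archive(bufsize=131072):
        tmp = io.BytesIO()  # stand-in for MkTempFile()
        shutil.copyfileobj(PY_STDIN_BUF, tmp, length=bufsize)
        tmp.seek(0, 0)
        return tmp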
@@ -11482,13 +11021,10 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):

 def ZipFileListFiles(infile, verbose=False, returnfp=False):
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(infile == "-"):
 infile = MkTempFile()
-if(hasattr(sys.stdin, "buffer")):
-shutil.copyfileobj(sys.stdin.buffer, infile)
-else:
-shutil.copyfileobj(sys.stdin, infile)
+shutil.copyfileobj(PY_STDIN_BUF, infile, length=__filebuff_size__)
 infile.seek(0, 0)
 if(not infile):
 return False
@@ -11565,24 +11101,18 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
 printfname = member.filename
 try:
 fuid = int(os.getuid())
-except AttributeError:
-fuid = int(0)
-except KeyError:
+except (KeyError, AttributeError):
 fuid = int(0)
 try:
 fgid = int(os.getgid())
-except AttributeError:
-fgid = int(0)
-except KeyError:
+except (KeyError, AttributeError):
 fgid = int(0)
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
@@ -11592,9 +11122,7 @@ def ZipFileListFiles(infile, verbose=False, returnfp=False):
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
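The same owner/group lookup is repeated in the zip, rar, and 7z listers, and each repeated except pair becomes a single tuple handler. A condensed, self-contained sketch of the lookup that stays safe on platforms without pwd/grp (for example Windows):

    import os

    def current_owner():
        try:
            fuid, fgid = int(os.getuid()), int(os.getgid())
        except (KeyError, AttributeError):
            fuid = fgid = 0  # os.getuid/os.getgid are absent on Windows
        funame = fgname = ""
        try:
            import pwd
            try:
                funame = pwd.getpwuid(os.getuid()).pw_name
            except (KeyError, AttributeError):
                pass  # uid with no passwd entry
        except ImportError:
            pass
        try:
            import grp
            try:
                fgname = grp.getgrgid(os.getgid()).gr_name
            except (KeyError, AttributeError):
                pass  # gid with no group entry
        except ImportError:
            pass
        return fuid, fgid, funame, fgname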
@@ -11620,7 +11148,7 @@ if(not rarfile_support):
 if(rarfile_support):
 def RarFileListFiles(infile, verbose=False, returnfp=False):
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(not os.path.exists(infile) or not os.path.isfile(infile)):
 return False
 if(not rarfile.is_rarfile(infile) and not rarfile.is_rarfile_sfx(infile)):
@@ -11703,24 +11231,18 @@ if(rarfile_support):
 printfname = member.filename
 try:
 fuid = int(os.getuid())
-except AttributeError:
-fuid = int(0)
-except KeyError:
+except (KeyError, AttributeError):
 fuid = int(0)
 try:
 fgid = int(os.getgid())
-except AttributeError:
-fgid = int(0)
-except KeyError:
+except (KeyError, AttributeError):
 fgid = int(0)
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
@@ -11730,9 +11252,7 @@ if(rarfile_support):
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
@@ -11757,7 +11277,7 @@ if(not py7zr_support):
 if(py7zr_support):
 def SevenZipFileListFiles(infile, verbose=False, returnfp=False):
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 if(not os.path.exists(infile) or not os.path.isfile(infile)):
 return False
 lcfi = 0
@@ -11810,24 +11330,18 @@ if(py7zr_support):
 file_content[member.filename].close()
 try:
 fuid = int(os.getuid())
-except AttributeError:
-fuid = int(0)
-except KeyError:
+except (KeyError, AttributeError):
 fuid = int(0)
 try:
 fgid = int(os.getgid())
-except AttributeError:
-fgid = int(0)
-except KeyError:
+except (KeyError, AttributeError):
 fgid = int(0)
 try:
 import pwd
 try:
 userinfo = pwd.getpwuid(os.getuid())
 funame = userinfo.pw_name
-except KeyError:
-funame = ""
-except AttributeError:
+except (KeyError, AttributeError):
 funame = ""
 except ImportError:
 funame = ""
@@ -11837,9 +11351,7 @@ if(py7zr_support):
 try:
 groupinfo = grp.getgrgid(os.getgid())
 fgname = groupinfo.gr_name
-except KeyError:
-fgname = ""
-except AttributeError:
+except (KeyError, AttributeError):
 fgname = ""
 except ImportError:
 fgname = ""
@@ -11860,7 +11372,7 @@ if(py7zr_support):

 def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict__, seektoend=False, newstyle=False, returnfp=False):
 if(verbose):
-logging.basicConfig(format="%(message)s", stream=
+logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
 checkcompressfile = CheckCompressionSubType(infile, formatspecs, filestart, True)
 if(IsNestedDict(formatspecs) and checkcompressfile in formatspecs):
 formatspecs = formatspecs[checkcompressfile]
@@ -11879,7 +11391,7 @@ def InFileListFiles(infile, verbose=False, formatspecs=__file_format_multi_dict_
 return False


-def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["
+def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, seekstart=0, seekend=0, skipchecksum=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, seektoend=False, verbose=False, returnfp=False):
 outarray = MkTempFile()
 packform = PackFoxFile(infiles, outarray, dirlistfromtxt, compression, compresswholefile,
 compressionlevel, followlink, checksumtype, formatspecs, False, True)
@@ -11891,19 +11403,19 @@ def ListDirListFiles(infiles, dirlistfromtxt=False, compression="auto", compress
 PyNeoFile compatibility layer
 """

-def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
 return MakeEmptyFilePointer(fp, fmttype, checksumtype, formatspecs)

-def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='
+def make_empty_archive_file_pointer_neo(fp, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8'):
 return make_empty_file_pointer_neo(fp, fmttype, checksumtype, formatspecs, encoding)

-def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
 return MakeEmptyFile(outfile, fmttype, "auto", False, None, compressionlistalt, checksumtype, formatspecs, returnfp)

-def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='
+def make_empty_archive_file_neo(outfile=None, fmttype=None, checksumtype='md5', formatspecs=__file_format_multi_dict__, encoding='UTF-8', returnfp=False):
 return make_empty_file_neo(outfile, fmttype, checksumtype, formatspecs, encoding, returnfp)

-def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def pack_neo(infiles, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], encoding="UTF-8", compression="auto", compression_level=None, returnfp=False):
 return PackFoxFile(infiles, outfile, False, "auto", compression, False, compression_level, compressionlistalt, False, checksumtypes, [], {}, formatspecs, False, returnfp)

 def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonly=False, skipchecksum=False, uncompress=True, returnfp=False):
@@ -11912,7 +11424,7 @@ def archive_to_array_neo(infile, formatspecs=__file_format_multi_dict__, listonl
 def unpack_neo(infile, outdir='.', formatspecs=__file_format_multi_dict__, skipchecksum=False, uncompress=True, returnfp=False):
 return UnPackFoxFile(infile, outdir, False, 0, 0, skipchecksum, formatspecs, True, True, False, False, returnfp)

-def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["
+def repack_neo(infile, outfile=None, formatspecs=__file_format_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
 return RePackFoxFile(infile, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)

 def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False, return_details=False, returnfp=False):
@@ -11921,7 +11433,7 @@ def validate_neo(infile, formatspecs=__file_format_multi_dict__, verbose=False,
 def listfiles_neo(infile, formatspecs=__file_format_multi_dict__, advanced=False, include_dirs=True, returnfp=False):
 return FoxFileListFiles(infile, "auto", 0, 0, False, formatspecs, False, True, advanced, returnfp)

-def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["
+def convert_foreign_to_neo(infile, outfile=None, formatspecs=__file_format_multi_dict__, checksumtypes=["md5", "md5", "md5", "md5"], compression="auto", compression_level=None, returnfp=False):
 intmp = InFileToArray(infile, 0, 0, 0, False, True, False, formatspecs, False, False)
 return RePackFoxFile(intmp, outfile, "auto", compression, False, compression_level, compressionlistalt, False, 0, 0, checksumtypes, False, [], {}, formatspecs, False, False, returnfp)

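The PyNeoFile compatibility shims above likewise move to explicit md5 defaults (four slots for the packing wrappers). A hypothetical round trip through the shim API; the file and directory names are made up, and it assumes the wrappers are importable from the installed pyfoxfile module:

    # Hypothetical usage of the compatibility shims.
    from pyfoxfile import pack_neo, listfiles_neo, unpack_neo

    pack_neo(["./src"], "backup.fox")          # checksumtypes defaults to ["md5"] * 4
    listfiles_neo("backup.fox")                # print a table of contents
    unpack_neo("backup.fox", outdir="./restored")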
@@ -11963,10 +11475,7 @@ def download_file_from_ftp_file(url):
 ftp_port = 21
 try:
 ftp.connect(urlparts.hostname, ftp_port)
-except socket.gaierror:
-log.info("Error With URL "+url)
-return False
-except socket.timeout:
+except (socket.gaierror, socket.timeout):
 log.info("Error With URL "+url)
 return False
 if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
@@ -12054,10 +11563,7 @@ def upload_file_to_ftp_file(ftpfile, url):
 ftp_port = 21
 try:
 ftp.connect(urlparts.hostname, ftp_port)
-except socket.gaierror:
-log.info("Error With URL "+url)
-return False
-except socket.timeout:
+except (socket.gaierror, socket.timeout):
 log.info("Error With URL "+url)
 return False
 if(urlparts.scheme == "ftps" or isinstance(ftp, FTP_TLS)):
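Both FTP helpers trap DNS failures and timeouts in one handler since the recovery is identical. A minimal sketch of just the connect step; the host, port, and logger setup are placeholders:

    import logging
    import socket
    from ftplib import FTP

    log = logging.getLogger(__name__)

    def ftp_connect(hostname, ftp_port=21, timeout=30):
        ftp = FTP(timeout=timeout)
        try:
            ftp.connect(hostname, ftp_port)
        except (socket.gaierror, socket.timeout):
            # Unresolvable host, or no answer within the timeout window.
            log.info("Error With URL ftp://" + hostname)
            return False
        return ftp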
@@ -12168,7 +11674,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 else:
 response = requests.get(rebuilt_url, headers=headers, timeout=(5, 30), stream=True)
 response.raw.decode_content = True
-shutil.copyfileobj(response.raw, httpfile)
+shutil.copyfileobj(response.raw, httpfile, length=__filebuff_size__)

 # 2) HTTPX branch
 elif usehttp == 'httpx' and havehttpx:
@@ -12180,7 +11686,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 else:
 response = client.get(rebuilt_url, headers=headers)
 raw_wrapper = RawIteratorWrapper(response.iter_bytes())
-shutil.copyfileobj(raw_wrapper, httpfile)
+shutil.copyfileobj(raw_wrapper, httpfile, length=__filebuff_size__)

 # 3) Mechanize branch
 elif usehttp == 'mechanize' and havemechanize:
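The httpx branch funnels response.iter_bytes() through RawIteratorWrapper so copyfileobj can read it like a file. A hypothetical re-implementation of such a wrapper (the project's actual class may differ in detail):

    class IteratorReader(object):
        """File-like read() facade over an iterator of byte chunks."""

        def __init__(self, iterator):
            self._it = iter(iterator)
            self._buf = b""

        def read(self, size=-1):
            # Pull chunks until the request can be satisfied or EOF is hit.
            while size < 0 or len(self._buf) < size:
                try:
                    self._buf += next(self._it)
                except StopIteration:
                    break
            if size < 0:
                data, self._buf = self._buf, b""
            else:
                data, self._buf = self._buf[:size], self._buf[size:]
            return data

    # shutil.copyfileobj(IteratorReader(chunks), outfile, length=65536)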
@@ -12199,7 +11705,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):

 # Open the URL and copy the response to httpfile
 response = br.open(rebuilt_url)
-shutil.copyfileobj(response, httpfile)
+shutil.copyfileobj(response, httpfile, length=__filebuff_size__)

 # 4) Fallback to urllib
 else:
@@ -12212,7 +11718,7 @@ def download_file_from_http_file(url, headers=None, usehttp=__use_http_lib__):
 else:
 opener = build_opener()
 response = opener.open(request)
-shutil.copyfileobj(response, httpfile)
+shutil.copyfileobj(response, httpfile, length=__filebuff_size__)

 # Reset file pointer to the start before returning
 httpfile.seek(0, 0)
@@ -12345,7 +11851,7 @@ def upload_file_to_http_file(
 fileobj.seek(0)
 except Exception:
 pass
-shutil.copyfileobj(fileobj, buf)
+shutil.copyfileobj(fileobj, buf, length=__filebuff_size__)

 _w('\r\n')
 _w('--' + boundary + '--\r\n')
@@ -12434,10 +11940,7 @@ if(haveparamiko):
 username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
-except socket.gaierror:
-log.info("Error With URL "+url)
-return False
-except socket.timeout:
+except (socket.gaierror, socket.timeout):
 log.info("Error With URL "+url)
 return False
 sftp = ssh.open_sftp()
@@ -12491,10 +11994,7 @@ if(haveparamiko):
 username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
-except socket.gaierror:
-log.info("Error With URL "+url)
-return False
-except socket.timeout:
+except (socket.gaierror, socket.timeout):
 log.info("Error With URL "+url)
 return False
 sftp = ssh.open_sftp()
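The paramiko and pysftp transports share the same consolidated handling around ssh.connect. A trimmed, self-contained sketch of that sequence; host, credentials, and the logger are placeholders:

    import logging
    import socket

    import paramiko

    log = logging.getLogger(__name__)

    def open_sftp(hostname, port, username, password, url=""):
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        try:
            ssh.connect(hostname, port=port,
                        username=username, password=password)
        except paramiko.ssh_exception.SSHException:
            return False
        except (socket.gaierror, socket.timeout):
            log.info("Error With URL " + url)
            return False
        return ssh.open_sftp()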
@@ -12545,10 +12045,7 @@ if(havepysftp):
 username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
-except socket.gaierror:
-log.info("Error With URL "+url)
-return False
-except socket.timeout:
+except (socket.gaierror, socket.timeout):
 log.info("Error With URL "+url)
 return False
 sftpfile = MkTempFile()
@@ -12598,10 +12095,7 @@ if(havepysftp):
 username=sftp_username, password=sftp_password)
 except paramiko.ssh_exception.SSHException:
 return False
-except socket.gaierror:
-log.info("Error With URL "+url)
-return False
-except socket.timeout:
+except (socket.gaierror, socket.timeout):
 log.info("Error With URL "+url)
 return False
 sftpfile.seek(0, 0)