PyArchiveFile 0.26.0__py3-none-any.whl → 0.27.0__py3-none-any.whl
This diff compares two publicly released versions of the package as published to a supported public registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the registry.
- {pyarchivefile-0.26.0.data → pyarchivefile-0.27.0.data}/scripts/archivefile.py +1 -1
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.0.dist-info}/METADATA +1 -1
- pyarchivefile-0.27.0.dist-info/RECORD +10 -0
- pyarchivefile.py +751 -190
- pyarchivefile-0.26.0.dist-info/RECORD +0 -10
- {pyarchivefile-0.26.0.data → pyarchivefile-0.27.0.data}/scripts/archiveneofile.py +0 -0
- {pyarchivefile-0.26.0.data → pyarchivefile-0.27.0.data}/scripts/neoarchivefile.py +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.0.dist-info}/WHEEL +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.0.dist-info}/licenses/LICENSE +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.0.dist-info}/top_level.txt +0 -0
- {pyarchivefile-0.26.0.dist-info → pyarchivefile-0.27.0.dist-info}/zip-safe +0 -0
pyarchivefile.py
CHANGED
@@ -14,7 +14,7 @@
 Copyright 2018-2024 Game Maker 2k - http://intdb.sourceforge.net/
 Copyright 2018-2024 Kazuki Przyborowski - https://github.com/KazukiPrzyborowski

-$FileInfo: pyarchivefile.py - Last Update: 11/
+$FileInfo: pyarchivefile.py - Last Update: 11/14/2025 Ver. 0.27.0 RC 1 - Author: cooldude2k $
 '''

 from __future__ import absolute_import, division, print_function, unicode_literals, generators, with_statement, nested_scopes
@@ -24,7 +24,6 @@ import re
 import sys
 import time
 import stat
-import zlib
 import mmap
 import hmac
 import base64
@@ -38,8 +37,8 @@ import zipfile
 import binascii
 import datetime
 import platform
+import collections
 from io import StringIO, BytesIO
-from collections import namedtuple
 import posixpath  # POSIX-safe joins/normpaths
 try:
     from backports import tempfile
@@ -50,12 +49,16 @@ try:
     from http.server import BaseHTTPRequestHandler, HTTPServer
     from socketserver import TCPServer
     from urllib.parse import urlparse, parse_qs
-    import base64
 except ImportError:
     from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
     from SocketServer import TCPServer
     from urlparse import urlparse, parse_qs
-
+
+try:
+    # Python 3.8+ only
+    from multiprocessing import shared_memory
+except ImportError:
+    shared_memory = None

 # FTP Support
 ftpssl = True
@@ -146,6 +149,15 @@ try:
 except Exception:
     PATH_TYPES = (basestring,)

+def running_interactively():
+    main = sys.modules.get("__main__")
+    no_main_file = not hasattr(main, "__file__")
+    interactive_flag = bool(getattr(sys.flags, "interactive", 0))
+    return no_main_file or interactive_flag
+
+if running_interactively():
+    logging.basicConfig(format="%(message)s", stream=PY_STDOUT_TEXT, level=logging.DEBUG)
+
 def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):
     """
     Normalize any input to text_type (unicode on Py2, str on Py3).
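The new interactive-mode check can be exercised on its own; this standalone sketch mirrors the logic added above (logging is only auto-configured in a REPL or under `python -i`):

```python
import sys

def running_interactively():
    main = sys.modules.get("__main__")
    no_main_file = not hasattr(main, "__file__")          # REPL/embedded: no __main__.__file__
    interactive_flag = bool(getattr(sys.flags, "interactive", 0))  # set by "python -i"
    return no_main_file or interactive_flag

print(running_interactively())  # True in a REPL, False for "python script.py"
```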
@@ -166,7 +178,6 @@ def _ensure_text(s, encoding="utf-8", errors="replace", allow_none=False):

     # Handle pathlib.Path & other path-like objects
     try:
-        import os
         if hasattr(os, "fspath"):
             fs = os.fspath(s)
             if isinstance(fs, text_type):
@@ -207,7 +218,6 @@ except ImportError:

 # Windows-specific setup
 if os.name == "nt":
-    import io
     def _wrap(stream):
         buf = getattr(stream, "buffer", None)
         is_tty = getattr(stream, "isatty", lambda: False)()
@@ -444,7 +454,13 @@ if('PYARCHIVEFILE_CONFIG_FILE' in os.environ and os.path.exists(os.environ['PYARCHIVEFILE_CONFIG_FILE'])):
 else:
     prescriptpath = get_importing_script_path()
     if(prescriptpath is not None):
-
+        if(__use_ini_file__ and not __use_json_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_ini_name__)
+        elif(__use_json_file__ and not __use_ini_file__):
+            scriptconf = os.path.join(os.path.dirname(prescriptpath), __use_json_name__)
+        else:
+            scriptconf = ""
+            prescriptpath = None
     else:
         scriptconf = ""
     if os.path.exists(scriptconf):
@@ -641,12 +657,12 @@ __project__ = __program_name__
 __program_alt_name__ = __program_name__
 __project_url__ = "https://github.com/GameMaker2k/PyArchiveFile"
 __project_release_url__ = __project_url__+"/releases/latest"
-__version_info__ = (0,
-__version_date_info__ = (2025, 11,
+__version_info__ = (0, 27, 0, "RC 1", 1)
+__version_date_info__ = (2025, 11, 14, "RC 1", 1)
 __version_date__ = str(__version_date_info__[0]) + "." + str(
     __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
 __revision__ = __version_info__[3]
-__revision_id__ = "$Id:
+__revision_id__ = "$Id: 1cfe8ace647fdc1e6f8536a80322ea8257584b78 $"
 if(__version_info__[4] is not None):
     __version_date_plusrc__ = __version_date__ + \
         "-" + str(__version_date_info__[4])
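How the date string is derived from the new tuple, per the context lines in the hunk above:

```python
__version_date_info__ = (2025, 11, 14, "RC 1", 1)
__version_date__ = str(__version_date_info__[0]) + "." + str(
    __version_date_info__[1]).zfill(2) + "." + str(__version_date_info__[2]).zfill(2)
print(__version_date__)  # -> "2025.11.14"
```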
@@ -798,9 +814,9 @@ except Exception:
 geturls_ua_pyfile_python = "Mozilla/5.0 (compatible; {proname}/{prover}; +{prourl})".format(
     proname=__project__, prover=__version__, prourl=__project_url__)
 if(platform.python_implementation() != ""):
-    py_implementation = platform.python_implementation()
+    py_implementation = platform.python_implementation()+str(platform.python_version_tuple()[0])
 if(platform.python_implementation() == ""):
-    py_implementation = "CPython"
+    py_implementation = "CPython"+str(platform.python_version_tuple()[0])
 geturls_ua_pyfile_python_alt = "Mozilla/5.0 ({osver}; {archtype}; +{prourl}) {pyimp}/{pyver} (KHTML, like Gecko) {proname}/{prover}".format(osver=platform.system(
 )+" "+platform.release(), archtype=platform.machine(), prourl=__project_url__, pyimp=py_implementation, pyver=platform.python_version(), proname=__project__, prover=__version__)
 geturls_ua_googlebot_google = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
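Effect of this change: the implementation token in generated User-Agent strings now carries the Python major version, distinguishing Python 2 from 3 installs:

```python
import platform

py_implementation = platform.python_implementation() + str(platform.python_version_tuple()[0])
print(py_implementation)  # e.g. "CPython3", or "PyPy3" under PyPy
```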
@@ -816,13 +832,19 @@ geturls_headers_googlebot_google_old = {'Referer': "http://google.com/", 'User-A

 compressionsupport = []
 try:
-
+    try:
+        import compression.gzip as gzip
+    except ImportError:
+        import gzip
     compressionsupport.append("gz")
     compressionsupport.append("gzip")
 except ImportError:
     pass
 try:
-
+    try:
+        import compression.bz2 as bz2
+    except ImportError:
+        import bz2
     compressionsupport.append("bz2")
     compressionsupport.append("bzip2")
 except ImportError:
@@ -843,20 +865,20 @@ except ImportError:
     pass
 '''
 try:
-
+    try:
+        import compression.zstd as zstd
+    except ImportError:
+        import pyzstd.zstdfile as zstd
     compressionsupport.append("zst")
     compressionsupport.append("zstd")
     compressionsupport.append("zstandard")
 except ImportError:
+    pass
+try:
     try:
-        import
-    compressionsupport.append("zst")
-    compressionsupport.append("zstd")
-    compressionsupport.append("zstandard")
+        import compression.lzma as lzma
     except ImportError:
-
-    try:
-        import lzma
+        import lzma
     compressionsupport.append("lzma")
     compressionsupport.append("xz")
 except ImportError:
@@ -866,12 +888,18 @@ except ImportError:
     compressionsupport.append("xz")
 except ImportError:
     pass
-
-
-
-
-
-
+try:
+    try:
+        import compression.zlib as zlib
+    except ImportError:
+        import zlib
+    compressionsupport.append("zlib")
+    compressionsupport.append("zl")
+    compressionsupport.append("zz")
+    compressionsupport.append("Z")
+    compressionsupport.append("z")
+except ImportError:
+    pass
 compressionlist = ['auto']
 compressionlistalt = []
 outextlist = []
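All of the codec hunks above follow the same fallback pattern: prefer the stdlib `compression` namespace (added in Python 3.14 by PEP 784) and fall back to the classic module on older interpreters. A minimal sketch for one codec:

```python
try:
    import compression.bz2 as bz2   # Python 3.14+
except ImportError:
    import bz2                      # older Pythons

blob = bz2.compress(b"hello world")
print(bz2.decompress(blob))         # b'hello world'
```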
@@ -1045,6 +1073,14 @@ def to_ns(timestamp):
     # Multiply by 1e9 to get nanoseconds, then cast to int
     return int(seconds * 1000000000)

+def format_ns_utc(ts_ns, fmt='%Y-%m-%d %H:%M:%S'):
+    ts_ns = int(ts_ns)
+    sec, ns = divmod(ts_ns, 10**9)
+    dt = datetime.datetime.utcfromtimestamp(sec).replace(microsecond=ns // 1000)
+    base = dt.strftime(fmt)
+    ns_str = "%09d" % ns
+    return base + "." + ns_str
+
 def _split_posix(name):
     """
     Return a list of path parts without collapsing '..'.
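Assuming the module is importable as `pyarchivefile`, the new helper renders a nanosecond timestamp (as produced by `to_ns`) with full 9-digit fractional precision:

```python
from pyarchivefile import format_ns_utc

print(format_ns_utc(1700000000123456789))
# -> "2023-11-14 22:13:20.123456789"
```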
@@ -2081,7 +2117,7 @@ def MkTempFile(data=None,
               spool_max=__spoolfile_size__,
               spool_dir=__use_spooldir__,
               reset_to_start=True,
-              memfd_name=
+              memfd_name=__program_name__,
               memfd_allow_sealing=False,
               memfd_flags_extra=0,
               on_create=None):
@@ -2573,6 +2609,384 @@ def _is_valid_zlib_header(cmf, flg):
         return False
     return True

+class SharedMemoryFile(object):
+    """
+    File-like wrapper around multiprocessing.shared_memory.SharedMemory.
+
+    Binary-only API, intended to behave similarly to a regular file opened in
+    'rb', 'wb', or 'r+b' modes (but backed by a fixed-size shared memory block).
+
+    Notes:
+      - Requires Python 3.8+ at runtime to actually use SharedMemory.
+      - On Python 2, importing is fine but constructing will raise RuntimeError.
+      - There is no automatic resizing; buffer size is fixed by SharedMemory.
+      - No real fileno(); this does not represent an OS-level file descriptor.
+      - For text mode, wrap this with io.TextIOWrapper on Python 3:
+            f = SharedMemoryFile(...)
+            tf = io.TextIOWrapper(f, encoding="utf-8")
+    """
+
+    def __init__(self, shm=None, name=None, create=False, size=0,
+                 mode='r+b', offset=0, unlink_on_close=False):
+        """
+        Parameters:
+          shm   : existing SharedMemory object (preferred).
+          name  : name of shared memory block (for attach or create).
+          create: if True, create new SharedMemory; else attach existing.
+          size  : size in bytes (required when create=True).
+          mode  : like 'rb', 'wb', 'r+b', 'ab' (binary only; 't' not supported).
+          offset: starting offset within the shared memory buffer.
+          unlink_on_close: if True, call shm.unlink() when close() is called.
+
+        Usage examples:
+
+            # Create new block and file-like wrapper
+            f = SharedMemoryFile(name=None, create=True, size=4096, mode='r+b')
+
+            # Attach to existing shared memory by name
+            f = SharedMemoryFile(name="xyz", create=False, mode='r+b')
+
+            # Wrap an existing SharedMemory object
+            shm = shared_memory.SharedMemory(create=True, size=1024)
+            f = SharedMemoryFile(shm=shm, mode='r+b')
+        """
+        if shared_memory is None:
+            # No SharedMemory available on this interpreter
+            raise RuntimeError("multiprocessing.shared_memory.SharedMemory "
+                               "is not available on this Python version")
+
+        if 't' in mode:
+            raise ValueError("SharedMemoryFile is binary-only; "
+                             "wrap it with io.TextIOWrapper for text")
+
+        self.mode = mode
+        self._closed = False
+        self._unlinked = False
+        self._unlink_on_close = bool(unlink_on_close)
+
+        if shm is not None:
+            self._shm = shm
+        else:
+            # name may be None when create=True
+            self._shm = shared_memory.SharedMemory(name=name, create=create, size=size)
+
+        self._buf = self._shm.buf
+        self._base_offset = int(offset)
+        if self._base_offset < 0 or self._base_offset > len(self._buf):
+            raise ValueError("offset out of range")
+
+        # We treat the accessible region as [base_offset, len(buf))
+        self._size = len(self._buf) - self._base_offset
+        self._pos = 0  # logical file position within that region
+
+    # ---------- basic properties ----------
+
+    @property
+    def name(self):
+        # SharedMemory name (may be None for anonymous)
+        return getattr(self._shm, "name", None)
+
+    @property
+    def closed(self):
+        return self._closed
+
+    def readable(self):
+        return ('r' in self.mode) or ('+' in self.mode)
+
+    def writable(self):
+        return any(ch in self.mode for ch in ('w', 'a', '+'))
+
+    def seekable(self):
+        return True
+
+    # ---------- core helpers ----------
+
+    def _check_closed(self):
+        if self._closed:
+            raise ValueError("I/O operation on closed SharedMemoryFile")
+
+    def _clamp_pos(self, pos):
+        if pos < 0:
+            return 0
+        if pos > self._size:
+            return self._size
+        return pos
+
+    def _region_bounds(self):
+        """Return (start, end) absolute indices into the SharedMemory buffer."""
+        start = self._base_offset + self._pos
+        end = self._base_offset + self._size
+        return start, end
+
+    # ---------- positioning ----------
+
+    def seek(self, offset, whence=0):
+        """
+        Seek to a new file position.
+
+        whence: 0 = from start, 1 = from current, 2 = from end.
+        """
+        self._check_closed()
+        offset = int(offset)
+        whence = int(whence)
+
+        if whence == 0:  # from start
+            new_pos = offset
+        elif whence == 1:  # from current
+            new_pos = self._pos + offset
+        elif whence == 2:  # from end
+            new_pos = self._size + offset
+        else:
+            raise ValueError("invalid whence (expected 0, 1, or 2)")
+
+        self._pos = self._clamp_pos(new_pos)
+        return self._pos
+
+    def tell(self):
+        return self._pos
+
+    # ---------- reading ----------
+
+    def read(self, size=-1):
+        """
+        Read up to 'size' bytes (or to EOF if size<0 or None).
+        Returns bytes (py3) or str (py2).
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        if size is None or size < 0:
+            size = self._size - self._pos
+        else:
+            size = int(size)
+            if size < 0:
+                size = 0
+
+        if size == 0:
+            return b'' if not PY2 else ''
+
+        start, end_abs = self._region_bounds()
+        available = end_abs - (self._base_offset + self._pos)
+        if available <= 0:
+            return b'' if not PY2 else ''
+
+        size = min(size, available)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        chunk = self._buf[abs_start:abs_end]
+        if PY2:
+            data = bytes(chunk)  # bytes() -> str in py2
+        else:
+            data = bytes(chunk)
+
+        self._pos += len(data)
+        return data
+
+    def readline(self, size=-1):
+        """
+        Read a single line (ending with '\\n' or EOF).
+        If size >= 0, at most that many bytes are returned.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Determine maximum bytes we can scan
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return b'' if not PY2 else ''
+
+        if size is not None and size >= 0:
+            size = int(size)
+            max_len = min(size, remaining)
+        else:
+            max_len = remaining
+
+        abs_start = self._base_offset + self._pos
+        abs_max = abs_start + max_len
+
+        # Work on a local bytes slice for easy .find()
+        if PY2:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+        else:
+            buf_bytes = bytes(self._buf[abs_start:abs_max])
+
+        idx = buf_bytes.find(b'\n')
+        if idx == -1:
+            # No newline; read entire chunk
+            line_bytes = buf_bytes
+        else:
+            line_bytes = buf_bytes[:idx + 1]
+
+        self._pos += len(line_bytes)
+
+        if PY2:
+            return line_bytes  # already str
+        return line_bytes
+
+    def readinto(self, b):
+        """
+        Read bytes into a pre-allocated writable buffer (bytearray/memoryview).
+        Returns number of bytes read.
+        """
+        self._check_closed()
+        if not self.readable():
+            raise IOError("SharedMemoryFile not opened for reading")
+
+        # Normalize target buffer
+        if isinstance(b, memoryview):
+            mv = b
+        else:
+            mv = memoryview(b)
+
+        size = len(mv)
+        if size <= 0:
+            return 0
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if remaining <= 0:
+            return 0
+
+        size = min(size, remaining)
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + size
+
+        mv[:size] = self._buf[abs_start:abs_end]
+        self._pos += size
+        return size
+
+    # ---------- writing ----------
+
+    def write(self, data):
+        """
+        Write bytes-like object to the shared memory region.
+
+        Returns number of bytes written. Will raise if not opened writable
+        or if writing would overflow the fixed-size region.
+        """
+        self._check_closed()
+        if not self.writable():
+            raise IOError("SharedMemoryFile not opened for writing")
+
+        if isinstance(data, memoryview):
+            data = bytes(data)
+        elif isinstance(data, bytearray):
+            data = bytes(data)
+
+        if not isinstance(data, binary_types):
+            raise TypeError("write() expects a bytes-like object")
+
+        data_len = len(data)
+        if data_len == 0:
+            return 0
+
+        # Handle "append" semantics roughly: start from end on first write
+        if 'a' in self.mode and self._pos == 0:
+            # Move to logical end of region
+            self._pos = self._size
+
+        start, end_abs = self._region_bounds()
+        remaining = end_abs - (self._base_offset + self._pos)
+        if data_len > remaining:
+            raise IOError("write would overflow SharedMemory region (need %d, have %d)"
+                          % (data_len, remaining))
+
+        abs_start = self._base_offset + self._pos
+        abs_end = abs_start + data_len
+
+        self._buf[abs_start:abs_end] = data
+        self._pos += data_len
+        return data_len
+
+    def flush(self):
+        """
+        No-op for shared memory; provided for file-like compatibility.
+        """
+        self._check_closed()
+        # nothing to flush
+
+    # ---------- unlink / close / context manager ----------
+
+    def unlink(self):
+        """
+        Unlink (destroy) the underlying shared memory block.
+
+        After unlink(), new processes cannot attach via name.
+        Existing attachments (including this one) can continue to use
+        the memory until they close() it.
+
+        This is idempotent: calling it more than once is safe.
+        """
+        if self._unlinked:
+            return
+
+        try:
+            self._shm.unlink()
+        except AttributeError:
+            # Should not happen on normal Python 3.8+,
+            # but keep a clear error if it does.
+            raise RuntimeError("Underlying SharedMemory object "
+                               "does not support unlink()")
+
+        self._unlinked = True
+
+    def close(self):
+        if self._closed:
+            return
+        self._closed = True
+
+        # Optionally unlink on close if requested
+        if self._unlink_on_close and not self._unlinked:
+            try:
+                self.unlink()
+            except Exception:
+                # best-effort; close anyway
+                pass
+
+        try:
+            self._shm.close()
+        except Exception:
+            pass
+
+    def __enter__(self):
+        self._check_closed()
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.close()
+
+    # ---------- iteration ----------
+
+    def __iter__(self):
+        return self
+
+    def __next__(self):
+        line = self.readline()
+        if (not line) or len(line) == 0:
+            raise StopIteration
+        return line
+
+    if PY2:
+        next = __next__
+
+    # ---------- misc helpers ----------
+
+    def fileno(self):
+        """
+        There is no real OS-level file descriptor; raise OSError for APIs
+        that require a fileno().
+        """
+        raise OSError("SharedMemoryFile does not have a real fileno()")
+
+    def isatty(self):
+        return False
+
 # ---------- Main class ----------
 class ZlibFile(object):
     """
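A small round-trip sketch with the new wrapper (needs Python 3.8+ for `multiprocessing.shared_memory`; `unlink_on_close` destroys the block when closed):

```python
f = SharedMemoryFile(create=True, size=4096, mode='r+b', unlink_on_close=True)
f.write(b"first line\nsecond line\n")
f.seek(0)
print(f.readline())   # b'first line\n'
print(f.read(6))      # b'second'
f.close()
```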
@@ -4464,7 +4878,7 @@ def ReadFileHeaderDataWithContentToArray(fp, listonly=False, contentasfile=True,
         extrastart = extrastart + 1
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(HeaderOut)>extraend):
+    if((len(HeaderOut) - 4)>extraend):
         extrastart = extraend
         extraend = len(HeaderOut) - 4
         while(extrastart < extraend):
@@ -4684,6 +5098,15 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     while(extrastart < extraend):
         fextrafieldslist.append(HeaderOut[extrastart])
         extrastart = extrastart + 1
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(HeaderOut) - 4)>extraend):
+        extrastart = extraend
+        extraend = len(HeaderOut) - 4
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     if(fextrafields==1):
         try:
             fextrafieldslist = json.loads(base64.b64decode(fextrafieldslist[0]).decode("UTF-8"))
@@ -4693,6 +5116,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fjstart = fp.tell()
     if(fjsontype=="json"):
         fjsoncontent = {}
         fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
@@ -4759,6 +5183,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
     fp.seek(len(delimiter), 1)
+    fjend = fp.tell() - 1
     jsonfcs = GetFileChecksum(fprejsoncontent, fjsonchecksumtype, True, formatspecs, saltkey)
     if(not CheckChecksums(fjsonchecksum, jsonfcs) and not skipchecksum):
         VerbosePrintOut("File JSON Data Checksum Error with file " +
@@ -4791,6 +5216,7 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
         pyhascontents = False
     fcontents.seek(0, 0)
     newfccs = GetFileChecksum(fcontents, HeaderOut[-3].lower(), False, formatspecs, saltkey)
+    fcontents.seek(0, 0)
     if(not CheckChecksums(fccs, newfccs) and not skipchecksum and not listonly):
         VerbosePrintOut("File Content Checksum Error with file " +
                         fname + " at offset " + str(fcontentstart))
@@ -4830,8 +5256,8 @@ def ReadFileHeaderDataWithContentToList(fp, listonly=False, contentasfile=False,
     fcontents.seek(0, 0)
     if(not contentasfile):
         fcontents = fcontents.read()
-    outlist = [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
-               finode, flinkcount, fdev, frdev, fseeknextfile, fjsoncontent,
+    outlist = {'fheaders': [ftype, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+               fcsize, fuid, funame, fgid, fgname, fid, finode, flinkcount, fdev, frdev, fseeknextfile], 'fextradata': fextrafieldslist, 'fjsoncontent': fjsoncontent, 'fcontents': fcontents, 'fjsonchecksumtype': fjsonchecksumtype, 'fheaderchecksumtype': HeaderOut[-4].lower(), 'fcontentchecksumtype': HeaderOut[-3].lower()}
     return outlist


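`ReadFileHeaderDataWithContentToList` now returns a dict rather than a flat list; a sketch of consuming the new shape (key names as in the hunk above):

```python
entry = ReadFileHeaderDataWithContentToList(fp)  # fp: an open archive stream
headers = entry['fheaders']          # positional header fields, as before
metadata = entry['fjsoncontent']     # parsed JSON/YAML metadata, if any
payload = entry['fcontents']         # bytes, or a file object if contentasfile=True
```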
@@ -4848,6 +5274,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4867,7 +5294,7 @@ def ReadFileDataWithContent(fp, filestart=0, listonly=False, uncompress=True, sk
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
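Why this fix matters: the old message interpolated a truncated expression, while the new code captures `fp.tell()` before any header reads so the error can report the real position. A minimal illustration of the pattern (stream content is illustrative):

```python
import io

fp = io.BytesIO(b"ArchiveFile001...")
headeroffset = fp.tell()   # position where this header begins
magic = fp.read(11)        # later reads advance the cursor
print("header began at offset", headeroffset)  # -> 0, not the post-read offset
```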
@@ -4919,6 +5346,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -4951,7 +5379,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
             pass
     fvendorfieldslist = []
     fvendorfields = 0;
-    if(len(inheader)>extraend):
+    if((len(inheader) - 2)>extraend):
         extrastart = extraend
         extraend = len(inheader) - 2
         while(extrastart < extraend):
@@ -4961,8 +5389,8 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
-    fheadctime = int(inheader[
-    fheadmtime = int(inheader[
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
     fhencoding = inheader[4]
     fostype = inheader[5]
     fpythontype = inheader[6]
@@ -5071,7 +5499,7 @@ def ReadFileDataWithContentToArray(fp, filestart=0, seekstart=0, seekend=0, list
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5181,6 +5609,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     CatSizeEnd = CatSize
     fp.seek(curloc, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelszie = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelszie).decode("UTF-8")
@@ -5211,19 +5640,98 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
             fextrafieldslist = json.loads(fextrafieldslist[0])
         except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
             pass
+    fvendorfieldslist = []
+    fvendorfields = 0;
+    if((len(inheader) - 2)>extraend):
+        extrastart = extraend
+        extraend = len(inheader) - 2
+        while(extrastart < extraend):
+            fvendorfieldslist.append(HeaderOut[extrastart])
+            extrastart = extrastart + 1
+            fvendorfields = fvendorfields + 1
     formversion = re.findall("([\\d]+)", formstring)
     fheadsize = int(inheader[0], 16)
     fnumfields = int(inheader[1], 16)
+    fheadctime = int(inheader[2], 16)
+    fheadmtime = int(inheader[3], 16)
+    fhencoding = inheader[4]
+    fostype = inheader[5]
+    fpythontype = inheader[6]
+    fprojectname = inheader[7]
     fnumfiles = int(inheader[8], 16)
-    fseeknextfile =
-    fjsontype =
+    fseeknextfile = inheader[9]
+    fjsontype = inheader[10]
     fjsonlen = int(inheader[11], 16)
     fjsonsize = int(inheader[12], 16)
     fjsonchecksumtype = inheader[13]
     fjsonchecksum = inheader[14]
     fjsoncontent = {}
     fjstart = fp.tell()
-
+    if(fjsontype=="json"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if(fjsonsize > 0):
+            try:
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8"))
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = json.loads(fprejsoncontent)
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        if (fjsonsize > 0):
+            try:
+                # try base64 → utf-8 → YAML
+                fjsonrawcontent = base64.b64decode(fprejsoncontent.encode("UTF-8")).decode("UTF-8")
+                fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+            except (binascii.Error, UnicodeDecodeError, yaml.YAMLError):
+                try:
+                    # fall back to treating the bytes as plain text YAML
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = yaml.safe_load(fjsonrawcontent) or {}
+                except (UnicodeDecodeError, yaml.YAMLError):
+                    # final fallback: empty
+                    fprejsoncontent = ""
+                    fjsonrawcontent = fprejsoncontent
+                    fjsoncontent = {}
+        else:
+            fprejsoncontent = ""
+            fjsonrawcontent = fprejsoncontent
+            fjsoncontent = {}
+    elif(not testyaml and fjsontype == "yaml"):
+        fjsoncontent = {}
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        fprejsoncontent = ""
+        fjsonrawcontent = fprejsoncontent
+    elif(fjsontype=="list"):
+        fprejsoncontent = fp.read(fjsonsize).decode("UTF-8")
+        flisttmp = MkTempFile()
+        flisttmp.write(fprejsoncontent.encode())
+        flisttmp.seek(0)
+        fjsoncontent = ReadFileHeaderData(flisttmp, fjsonlen, delimiter)
+        flisttmp.close()
+        fjsonrawcontent = fjsoncontent
+        if(fjsonlen==1):
+            try:
+                fjsonrawcontent = base64.b64decode(fjsoncontent[0]).decode("UTF-8")
+                fjsoncontent = json.loads(base64.b64decode(fjsoncontent[0]).decode("UTF-8"))
+                fjsonlen = len(fjsoncontent)
+            except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                try:
+                    fjsonrawcontent = fjsoncontent[0]
+                    fjsoncontent = json.loads(fjsoncontent[0])
+                except (binascii.Error, json.decoder.JSONDecodeError, UnicodeDecodeError):
+                    pass
     fjend = fp.tell()
     if(re.findall("^\\+([0-9]+)", fseeknextfile)):
         fseeknextasnum = int(fseeknextfile.replace("+", ""))
@@ -5254,7 +5762,7 @@ def ReadFileDataWithContentToList(fp, filestart=0, seekstart=0, seekend=0, listo
     newfcs = GetHeaderChecksum([formstring] + inheader[:-1], fprechecksumtype, True, formatspecs, saltkey)
     if(not headercheck and not skipchecksum):
         VerbosePrintOut(
-            "File Header Checksum Error with file at offset " + str(
+            "File Header Checksum Error with file at offset " + str(headeroffset))
         VerbosePrintOut("'" + fprechecksum + "' != " +
                         "'" + newfcs + "'")
         return False
@@ -5758,7 +6266,7 @@ def AppendFileHeader(fp, numfiles, fencoding, extradata=[], jsondata={}, checksu
     else:
         fctime = format(int(to_ns(time.time())), 'x').lower()
     # Serialize the first group
-    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__, fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
+    fnumfilesa = AppendNullBytes([tmpoutlenhex, fctime, fctime, fencoding, platform.system(), py_implementation, __program_name__+str(__version_info__[0]), fnumfiles_hex, "+"+str(len(formatspecs['format_delimiter']))], delimiter)
     # Append tmpoutlist
     fnumfilesa += AppendNullBytes(tmpoutlist, delimiter)
     # Append extradata items if any
@@ -5984,9 +6492,7 @@ def AppendFileHeaderWithContent(fp, filevalues=[], extradata=[], jsondata={}, fi
         pass
     return fp

-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentToList(infiles, dirlistfromtxt=False, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     advancedlist = __use_advanced_list__
     altinode = __use_alt_inode__
     infilelist = []
@@ -6028,16 +6534,8 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
     inodetofile = {}
     filetoinode = {}
     inodetoforminode = {}
-    numfiles = int(len(GetDirList))
-    fnumfiles = format(numfiles, 'x').lower()
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
     FullSizeFilesAlt = 0
+    tmpoutlist = []
     for curfname in GetDirList:
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", curfname)):
@@ -6212,7 +6710,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
-                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
             if(typechecktest is False and not compresswholefile):
@@ -6261,7 +6759,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             if(typechecktest is not False):
-                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+                typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
             if(typechecktest is False and not compresswholefile):
@@ -6305,10 +6803,29 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    GetDirList = AppendFilesWithContentToList(infiles, dirlistfromtxt, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, followlink, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    if(not hasattr(fp, "write")):
+        return False
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
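The append path is now two-phase: a builder returns a list of entry dicts and the writer serializes them. A sketch of the split (paths and archive name are illustrative):

```python
entries = AppendFilesWithContentToList(["./docs", "./README.md"])
print(len(entries), entries[0]['fheaders'][3])   # entry count, first file name

with open("backup.arc", "wb") as outfp:          # the writer runs the builder itself
    AppendFilesWithContent(["./docs", "./README.md"], outfp)
```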
@@ -6317,9 +6834,7 @@ def AppendFilesWithContent(infiles, fp, dirlistfromtxt=False, extradata=[], json
         pass
     return fp

-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromTarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6361,10 +6876,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
                 formatspecs = formatspecs[compresscheck]
             if(compresscheck=="zstd"):
-                if '
-                    infile = ZstdFile(
-                elif 'pyzstd' in sys.modules:
-                    infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+                if 'zstd' in compressionsupport:
+                    infile = zstd.ZstdFile(infile, mode="rb")
                 tarfp = tarfile.open(fileobj=infile, mode="r")
             else:
                 tarfp = tarfile.open(fileobj=infile, mode="r")
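Because both backends are now imported under one name (`compression.zstd` on Python 3.14+, otherwise `pyzstd.zstdfile`), a single code path opens zstandard-compressed tarballs; a sketch using the module's names:

```python
import tarfile

if 'zstd' in compressionsupport:                 # populated by the import probes above
    infile = zstd.ZstdFile(infile, mode="rb")    # wrap the raw byte stream
tarfp = tarfile.open(fileobj=infile, mode="r")
```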
@@ -6373,23 +6886,14 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
             if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
                 formatspecs = formatspecs[compresscheck]
             if(compresscheck=="zstd"):
-                if '
-                    infile = ZstdFile(
-                elif 'pyzstd' in sys.modules:
-                    infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+                if 'zstd' in compressionsupport:
+                    infile = zstd.ZstdFile(infile, mode="rb")
                 tarfp = tarfile.open(fileobj=infile, mode="r")
             else:
                 tarfp = tarfile.open(infile, "r")
     except FileNotFoundError:
         return False
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(tarfp.getmembers(), key=lambda x: x.name):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.name)):
@@ -6402,14 +6906,8 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         ffullmode = member.mode
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if(member.isreg()):
             ffullmode = member.mode + stat.S_IFREG
@@ -6487,7 +6985,7 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         if(typechecktest is not False):
-            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=
+            typechecktest = GetBinaryFileType(fcontents, filestart=0, closefp=False)
         fcontents.seek(0, 0)
         fcencoding = GetFileEncoding(fcontents, 0, False)[0]
         if(typechecktest is False and not compresswholefile):
@@ -6531,22 +7029,38 @@ def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromTarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromTarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
            os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp

-def
-    if(not hasattr(fp, "write")):
-        return False
+def AppendFilesWithContentFromZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
     curinode = 0
     curfid = 0
     inodelist = []
@@ -6579,14 +7093,7 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
     ziptest = zipfp.testzip()
     if(ziptest):
         VerbosePrintOut("Bad file found!")
-
-    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-    try:
-        fp.flush()
-        if(hasattr(os, "sync")):
-            os.fsync(fp.fileno())
-    except (io.UnsupportedOperation, AttributeError, OSError):
-        pass
+    tmpoutlist = []
     for member in sorted(zipfp.infolist(), key=lambda x: x.filename):
         fencoding = "UTF-8"
         if(re.findall("^[.|/]", member.filename)):
@@ -6602,14 +7109,8 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         fpremode = int(stat.S_IFREG | 0x1b6)
         flinkcount = 0
         fblksize = 0
-        if(hasattr(fstatinfo, "st_blksize")):
-            fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
         fblocks = 0
-        if(hasattr(fstatinfo, "st_blocks")):
-            fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
         fflags = 0
-        if(hasattr(fstatinfo, "st_flags")):
-            fflags = format(int(fstatinfo.st_flags), 'x').lower()
         ftype = 0
         if ((hasattr(member, "is_dir") and member.is_dir()) or member.filename.endswith('/')):
             ftype = 5
@@ -6749,26 +7250,44 @@ def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, com
         if(fcompression == "none"):
             fcompression = ""
         fcontents.seek(0, 0)
+        if(not contentasfile):
+            fcontents = fcontents.read()
         ftypehex = format(ftype, 'x').lower()
-        tmpoutlist
-
-
+        tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                           fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+    return tmpoutlist
+
+def AppendFilesWithContentFromZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+    if(not hasattr(fp, "write")):
+        return False
+    GetDirList = AppendFilesWithContentFromZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+    numfiles = int(len(GetDirList))
+    fnumfiles = format(numfiles, 'x').lower()
+    AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+    try:
+        fp.flush()
+        if(hasattr(os, "sync")):
+            os.fsync(fp.fileno())
+    except (io.UnsupportedOperation, AttributeError, OSError):
+        pass
+    for curfname in GetDirList:
+        tmpoutlist = curfname['fheaders']
+        AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
     try:
         fp.flush()
         if(hasattr(os, "sync")):
             os.fsync(fp.fileno())
     except (io.UnsupportedOperation, AttributeError, OSError):
         pass
-    fcontents.close()
     return fp

 if(not rarfile_support):
-    def
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         return False
-else:
     def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-
-
+        return False
+else:
+    def AppendFilesWithContentFromRarFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         curinode = 0
         curfid = 0
         inodelist = []
@@ -6783,20 +7302,7 @@ else:
         rartest = rarfp.testrar()
         if(rartest):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(rarfp.infolist(), key=lambda x: x.filename):
             is_unix = False
             is_windows = False
@@ -6841,14 +7347,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_file()):
                 ftype = 0
@@ -6986,26 +7486,84 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                               fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromRarFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromRarFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp
 
 if(not py7zr_support):
-    def
+    def sevenzip_readall(infile, **kwargs):
         return False
 else:
+    class _MemoryIO(py7zr.Py7zIO):
+        """In-memory file object used by py7zr's factory API."""
+        def __init__(self):
+            self._buf = bytearray()
+        def write(self, data):
+            # py7zr will call this repeatedly with chunks
+            self._buf.extend(data)
+        def read(self, size=None):
+            if size is None:
+                return bytes(self._buf)
+            return bytes(self._buf[:size])
+        def seek(self, offset, whence=0):
+            # seeking is not needed for this read-everything use case
+            return 0
+        def flush(self):
+            pass
+        def size(self):
+            return len(self._buf)
+    class _MemoryFactory(py7zr.WriterFactory):
+        """Factory that creates _MemoryIO objects and keeps them by filename."""
+        def __init__(self):
+            self.files = {}
+        def create(self, filename: str) -> py7zr.Py7zIO:
+            io_obj = _MemoryIO()
+            self.files[filename] = io_obj
+            return io_obj
+    def sevenzip_readall(infile, **kwargs):
+        """
+        Replacement for SevenZipFile.readall() using the new py7zr API.
+
+        Returns: dict[filename -> _MemoryIO]
+        """
+        factory = _MemoryFactory()
+        with py7zr.SevenZipFile(infile, mode="r", **kwargs) as archive:
+            archive.extractall(factory=factory)
+        return factory.files
+
+if(not py7zr_support):
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        return False
     def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
-
-
+        return False
+else:
+    def AppendFilesWithContentFromSevenZipFileToList(infile, extradata=[], jsondata={}, contentasfile=False, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
         formver = formatspecs['format_ver']
         fileheaderver = str(int(formver.replace(".", "")))
         curinode = 0
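Note: _MemoryIO and _MemoryFactory adapt py7zr's factory-based extraction (SevenZipFile.extractall(factory=...)) so that every member lands in an in-memory buffer keyed by filename, mirroring the dict the old readall() returned. A hypothetical usage sketch ("example.7z" is a placeholder path, not a file shipped with the package):

members = sevenzip_readall("example.7z")
for name, memio in members.items():
    data = memio.read()            # _MemoryIO.read() returns the whole buffer
    print(name, memio.size(), len(data))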
@@ -7017,19 +7575,15 @@ else:
         if(not os.path.exists(infile) or not os.path.isfile(infile)):
             return False
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
             VerbosePrintOut("Bad file found!")
-
-        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
-        try:
-            fp.flush()
-            if(hasattr(os, "sync")):
-                os.fsync(fp.fileno())
-        except (io.UnsupportedOperation, AttributeError, OSError):
-            pass
+        tmpoutlist = []
         for member in sorted(szpfp.list(), key=lambda x: x.filename):
             fencoding = "UTF-8"
             if(re.findall("^[.|/]", member.filename)):
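Note: the try/except AttributeError above is feature detection rather than version pinning: readall() is tried first, and the factory-based shim only runs on py7zr builds where readall() no longer exists. The same pattern in isolation, as a minimal sketch (read_7z_members is a hypothetical name):

def read_7z_members(szpfp, infile):
    try:
        return szpfp.readall()              # older py7zr: returns a dict
    except AttributeError:
        return sevenzip_readall(infile)     # newer py7zr: factory-based shim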
@@ -7047,14 +7601,8 @@ else:
             fcsize = format(int(0), 'x').lower()
             flinkcount = 0
             fblksize = 0
-            if(hasattr(fstatinfo, "st_blksize")):
-                fblksize = format(int(fstatinfo.st_blksize), 'x').lower()
             fblocks = 0
-            if(hasattr(fstatinfo, "st_blocks")):
-                fblocks = format(int(fstatinfo.st_blocks), 'x').lower()
             fflags = 0
-            if(hasattr(fstatinfo, "st_flags")):
-                fflags = format(int(fstatinfo.st_flags), 'x').lower()
             ftype = 0
             if(member.is_directory):
                 ftype = 5
@@ -7121,7 +7669,10 @@ else:
             typechecktest = CheckCompressionType(fcontents, filestart=0, closefp=False)
             fcontents.seek(0, 0)
             fcencoding = GetFileEncoding(fcontents, 0, False)[0]
-
+            try:
+                file_content[member.filename].close()
+            except AttributeError:
+                pass
             if(typechecktest is False and not compresswholefile):
                 fcontents.seek(0, 2)
                 ucfsize = fcontents.tell()
@@ -7163,17 +7714,34 @@ else:
             if(fcompression == "none"):
                 fcompression = ""
             fcontents.seek(0, 0)
+            if(not contentasfile):
+                fcontents = fcontents.read()
             ftypehex = format(ftype, 'x').lower()
-            tmpoutlist
-
-
+            tmpoutlist.append({'fheaders': [ftypehex, fencoding, fcencoding, fname, flinkname, fsize, fblksize, fblocks, fflags, fatime, fmtime, fctime, fbtime, fmode, fwinattributes, fcompression,
+                               fcsize, fuid, funame, fgid, fgname, fcurfid, fcurinode, flinkcount, fdev, frdev, "+"+str(len(formatspecs['format_delimiter']))], 'fextradata': extradata, 'fjsoncontent': jsondata, 'fcontents': fcontents, 'fjsonchecksumtype': checksumtype[2], 'fheaderchecksumtype': checksumtype[0], 'fcontentchecksumtype': checksumtype[1]})
+        return tmpoutlist
+    def AppendFilesWithContentFromSevenZipFile(infile, fp, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, compressionuselist=compressionlistalt, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
+        if(not hasattr(fp, "write")):
+            return False
+        GetDirList = AppendFilesWithContentFromSevenZipFileToList(infile, extradata, jsondata, False, compression, compresswholefile, compressionlevel, compressionuselist, [checksumtype[2], checksumtype[3], checksumtype[3]], formatspecs, saltkey, verbose)
+        numfiles = int(len(GetDirList))
+        fnumfiles = format(numfiles, 'x').lower()
+        AppendFileHeader(fp, numfiles, "UTF-8", [], {}, [checksumtype[0], checksumtype[1]], formatspecs, saltkey)
+        try:
+            fp.flush()
+            if(hasattr(os, "sync")):
+                os.fsync(fp.fileno())
+        except (io.UnsupportedOperation, AttributeError, OSError):
+            pass
+        for curfname in GetDirList:
+            tmpoutlist = curfname['fheaders']
+            AppendFileHeaderWithContent(fp, tmpoutlist, curfname['fextradata'], curfname['fjsoncontent'], curfname['fcontents'], [curfname['fheaderchecksumtype'], curfname['fcontentchecksumtype'], curfname['fjsonchecksumtype']], formatspecs, saltkey)
         try:
             fp.flush()
             if(hasattr(os, "sync")):
                 os.fsync(fp.fileno())
         except (io.UnsupportedOperation, AttributeError, OSError):
             pass
-        fcontents.close()
         return fp
 
 def AppendListsWithContent(inlist, fp, dirlistfromtxt=False, extradata=[], jsondata={}, compression="auto", compresswholefile=True, compressionlevel=None, followlink=False, checksumtype=["md5", "md5", "md5", "md5", "md5"], formatspecs=__file_format_dict__, saltkey=None, verbose=False):
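Note: every *ToList builder emits records of the same shape, which is what AppendFileHeaderWithContent consumes. An illustrative record (field values hypothetical; header fields are lowercase hex strings, as built above):

entry = {
    'fheaders': ['0', 'UTF-8', 'UTF-8', '/example.txt'],   # truncated header list
    'fextradata': [],
    'fjsoncontent': {},
    'fcontents': b'hello',          # raw bytes when contentasfile=False
    'fheaderchecksumtype': 'md5',
    'fcontentchecksumtype': 'md5',
    'fjsonchecksumtype': 'md5',
}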
@@ -8443,10 +9011,8 @@ def CheckCompressionSubType(infile, formatspecs=__file_format_multi_dict__, file
     elif(compresscheck == "lz4" and compresscheck in compressionsupport):
         fp = lz4.frame.open(infile, "rb")
     elif(compresscheck == "zstd" and compresscheck in compressionsupport):
-        if '
-            fp = ZstdFile(infile, mode="rb")
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode="rb")
         else:
             return False
     elif((compresscheck == "lzma" or compresscheck == "xz") and compresscheck in compressionsupport):
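Note: the zstd call sites now funnel through a single zstd.ZstdFile name instead of branching between ZstdFile and pyzstd.zstdfile.ZstdFile at each site. One way such a name can be bound at import time, as a hedged sketch (the actual binding in pyarchivefile's import block is outside this hunk):

try:
    from compression import zstd       # Python 3.14+ standard library
except ImportError:
    try:
        import pyzstd as zstd          # third-party backend exposing ZstdFile
    except ImportError:
        zstd = None
# either backend then supports: zstd.ZstdFile(path_or_fileobj, mode="rb")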
@@ -8563,10 +9129,8 @@ def UncompressFileAlt(fp, formatspecs=__file_format_multi_dict__, filestart=0,
     elif kind in ("lzma","xz") and (("lzma" in compressionsupport) or ("xz" in compressionsupport)):
         wrapped = lzma.LZMAFile(src)
     elif kind == "zstd" and ("zstd" in compressionsupport or "zstandard" in compressionsupport):
-        if '
-            wrapped = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            wrapped = pyzstd.zstdfile.ZstdFile(fileobj=src, mode="rb")
+        if 'zstd' in compressionsupport:
+            wrapped = zstd.ZstdFile(src, mode="rb")
         else:
             return False
     elif kind == "lz4" and "lz4" in compressionsupport:
@@ -8634,10 +9198,8 @@ def UncompressFile(infile, formatspecs=__file_format_multi_dict__, mode="rb",
     elif (compresscheck == "bzip2" and "bzip2" in compressionsupport):
         fp = bz2.open(infile, mode)
     elif (compresscheck == "zstd" and "zstandard" in compressionsupport):
-        if '
-            fp = ZstdFile(infile, mode=mode)
-        elif 'pyzstd' in sys.modules:
-            fp = pyzstd.zstdfile.ZstdFile(infile, mode=mode)
+        if 'zstd' in compressionsupport:
+            fp = zstd.ZstdFile(infile, mode=mode)
         else:
             return False
     elif (compresscheck == "lz4" and "lz4" in compressionsupport):
@@ -9406,10 +9968,8 @@ def CompressOpenFile(outfile, compressionenable=True, compressionlevel=None,
         outfp = FileLikeAdapter(bz2.open(outfile, mode, compressionlevel), mode="wb")
 
     elif (fextname == ".zst" and "zstandard" in compressionsupport):
-        if '
-            outfp = FileLikeAdapter(ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
-        elif 'pyzstd' in sys.modules:
-            outfp = FileLikeAdapter(pyzstd.zstdfile.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
+        if 'zstd' in compressionsupport:
+            outfp = FileLikeAdapter(zstd.ZstdFile(outfile, mode=mode, level=compressionlevel), mode="wb")
         else:
             return False  # fix: 'Flase' -> False
 
@@ -9726,6 +10286,7 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_
         formatspecs = formatspecs[compresschecking]
     fp.seek(filestart, 0)
     inheaderver = str(int(formatspecs['format_ver'].replace(".", "")))
+    headeroffset = fp.tell()
     formstring = fp.read(formatspecs['format_len'] + len(inheaderver)).decode("UTF-8")
     formdelsize = len(formatspecs['format_delimiter'])
     formdel = fp.read(formdelsize).decode("UTF-8")
@@ -9733,7 +10294,6 @@ def ArchiveFileValidate(infile, fmttype="auto", filestart=0, formatspecs=__file_
         return False
     if(formdel != formatspecs['format_delimiter']):
         return False
-    headeroffset = fp.tell()
     if(__use_new_style__):
         inheader = ReadFileHeaderDataBySize(fp, formatspecs['format_delimiter'])
     else:
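Note: headeroffset is now captured before the format magic and delimiter are read, so the recorded offset marks the true start of the header rather than a point past it. A toy illustration (the byte layout here is hypothetical, not the real archive format):

import io
fp = io.BytesIO(b"MAGIC1\x00fields")
headeroffset = fp.tell()      # 0: start of the header (new placement)
magic = fp.read(6)            # format string + version
delim = fp.read(1)            # delimiter
# the old placement took fp.tell() here, i.e. 7, past the magic and delimiter
assert headeroffset == 0 and fp.tell() == 7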
@@ -11049,10 +11609,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(fileobj=infile, mode="r")
@@ -11061,10 +11619,8 @@ def TarFileListFiles(infile, verbose=False, returnfp=False):
     if(IsNestedDict(formatspecs) and compresscheck in formatspecs):
         formatspecs = formatspecs[compresscheck]
     if(compresscheck=="zstd"):
-        if '
-            infile = ZstdFile(
-        elif 'pyzstd' in sys.modules:
-            infile = pyzstd.zstdfile.ZstdFile(fileobj=infile, mode="rb")
+        if 'zstd' in compressionsupport:
+            infile = zstd.ZstdFile(infile, mode="rb")
         tarfp = tarfile.open(fileobj=infile, mode="r")
     else:
         tarfp = tarfile.open(infile, "r")
@@ -11391,7 +11947,10 @@ if(py7zr_support):
         lcfi = 0
         returnval = {}
         szpfp = py7zr.SevenZipFile(infile, mode="r")
-
+        try:
+            file_content = szpfp.readall()
+        except AttributeError:
+            file_content = sevenzip_readall(infile)
         #sztest = szpfp.testzip()
         sztestalt = szpfp.test()
         if(sztestalt):
@@ -11435,7 +11994,10 @@ if(py7zr_support):
             printfname = member.filename
             if(ftype == 0):
                 fsize = len(file_content[member.filename].read())
-
+            try:
+                file_content[member.filename].close()
+            except AttributeError:
+                pass
             try:
                 fuid = int(os.getuid())
             except (KeyError, AttributeError):
@@ -13570,7 +14132,6 @@ def run_http_file_server(fileobj, url, on_progress=None, backlog=5):
         if not ah or not ah.strip().lower().startswith("basic "):
             return False
         try:
-            import base64
             b64 = ah.strip().split(" ", 1)[1]
             raw = base64.b64decode(_to_bytes(b64))
             try: raw_txt = raw.decode("utf-8")
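Note: the function-local import base64 is dropped; base64.b64decode resolves against the module-level import, and the surrounding logic is ordinary RFC 7617 Basic-credentials decoding. The decode step in isolation, as a hedged sketch (parse_basic_auth and the sample header value are hypothetical):

import base64

def parse_basic_auth(header_value):
    # header_value looks like "Basic dXNlcjpwYXNz"
    b64 = header_value.strip().split(" ", 1)[1]
    raw = base64.b64decode(b64.encode("utf-8"))
    user, _, password = raw.decode("utf-8").partition(":")
    return user, password

print(parse_basic_auth("Basic dXNlcjpwYXNz"))   # ('user', 'pass')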
|